VirtualBox source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h @ 65755
Last change on this file since 65755 was 65755, checked in by vboxsync, 8 years ago: "IEM: 0x0f 0x7f split up."

/* $Id: IEMAllInstructions.cpp.h 65755 2017-02-13 09:17:43Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
#endif

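/*
 * Added orientation note: the decoders below all key off the ModR/M byte,
 * which splits into mod (bits 7:6), reg (bits 5:3) and r/m (bits 2:0);
 * mod == 3 selects a register operand, anything else a memory operand.
 * Example: 0xc3 = 11 000 011b gives mod=3, reg=0, rm=3, so the bytes
 * "00 c3" (ADD Eb,Gb) resolve to "add bl, al"; REX prefixes extend the
 * reg/rm fields via the uRexReg/uRexB bits used below.
 */
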
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
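
/* Usage sketch (informal; the actual opcode handlers live in the one-byte
   map): an ALU opcode such as 0x00 (ADD Eb,Gb) is expected to simply forward
   to this worker with its implementation table, along the lines of
   "return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);". */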


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}

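/* Note on the LOCK handling pattern above: for memory destinations the decode
   is only declared complete early (IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX)
   when the operation has no locked variant, so a LOCK prefix on CMP/TEST
   still raises #UD; for the other operations the pfnLockedUxx worker is
   selected whenever IEM_OP_PRF_LOCK was seen among the prefixes. */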

/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *, pu8Dst, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags, 2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
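
/* Usage note (informal): this is the worker for encodings like 0x04, i.e.
   "ADD AL, imm8"; the other ALU opcodes ending in 0x4 (0x0c OR, 0x24 AND,
   and so on) are expected to forward here with their respective tables. */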


/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
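
/* Note (added): in 64-bit mode the Iz immediate remains 32 bits wide and is
   sign-extended into the 64-bit operation via IEM_OPCODE_GET_NEXT_S32_SX_U64
   above; e.g. the bytes 48 05 ff ff ff ff decode as "add rax, -1". There is
   no imm64 form of these ALU instructions. */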


/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid with RM byte. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence and an 8-bit
 * immediate. */
FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte and a Mod R/M
 * sequence. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte, a Mod R/M sequence,
 * and an 8-bit immediate. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}

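/* Background (added note): on Intel CPUs these reserved encodings still
   consume the ModR/M byte, any addressing bytes and any immediate before
   #UD is raised, while AMD CPUs fault on the opcode itself; the
   CPUMCPUVENDOR_INTEL checks above model that difference so the reported
   instruction length matches the vendor's hardware. */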


/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};
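
/* Dispatch example (informal): for the byte sequence 0f 00 d0 the ModR/M
   byte 0xd0 is 11 010 000b, so the reg field (2) selects
   g_apfnGroup6[2] = iemOp_Grp6_lldt, and mod=3/rm=0 makes it the register
   form, i.e. "lldt ax". */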

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

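/* Dispatch example (informal): 0f 01 with ModR/M 0xd1 (mod=3, reg=2, rm=1)
   is xsetbv, while the same reg=2 with a memory operand (mod != 3) decodes
   as lgdt; group 7 thus gives the mod!=3 forms one meaning and reuses the
   rm field as a sub-opcode for the mod=3 forms. */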
/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
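
/* Note (added): 3DNow! shares the 0x0f 0x0f opcode for all its instructions
   and encodes the actual operation in a trailing opcode byte (architecturally
   an imm8 following the ModR/M and displacement bytes), e.g. 0f 0f /r b4 is
   pfmul; since the valid suffixes are sparse, a switch is used above instead
   of a 256-entry dispatch table. */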


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1676
1677
1678/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1679FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1680
1681/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1682FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1683
1684/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1685FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1686{
1687 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1690 {
1691 /*
1692 * Register, register.
1693 */
1694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1695 IEM_MC_BEGIN(0, 1);
1696 IEM_MC_LOCAL(uint64_t, uSrc);
1697
1698 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1699 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1700 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1701 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1702
1703 IEM_MC_ADVANCE_RIP();
1704 IEM_MC_END();
1705 }
1706 else
1707 {
1708 /*
1709 * Memory, register.
1710 */
1711 IEM_MC_BEGIN(0, 2);
1712 IEM_MC_LOCAL(uint64_t, uSrc);
1713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1714
1715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1719
1720 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1721 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1722
1723 IEM_MC_ADVANCE_RIP();
1724 IEM_MC_END();
1725 }
1726 return VINF_SUCCESS;
1727}
1728
1729
1730/** Opcode 0x0f 0x12. */
1731FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1732
1733/** Opcode 0x66 0x0f 0x12. */
1734FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1735
1736/** Opcode 0xf3 0x0f 0x12. */
1737FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1738
1739/** Opcode 0xf2 0x0f 0x12. */
1740FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1741
1742/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1743FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1744
1745/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1746FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1747{
1748 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1750 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1751 {
1752#if 0
1753 /*
1754 * Register, register.
1755 */
1756 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1757 IEM_MC_BEGIN(0, 1);
1758 IEM_MC_LOCAL(uint64_t, uSrc);
1759 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1760 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1761 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1762 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1763 IEM_MC_ADVANCE_RIP();
1764 IEM_MC_END();
1765#else
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767#endif
1768 }
1769 else
1770 {
1771 /*
1772 * Memory, register.
1773 */
1774 IEM_MC_BEGIN(0, 2);
1775 IEM_MC_LOCAL(uint64_t, uSrc);
1776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1777
1778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1779 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1780 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1781 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1782
1783 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1784 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1785
1786 IEM_MC_ADVANCE_RIP();
1787 IEM_MC_END();
1788 }
1789 return VINF_SUCCESS;
1790}
1791
1792/* Opcode 0xf3 0x0f 0x13 - invalid */
1793/* Opcode 0xf2 0x0f 0x13 - invalid */
1794
1795/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1796FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1797/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1798FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1799/* Opcode 0xf3 0x0f 0x14 - invalid */
1800/* Opcode 0xf2 0x0f 0x14 - invalid */
1801/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1802FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1803/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1804FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1805/* Opcode 0xf3 0x0f 0x15 - invalid */
1806/* Opcode 0xf2 0x0f 0x15 - invalid */
1807/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
1808FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1809/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1810FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1811/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1812FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1813/* Opcode 0xf2 0x0f 0x16 - invalid */
1814/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1815FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1816/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1817FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1818/* Opcode 0xf3 0x0f 0x17 - invalid */
1819/* Opcode 0xf2 0x0f 0x17 - invalid */
1820
1821
1822/** Opcode 0x0f 0x18. */
1823FNIEMOP_DEF(iemOp_prefetch_Grp16)
1824{
1825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1826 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1827 {
1828 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1829 {
1830 case 4: /* Aliased to /0 for the time being according to AMD. */
1831 case 5: /* Aliased to /0 for the time being according to AMD. */
1832 case 6: /* Aliased to /0 for the time being according to AMD. */
1833 case 7: /* Aliased to /0 for the time being according to AMD. */
1834 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1835 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1836 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1837 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1839 }
1840
1841 IEM_MC_BEGIN(0, 1);
1842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1845 /* Currently a NOP. */
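     /* The PREFETCHh instructions are pure performance hints with no
        architecturally visible effect, so calculating the address and
        otherwise doing nothing is a correct implementation. */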
1846 NOREF(GCPtrEffSrc);
1847 IEM_MC_ADVANCE_RIP();
1848 IEM_MC_END();
1849 return VINF_SUCCESS;
1850 }
1851
1852 return IEMOP_RAISE_INVALID_OPCODE();
1853}
1854
1855
1856/** Opcode 0x0f 0x19..0x1f. */
1857FNIEMOP_DEF(iemOp_nop_Ev)
1858{
1859 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1862 {
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_BEGIN(0, 0);
1865 IEM_MC_ADVANCE_RIP();
1866 IEM_MC_END();
1867 }
1868 else
1869 {
1870 IEM_MC_BEGIN(0, 1);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1874 /* Currently a NOP. */
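     /* The Ev operand only serves to consume the ModR/M, SIB and
        displacement bytes; 0F 1F /0 is the Intel-recommended multi-byte
        NOP encoding. */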
1875 NOREF(GCPtrEffSrc);
1876 IEM_MC_ADVANCE_RIP();
1877 IEM_MC_END();
1878 }
1879 return VINF_SUCCESS;
1880}
1881
1882
1883/** Opcode 0x0f 0x20. */
1884FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1885{
1886 /* mod is ignored, as are operand size overrides. */
1887 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1888 IEMOP_HLP_MIN_386();
1889 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1890 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1891 else
1892 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1893
1894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1895 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1897 {
1898 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
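     /* This is AMD's alternative CR8 encoding for 32-bit mode: with the
        feature present, e.g. 'lock mov eax, cr0' accesses CR8 (the TPR);
        without it the LOCK prefix yields #UD, as checked below. */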
1899 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1900 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1901 iCrReg |= 8;
1902 }
1903 switch (iCrReg)
1904 {
1905 case 0: case 2: case 3: case 4: case 8:
1906 break;
1907 default:
1908 return IEMOP_RAISE_INVALID_OPCODE();
1909 }
1910 IEMOP_HLP_DONE_DECODING();
1911
1912 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1913}
1914
1915
1916/** Opcode 0x0f 0x21. */
1917FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1918{
1919 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1920 IEMOP_HLP_MIN_386();
1921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1923 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1924 return IEMOP_RAISE_INVALID_OPCODE();
1925 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1926 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1927 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1928}
1929
1930
1931/** Opcode 0x0f 0x22. */
1932FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1933{
1934 /* mod is ignored, as are operand size overrides. */
1935 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1936 IEMOP_HLP_MIN_386();
1937 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1938 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1939 else
1940 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1941
1942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1943 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1944 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1945 {
1946 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1947 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1948 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1949 iCrReg |= 8;
1950 }
1951 switch (iCrReg)
1952 {
1953 case 0: case 2: case 3: case 4: case 8:
1954 break;
1955 default:
1956 return IEMOP_RAISE_INVALID_OPCODE();
1957 }
1958 IEMOP_HLP_DONE_DECODING();
1959
1960 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1961}
1962
1963
1964/** Opcode 0x0f 0x23. */
1965FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1966{
1967 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1968 IEMOP_HLP_MIN_386();
1969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1971 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1972 return IEMOP_RAISE_INVALID_OPCODE();
1973 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1974 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1975 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1976}
1977
1978
1979/** Opcode 0x0f 0x24. */
1980FNIEMOP_DEF(iemOp_mov_Rd_Td)
1981{
1982 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1983 /** @todo works on 386 and 486. */
1984 /* The RM byte is not considered, see testcase. */
1985 return IEMOP_RAISE_INVALID_OPCODE();
1986}
1987
1988
1989/** Opcode 0x0f 0x26. */
1990FNIEMOP_DEF(iemOp_mov_Td_Rd)
1991{
1992 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1993 /** @todo works on 386 and 486. */
1994 /* The RM byte is not considered, see testcase. */
1995 return IEMOP_RAISE_INVALID_OPCODE();
1996}
1997
1998
1999/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
2000FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2001{
2002 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
2003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2005 {
2006 /*
2007 * Register, register.
2008 */
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 IEM_MC_BEGIN(0, 0);
2011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2012 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2013 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2014 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2015 IEM_MC_ADVANCE_RIP();
2016 IEM_MC_END();
2017 }
2018 else
2019 {
2020 /*
2021 * Register, memory.
2022 */
2023 IEM_MC_BEGIN(0, 2);
2024 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2026
2027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2031
2032 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2033 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2034
2035 IEM_MC_ADVANCE_RIP();
2036 IEM_MC_END();
2037 }
2038 return VINF_SUCCESS;
2039}
2040
2041/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
2042FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2043{
2044 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
2045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2046 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2047 {
2048 /*
2049 * Register, register.
2050 */
2051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2052 IEM_MC_BEGIN(0, 0);
2053 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2054 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2055 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2056 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 }
2060 else
2061 {
2062 /*
2063 * Register, memory.
2064 */
2065 IEM_MC_BEGIN(0, 2);
2066 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2068
2069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2071 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2072 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2073
2074 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2075 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2076
2077 IEM_MC_ADVANCE_RIP();
2078 IEM_MC_END();
2079 }
2080 return VINF_SUCCESS;
2081}
2082
2083/* Opcode 0xf3 0x0f 0x28 - invalid */
2084/* Opcode 0xf2 0x0f 0x28 - invalid */
2085
2086/** Opcode 0x0f 0x29. */
2087FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
2088{
2089 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2090 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2091 else
2092 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2094 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2095 {
2096 /*
2097 * Register, register.
2098 */
2099 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
2100 IEM_MC_BEGIN(0, 0);
2101 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 else
2104 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2105 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2106 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2107 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 else
2112 {
2113 /*
2114 * Memory, register.
2115 */
2116 IEM_MC_BEGIN(0, 2);
2117 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2119
2120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2121 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2122 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2123 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2124 else
2125 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2126 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2127
2128 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2129 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
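         /* The _ALIGN_SSE store raises #GP(0) when the effective address is
            not 16-byte aligned, matching the movaps/movapd requirement. */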
2130
2131 IEM_MC_ADVANCE_RIP();
2132 IEM_MC_END();
2133 }
2134 return VINF_SUCCESS;
2135}
2136
2137
2138/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2139FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2140/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2141FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2142/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2143FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2144/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2145FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2146
2147
2148/** Opcode 0x0f 0x2b. */
2149FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
2150{
2151 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2152 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2153 else
2154 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2156 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2157 {
2158 /*
2159 * memory, register.
2160 */
2161 IEM_MC_BEGIN(0, 2);
2162 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2164
2165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2166 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2169 else
2170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2172
2173 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2174 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
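         /* The non-temporal hint is ignored here; an ordinary aligned store
            is architecturally equivalent. */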
2175
2176 IEM_MC_ADVANCE_RIP();
2177 IEM_MC_END();
2178 }
2179 /* The register, register encoding is invalid. */
2180 else
2181 return IEMOP_RAISE_INVALID_OPCODE();
2182 return VINF_SUCCESS;
2183}
2184
2185
2186/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2187FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2188/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2189FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2190/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2191FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2192/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2193FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2194
2195/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2196FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2197/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2198FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2199/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2200FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2201/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2202FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2203
2204/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2205FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2206/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2207FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2208/* Opcode 0xf3 0x0f 0x2e - invalid */
2209/* Opcode 0xf2 0x0f 0x2e - invalid */
2210
2211/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2212FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2213/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2214FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2215/* Opcode 0xf3 0x0f 0x2f - invalid */
2216/* Opcode 0xf2 0x0f 0x2f - invalid */
2217
2218/** Opcode 0x0f 0x30. */
2219FNIEMOP_DEF(iemOp_wrmsr)
2220{
2221 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2223 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2224}
2225
2226
2227/** Opcode 0x0f 0x31. */
2228FNIEMOP_DEF(iemOp_rdtsc)
2229{
2230 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2232 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2233}
2234
2235
2236/** Opcode 0x0f 0x32. */
2237FNIEMOP_DEF(iemOp_rdmsr)
2238{
2239 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2242}
2243
2244
2245/** Opcode 0x0f 0x33. */
2246FNIEMOP_STUB(iemOp_rdpmc);
2247/** Opcode 0x0f 0x34. */
2248FNIEMOP_STUB(iemOp_sysenter);
2249/** Opcode 0x0f 0x35. */
2250FNIEMOP_STUB(iemOp_sysexit);
2251/** Opcode 0x0f 0x37. */
2252FNIEMOP_STUB(iemOp_getsec);
2253/** Opcode 0x0f 0x38. */
2254FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2255/** Opcode 0x0f 0x3a. */
2256FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2257
2258
2259/**
2260 * Implements a conditional move.
2261 *
2262 * Wish there was an obvious way to do this where we could share and reduce
2263 * code bloat.
2264 *
2265 * @param a_Cnd The conditional "microcode" operation.
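 *
 * @remarks Two architectural details are mirrored below: in 64-bit mode a
 *          32-bit CMOVcc zero-extends the destination even when the condition
 *          is false (hence the IEM_MC_CLEAR_HIGH_GREG_U64 else branches), and
 *          the memory operand is always fetched, so it may fault regardless
 *          of the condition.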
2266 */
2267#define CMOV_X(a_Cnd) \
2268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2269 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2270 { \
2271 switch (pVCpu->iem.s.enmEffOpSize) \
2272 { \
2273 case IEMMODE_16BIT: \
2274 IEM_MC_BEGIN(0, 1); \
2275 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2276 a_Cnd { \
2277 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2278 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2279 } IEM_MC_ENDIF(); \
2280 IEM_MC_ADVANCE_RIP(); \
2281 IEM_MC_END(); \
2282 return VINF_SUCCESS; \
2283 \
2284 case IEMMODE_32BIT: \
2285 IEM_MC_BEGIN(0, 1); \
2286 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2287 a_Cnd { \
2288 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2289 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2290 } IEM_MC_ELSE() { \
2291 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2292 } IEM_MC_ENDIF(); \
2293 IEM_MC_ADVANCE_RIP(); \
2294 IEM_MC_END(); \
2295 return VINF_SUCCESS; \
2296 \
2297 case IEMMODE_64BIT: \
2298 IEM_MC_BEGIN(0, 1); \
2299 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2300 a_Cnd { \
2301 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2302 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2303 } IEM_MC_ENDIF(); \
2304 IEM_MC_ADVANCE_RIP(); \
2305 IEM_MC_END(); \
2306 return VINF_SUCCESS; \
2307 \
2308 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2309 } \
2310 } \
2311 else \
2312 { \
2313 switch (pVCpu->iem.s.enmEffOpSize) \
2314 { \
2315 case IEMMODE_16BIT: \
2316 IEM_MC_BEGIN(0, 2); \
2317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2318 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2320 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2321 a_Cnd { \
2322 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2323 } IEM_MC_ENDIF(); \
2324 IEM_MC_ADVANCE_RIP(); \
2325 IEM_MC_END(); \
2326 return VINF_SUCCESS; \
2327 \
2328 case IEMMODE_32BIT: \
2329 IEM_MC_BEGIN(0, 2); \
2330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2331 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2333 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2334 a_Cnd { \
2335 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2336 } IEM_MC_ELSE() { \
2337 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2338 } IEM_MC_ENDIF(); \
2339 IEM_MC_ADVANCE_RIP(); \
2340 IEM_MC_END(); \
2341 return VINF_SUCCESS; \
2342 \
2343 case IEMMODE_64BIT: \
2344 IEM_MC_BEGIN(0, 2); \
2345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2346 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2348 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2349 a_Cnd { \
2350 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2351 } IEM_MC_ENDIF(); \
2352 IEM_MC_ADVANCE_RIP(); \
2353 IEM_MC_END(); \
2354 return VINF_SUCCESS; \
2355 \
2356 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2357 } \
2358 } do {} while (0)
2359
2360
2361
2362/** Opcode 0x0f 0x40. */
2363FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2364{
2365 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2366 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2367}
2368
2369
2370/** Opcode 0x0f 0x41. */
2371FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2372{
2373 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2374 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2375}
2376
2377
2378/** Opcode 0x0f 0x42. */
2379FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2380{
2381 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2382 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2383}
2384
2385
2386/** Opcode 0x0f 0x43. */
2387FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2388{
2389 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2390 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2391}
2392
2393
2394/** Opcode 0x0f 0x44. */
2395FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2396{
2397 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2398 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2399}
2400
2401
2402/** Opcode 0x0f 0x45. */
2403FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2404{
2405 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2406 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2407}
2408
2409
2410/** Opcode 0x0f 0x46. */
2411FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2412{
2413 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2414 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2415}
2416
2417
2418/** Opcode 0x0f 0x47. */
2419FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2420{
2421 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2422 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2423}
2424
2425
2426/** Opcode 0x0f 0x48. */
2427FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2428{
2429 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2430 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2431}
2432
2433
2434/** Opcode 0x0f 0x49. */
2435FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2436{
2437 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2438 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2439}
2440
2441
2442/** Opcode 0x0f 0x4a. */
2443FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2444{
2445 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2446 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2447}
2448
2449
2450/** Opcode 0x0f 0x4b. */
2451FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2452{
2453 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2454 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2455}
2456
2457
2458/** Opcode 0x0f 0x4c. */
2459FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2460{
2461 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2462 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2463}
2464
2465
2466/** Opcode 0x0f 0x4d. */
2467FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2468{
2469 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2470 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2471}
2472
2473
2474/** Opcode 0x0f 0x4e. */
2475FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2476{
2477 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2478 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2479}
2480
2481
2482/** Opcode 0x0f 0x4f. */
2483FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2484{
2485 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2486 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2487}
2488
2489#undef CMOV_X
2490
2491/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2492FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2493/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2494FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2495/* Opcode 0xf3 0x0f 0x50 - invalid */
2496/* Opcode 0xf2 0x0f 0x50 - invalid */
2497
2498/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2499FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2500/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2501FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2502/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2503FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2504/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2505FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2506
2507/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2508FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2509/* Opcode 0x66 0x0f 0x52 - invalid */
2510/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2511FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2512/* Opcode 0xf2 0x0f 0x52 - invalid */
2513
2514/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2515FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2516/* Opcode 0x66 0x0f 0x53 - invalid */
2517/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2518FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2519/* Opcode 0xf2 0x0f 0x53 - invalid */
2520
2521/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2522FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2523/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2524FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2525/* Opcode 0xf3 0x0f 0x54 - invalid */
2526/* Opcode 0xf2 0x0f 0x54 - invalid */
2527
2528/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2529FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2530/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2531FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2532/* Opcode 0xf3 0x0f 0x55 - invalid */
2533/* Opcode 0xf2 0x0f 0x55 - invalid */
2534
2535/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2536FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2537/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2538FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2539/* Opcode 0xf3 0x0f 0x56 - invalid */
2540/* Opcode 0xf2 0x0f 0x56 - invalid */
2541
2542/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2543FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2544/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2545FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2546/* Opcode 0xf3 0x0f 0x57 - invalid */
2547/* Opcode 0xf2 0x0f 0x57 - invalid */
2548
2549/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2550FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2551/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2552FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2553/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2554FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2555/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2556FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2557
2558/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2559FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2560/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2561FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2562/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2563FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2564/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2565FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2566
2567/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2568FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2569/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2570FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2571/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2572FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2573/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2574FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2575
2576/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2577FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2578/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2579FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2580/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2581FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2582/* Opcode 0xf2 0x0f 0x5b - invalid */
2583
2584/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2585FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2586/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2587FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2588/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2589FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2590/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2591FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2592
2593/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2594FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2595/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2596FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2597/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2598FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2599/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2600FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2601
2602/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2603FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2604/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2605FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2606/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2607FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2608/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2609FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2610
2611/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2612FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2613/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2614FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2615/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2616FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2617/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2618FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2619
2620/**
2621 * Common worker for SSE2 instructions on the forms:
2622 * pxxxx xmm1, xmm2/mem128
2623 *
2624 * The 2nd operand is the low half of an XMM register, which in the memory
2625 * case means a 64-bit access that must be 128-bit aligned (only the low
2626 * 64 bits are used).
2627 *
2628 * Exceptions type 4.
2629 */
2630FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2634 {
2635 /*
2636 * Register, register.
2637 */
2638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2639 IEM_MC_BEGIN(2, 0);
2640 IEM_MC_ARG(uint128_t *, pDst, 0);
2641 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2643 IEM_MC_PREPARE_SSE_USAGE();
2644 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2645 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2646 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2647 IEM_MC_ADVANCE_RIP();
2648 IEM_MC_END();
2649 }
2650 else
2651 {
2652 /*
2653 * Register, memory.
2654 */
2655 IEM_MC_BEGIN(2, 2);
2656 IEM_MC_ARG(uint128_t *, pDst, 0);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2660
2661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2664 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
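         /* Only the low qword is used, but the access is still subject to
            the 128-bit alignment check like on real hardware. */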
2665
2666 IEM_MC_PREPARE_SSE_USAGE();
2667 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2668 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2669
2670 IEM_MC_ADVANCE_RIP();
2671 IEM_MC_END();
2672 }
2673 return VINF_SUCCESS;
2674}
2675
2676
2677/**
2678 * Common worker for MMX instructions on the forms:
2679 * pxxxx mm1, mm2/mem32
2680 *
2681 * The 2nd operand is the low half of an MMX register, which in the memory
2682 * case means a 32-bit memory access.  Instructions without an MMX variant
2683 * (pfnU64 is NULL) raise \#UD.
2684 *
2685 * Exceptions type 4.
2686 */
2687FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2688{
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (!pImpl->pfnU64)
2691 return IEMOP_RAISE_INVALID_OPCODE();
2692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2693 {
2694 /*
2695 * Register, register.
2696 */
2697 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2698 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(2, 0);
2701 IEM_MC_ARG(uint64_t *, pDst, 0);
2702 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2703 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2704 IEM_MC_PREPARE_FPU_USAGE();
2705 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2706 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2707 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /*
2714 * Register, memory.
2715 */
2716 IEM_MC_BEGIN(2, 2);
2717 IEM_MC_ARG(uint64_t *, pDst, 0);
2718 IEM_MC_LOCAL(uint32_t, uSrc);
2719 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2721
2722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2725 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2726
2727 IEM_MC_PREPARE_FPU_USAGE();
2728 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2729 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2730
2731 IEM_MC_ADVANCE_RIP();
2732 IEM_MC_END();
2733 }
2734 return VINF_SUCCESS;
2735}
2736
2737
2738/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2739FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2740{
2741 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2742 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2743}
2744
2745/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2746FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2747{
2748 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2749 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2750}
2751
2752/* Opcode 0xf3 0x0f 0x60 - invalid */
2753
2754
2755/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2756FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2757{
2758 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2759 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2760}
2761
2762/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2763FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2764{
2765 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2766 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2767}
2768
2769/* Opcode 0xf3 0x0f 0x61 - invalid */
2770
2771
2772/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2773FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2774{
2775 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2776 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2777}
2778
2779/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2780FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2781{
2782 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2783 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2784}
2785
2786/* Opcode 0xf3 0x0f 0x62 - invalid */
2787
2788
2789
2790/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2791FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2792/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2793FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2794/* Opcode 0xf3 0x0f 0x63 - invalid */
2795
2796/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2797FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2798/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2799FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2800/* Opcode 0xf3 0x0f 0x64 - invalid */
2801
2802/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2803FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2804/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2805FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2806/* Opcode 0xf3 0x0f 0x65 - invalid */
2807
2808/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2809FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2810/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2811FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2812/* Opcode 0xf3 0x0f 0x66 - invalid */
2813
2814/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2815FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2816/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2817FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2818/* Opcode 0xf3 0x0f 0x67 - invalid */
2819
2820
2821/**
2822 * Common worker for MMX instructions on the form:
2823 * pxxxx mm1, mm2/mem64
2824 *
2825 * The 2nd operand is the high half of an MMX register, which in the
2826 * memory case means a plain 64-bit memory access reading the whole
2827 * quadword.
2828 *
2829 * Exceptions type 4.
2830 */
2831FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2832{
2833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2834 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2836 {
2837 /*
2838 * Register, register.
2839 */
2840 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2841 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2843 IEM_MC_BEGIN(2, 0);
2844 IEM_MC_ARG(uint64_t *, pDst, 0);
2845 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2846 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2847 IEM_MC_PREPARE_FPU_USAGE();
2848 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2849 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2850 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2851 IEM_MC_ADVANCE_RIP();
2852 IEM_MC_END();
2853 }
2854 else
2855 {
2856 /*
2857 * Register, memory.
2858 */
2859 IEM_MC_BEGIN(2, 2);
2860 IEM_MC_ARG(uint64_t *, pDst, 0);
2861 IEM_MC_LOCAL(uint64_t, uSrc);
2862 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2864
2865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2867 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2868 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2869
2870 IEM_MC_PREPARE_FPU_USAGE();
2871 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2872 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2873
2874 IEM_MC_ADVANCE_RIP();
2875 IEM_MC_END();
2876 }
2877 return VINF_SUCCESS;
2878}
2879
2880
2881/**
2882 * Common worker for SSE2 instructions on the form:
2883 * pxxxx xmm1, xmm2/mem128
2884 *
2885 * The 2nd operand is the high half of an XMM register, which in the memory
2886 * case means a 128-bit aligned access of which the implementation may read
2887 * the full 128 bits or only the upper 64 bits.
2888 *
2889 * Exceptions type 4.
2890 */
2891FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2892{
2893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2895 {
2896 /*
2897 * Register, register.
2898 */
2899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2900 IEM_MC_BEGIN(2, 0);
2901 IEM_MC_ARG(uint128_t *, pDst, 0);
2902 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2903 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2904 IEM_MC_PREPARE_SSE_USAGE();
2905 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2906 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2907 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2908 IEM_MC_ADVANCE_RIP();
2909 IEM_MC_END();
2910 }
2911 else
2912 {
2913 /*
2914 * Register, memory.
2915 */
2916 IEM_MC_BEGIN(2, 2);
2917 IEM_MC_ARG(uint128_t *, pDst, 0);
2918 IEM_MC_LOCAL(uint128_t, uSrc);
2919 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2921
2922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2924 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2925 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2926
2927 IEM_MC_PREPARE_SSE_USAGE();
2928 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2929 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2930
2931 IEM_MC_ADVANCE_RIP();
2932 IEM_MC_END();
2933 }
2934 return VINF_SUCCESS;
2935}
2936
2937
2938/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2939FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2940{
2941 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2942 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2943}
2944
2945/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2946FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2947{
2948 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2949 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2950}
2951/* Opcode 0xf3 0x0f 0x68 - invalid */
2952
2953
2954/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2955FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2956{
2957 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2958 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2959}
2960
2961/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2962FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2963{
2964 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2965 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2966
2967}
2968/* Opcode 0xf3 0x0f 0x69 - invalid */
2969
2970
2971/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2972FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2973{
2974 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2975 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2976}
2977
2978/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2979FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2980{
2981 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2982 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2983}
2984/* Opcode 0xf3 0x0f 0x6a - invalid */
2985
2986
2987/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2988FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2989/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2990FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2991/* Opcode 0xf3 0x0f 0x6b - invalid */
2992
2993
2994/* Opcode 0x0f 0x6c - invalid */
2995
2996/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2997FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2998{
2999 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
3000 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3001}
3002
3003/* Opcode 0xf3 0x0f 0x6c - invalid */
3004/* Opcode 0xf2 0x0f 0x6c - invalid */
3005
3006
3007/* Opcode 0x0f 0x6d - invalid */
3008
3009/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
3010FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3011{
3012 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, Wx");
3013 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3014}
3015
3016/* Opcode 0xf3 0x0f 0x6d - invalid */
3017
3018
3019/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3020FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3021{
3022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3023 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3024 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3025 else
3026 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3028 {
3029 /* MMX, greg */
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 IEM_MC_BEGIN(0, 1);
3032 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3034 IEM_MC_LOCAL(uint64_t, u64Tmp);
3035 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3036 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3037 else
3038 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3039 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3040 IEM_MC_ADVANCE_RIP();
3041 IEM_MC_END();
3042 }
3043 else
3044 {
3045 /* MMX, [mem] */
3046 IEM_MC_BEGIN(0, 2);
3047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3048 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3051 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3052 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3053 {
3054 IEM_MC_LOCAL(uint64_t, u64Tmp);
3055 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3056 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3057 }
3058 else
3059 {
3060 IEM_MC_LOCAL(uint32_t, u32Tmp);
3061 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3062 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3063 }
3064 IEM_MC_ADVANCE_RIP();
3065 IEM_MC_END();
3066 }
3067 return VINF_SUCCESS;
3068}
3069
3070/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
3071FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3072{
3073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3074 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3075 IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
3076 else
3077 IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
3078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3079 {
3080 /* XMM, greg*/
3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3082 IEM_MC_BEGIN(0, 1);
3083 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3084 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3085 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3086 {
3087 IEM_MC_LOCAL(uint64_t, u64Tmp);
3088 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3089 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3090 }
3091 else
3092 {
3093 IEM_MC_LOCAL(uint32_t, u32Tmp);
3094 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3095 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3096 }
3097 IEM_MC_ADVANCE_RIP();
3098 IEM_MC_END();
3099 }
3100 else
3101 {
3102 /* XMM, [mem] */
3103 IEM_MC_BEGIN(0, 2);
3104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3105 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3108 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3110 {
3111 IEM_MC_LOCAL(uint64_t, u64Tmp);
3112 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3113 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3114 }
3115 else
3116 {
3117 IEM_MC_LOCAL(uint32_t, u32Tmp);
3118 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3119 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3120 }
3121 IEM_MC_ADVANCE_RIP();
3122 IEM_MC_END();
3123 }
3124 return VINF_SUCCESS;
3125}
3126
3127/* Opcode 0xf3 0x0f 0x6e - invalid */
3128
3129
3130/** Opcode 0x0f 0x6f - movq Pq, Qq */
3131FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3132{
3133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3134 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3135 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3136 {
3137 /*
3138 * Register, register.
3139 */
3140 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3141 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3143 IEM_MC_BEGIN(0, 1);
3144 IEM_MC_LOCAL(uint64_t, u64Tmp);
3145 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3146 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3147 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3148 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3149 IEM_MC_ADVANCE_RIP();
3150 IEM_MC_END();
3151 }
3152 else
3153 {
3154 /*
3155 * Register, memory.
3156 */
3157 IEM_MC_BEGIN(0, 2);
3158 IEM_MC_LOCAL(uint64_t, u64Tmp);
3159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3160
3161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3163 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3164 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3165 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3166 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3167
3168 IEM_MC_ADVANCE_RIP();
3169 IEM_MC_END();
3170 }
3171 return VINF_SUCCESS;
3172}
3173
3174/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
3175FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3176{
3177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3178 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3179 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3180 {
3181 /*
3182 * Register, register.
3183 */
3184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3185 IEM_MC_BEGIN(0, 0);
3186 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3187 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3188 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3189 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3190 IEM_MC_ADVANCE_RIP();
3191 IEM_MC_END();
3192 }
3193 else
3194 {
3195 /*
3196 * Register, memory.
3197 */
3198 IEM_MC_BEGIN(0, 2);
3199 IEM_MC_LOCAL(uint128_t, u128Tmp);
3200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3201
3202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3204 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3205 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3206 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3207 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3208
3209 IEM_MC_ADVANCE_RIP();
3210 IEM_MC_END();
3211 }
3212 return VINF_SUCCESS;
3213}
3214
3215/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
3216FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3217{
3218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3219 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3220 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3221 {
3222 /*
3223 * Register, register.
3224 */
3225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3226 IEM_MC_BEGIN(0, 0);
3227 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3228 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3229 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3230 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3231 IEM_MC_ADVANCE_RIP();
3232 IEM_MC_END();
3233 }
3234 else
3235 {
3236 /*
3237 * Register, memory.
3238 */
3239 IEM_MC_BEGIN(0, 2);
3240 IEM_MC_LOCAL(uint128_t, u128Tmp);
3241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3242
3243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3245 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3246 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3247 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
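         /* Unlike movdqa above, the plain (non-aligning) fetch is used here
            since movdqu permits unaligned accesses. */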
3248 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3249
3250 IEM_MC_ADVANCE_RIP();
3251 IEM_MC_END();
3252 }
3253 return VINF_SUCCESS;
3254}
3255
3256
3257/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3258FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3259{
3260 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3262 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3263 {
3264 /*
3265 * Register, register.
3266 */
3267 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3269
3270 IEM_MC_BEGIN(3, 0);
3271 IEM_MC_ARG(uint64_t *, pDst, 0);
3272 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3273 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3274 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3275 IEM_MC_PREPARE_FPU_USAGE();
3276 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3277 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3278 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3279 IEM_MC_ADVANCE_RIP();
3280 IEM_MC_END();
3281 }
3282 else
3283 {
3284 /*
3285 * Register, memory.
3286 */
3287 IEM_MC_BEGIN(3, 2);
3288 IEM_MC_ARG(uint64_t *, pDst, 0);
3289 IEM_MC_LOCAL(uint64_t, uSrc);
3290 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3292
3293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3294 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
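         /* The immediate follows the ModR/M displacement bytes in the
            instruction stream, so it can only be fetched after the effective
            address has been decoded. */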
3295 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3297 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3298
3299 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3300 IEM_MC_PREPARE_FPU_USAGE();
3301 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3302 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3303
3304 IEM_MC_ADVANCE_RIP();
3305 IEM_MC_END();
3306 }
3307 return VINF_SUCCESS;
3308}
3309
3310/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
3311FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3312{
3313 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
3314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3316 {
3317 /*
3318 * Register, register.
3319 */
3320 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3322
3323 IEM_MC_BEGIN(3, 0);
3324 IEM_MC_ARG(uint128_t *, pDst, 0);
3325 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3326 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3327 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3328 IEM_MC_PREPARE_SSE_USAGE();
3329 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3330 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3331 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3332 IEM_MC_ADVANCE_RIP();
3333 IEM_MC_END();
3334 }
3335 else
3336 {
3337 /*
3338 * Register, memory.
3339 */
3340 IEM_MC_BEGIN(3, 2);
3341 IEM_MC_ARG(uint128_t *, pDst, 0);
3342 IEM_MC_LOCAL(uint128_t, uSrc);
3343 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3345
3346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3347 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3348 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3350 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3351
3352 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3353 IEM_MC_PREPARE_SSE_USAGE();
3354 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3355 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3356
3357 IEM_MC_ADVANCE_RIP();
3358 IEM_MC_END();
3359 }
3360 return VINF_SUCCESS;
3361}
3362
3363/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
3364FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3365{
3366 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
3367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3368 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3369 {
3370 /*
3371 * Register, register.
3372 */
3373 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3375
3376 IEM_MC_BEGIN(3, 0);
3377 IEM_MC_ARG(uint128_t *, pDst, 0);
3378 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3379 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3380 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3381 IEM_MC_PREPARE_SSE_USAGE();
3382 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3383 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3384 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3385 IEM_MC_ADVANCE_RIP();
3386 IEM_MC_END();
3387 }
3388 else
3389 {
3390 /*
3391 * Register, memory.
3392 */
3393 IEM_MC_BEGIN(3, 2);
3394 IEM_MC_ARG(uint128_t *, pDst, 0);
3395 IEM_MC_LOCAL(uint128_t, uSrc);
3396 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3398
3399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3400 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3401 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3403 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3404
3405 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3406 IEM_MC_PREPARE_SSE_USAGE();
3407 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3408 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3409
3410 IEM_MC_ADVANCE_RIP();
3411 IEM_MC_END();
3412 }
3413 return VINF_SUCCESS;
3414}
3415
3416/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
3417FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3418{
3419 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
3420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3422 {
3423 /*
3424 * Register, register.
3425 */
3426 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3428
3429 IEM_MC_BEGIN(3, 0);
3430 IEM_MC_ARG(uint128_t *, pDst, 0);
3431 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3432 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3434 IEM_MC_PREPARE_SSE_USAGE();
3435 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3436 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3437 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3438 IEM_MC_ADVANCE_RIP();
3439 IEM_MC_END();
3440 }
3441 else
3442 {
3443 /*
3444 * Register, memory.
3445 */
3446 IEM_MC_BEGIN(3, 2);
3447 IEM_MC_ARG(uint128_t *, pDst, 0);
3448 IEM_MC_LOCAL(uint128_t, uSrc);
3449 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3451
3452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3453 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3454 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3456 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3457
3458 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3459 IEM_MC_PREPARE_SSE_USAGE();
3460 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3461 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3462
3463 IEM_MC_ADVANCE_RIP();
3464 IEM_MC_END();
3465 }
3466 return VINF_SUCCESS;
3467}
3468
3469
3470/** Opcode 0x0f 0x71 11/2. */
3471FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3472
3473/** Opcode 0x66 0x0f 0x71 11/2. */
3474FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
3475
3476/** Opcode 0x0f 0x71 11/4. */
3477FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3478
3479/** Opcode 0x66 0x0f 0x71 11/4. */
3480FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
3481
3482/** Opcode 0x0f 0x71 11/6. */
3483FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3484
3485/** Opcode 0x66 0x0f 0x71 11/6. */
3486FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3487
3488
3489/** Opcode 0x0f 0x71. */
3490FNIEMOP_DEF(iemOp_Grp12)
3491{
3492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3493 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3494 return IEMOP_RAISE_INVALID_OPCODE();
3495 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3496 {
3497 case 0: case 1: case 3: case 5: case 7:
3498 return IEMOP_RAISE_INVALID_OPCODE();
3499 case 2:
3500 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3501 {
3502 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3503 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3504 default: return IEMOP_RAISE_INVALID_OPCODE();
3505 }
3506 case 4:
3507 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3508 {
3509 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3510 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3511 default: return IEMOP_RAISE_INVALID_OPCODE();
3512 }
3513 case 6:
3514 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3515 {
3516 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3517 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3518 default: return IEMOP_RAISE_INVALID_OPCODE();
3519 }
3520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3521 }
3522}
3523
3524
3525/** Opcode 0x0f 0x72 11/2. */
3526FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3527
3528/** Opcode 0x66 0x0f 0x72 11/2. */
3529FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3530
3531/** Opcode 0x0f 0x72 11/4. */
3532FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3533
3534/** Opcode 0x66 0x0f 0x72 11/4. */
3535FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3536
3537/** Opcode 0x0f 0x72 11/6. */
3538FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3539
3540/** Opcode 0x66 0x0f 0x72 11/6. */
3541FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3542
3543
3544/** Opcode 0x0f 0x72. */
3545FNIEMOP_DEF(iemOp_Grp13)
3546{
3547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3548 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3549 return IEMOP_RAISE_INVALID_OPCODE();
3550 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3551 {
3552 case 0: case 1: case 3: case 5: case 7:
3553 return IEMOP_RAISE_INVALID_OPCODE();
3554 case 2:
3555 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3556 {
3557 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3558 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3559 default: return IEMOP_RAISE_INVALID_OPCODE();
3560 }
3561 case 4:
3562 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3563 {
3564 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3565 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3566 default: return IEMOP_RAISE_INVALID_OPCODE();
3567 }
3568 case 6:
3569 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3570 {
3571 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3572 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3573 default: return IEMOP_RAISE_INVALID_OPCODE();
3574 }
3575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3576 }
3577}
3578
3579
3580/** Opcode 0x0f 0x73 11/2. */
3581FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3582
3583/** Opcode 0x66 0x0f 0x73 11/2. */
3584FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3585
3586/** Opcode 0x66 0x0f 0x73 11/3. */
3587FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3588
3589/** Opcode 0x0f 0x73 11/6. */
3590FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3591
3592/** Opcode 0x66 0x0f 0x73 11/6. */
3593FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3594
3595/** Opcode 0x66 0x0f 0x73 11/7. */
3596FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3597
3598
3599/** Opcode 0x0f 0x73. */
3600FNIEMOP_DEF(iemOp_Grp14)
3601{
3602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3603 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3604 return IEMOP_RAISE_INVALID_OPCODE();
3605 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3606 {
3607 case 0: case 1: case 4: case 5:
3608 return IEMOP_RAISE_INVALID_OPCODE();
3609 case 2:
3610 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3611 {
3612 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3613 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3614 default: return IEMOP_RAISE_INVALID_OPCODE();
3615 }
3616 case 3:
3617 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3618 {
3619 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3620 default: return IEMOP_RAISE_INVALID_OPCODE();
3621 }
3622 case 6:
3623 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3624 {
3625 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3626 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3627 default: return IEMOP_RAISE_INVALID_OPCODE();
3628 }
3629 case 7:
3630 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3631 {
3632 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3633 default: return IEMOP_RAISE_INVALID_OPCODE();
3634 }
3635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3636 }
3637}
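
/*
 * Groups 12 thru 14 above all dispatch on the mandatory prefix: no prefix
 * selects the MMX register form (Nq,Ib), 0x66 the SSE form (Udq,Ib), and
 * 0xf3/0xf2 are invalid.  E.g. the byte sequence 66 0f 71 d1 04 decodes as
 * psrlw xmm1,4: ModR/M 0xd1 gives mod=3, reg=2 (selecting /2 = psrlw) and
 * rm=1 (xmm1), with the trailing 0x04 immediate as the shift count.
 */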
3638
3639
3640/**
3641 * Common worker for MMX instructions of the form:
3642 * pxxx mm1, mm2/mem64
3643 */
3644FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3645{
3646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3647 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3648 {
3649 /*
3650 * Register, register.
3651 */
3652 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3653 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3655 IEM_MC_BEGIN(2, 0);
3656 IEM_MC_ARG(uint64_t *, pDst, 0);
3657 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3658 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3659 IEM_MC_PREPARE_FPU_USAGE();
3660 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3661 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3662 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3663 IEM_MC_ADVANCE_RIP();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 /*
3669 * Register, memory.
3670 */
3671 IEM_MC_BEGIN(2, 2);
3672 IEM_MC_ARG(uint64_t *, pDst, 0);
3673 IEM_MC_LOCAL(uint64_t, uSrc);
3674 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3676
3677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3679 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3680 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3681
3682 IEM_MC_PREPARE_FPU_USAGE();
3683 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3684 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3685
3686 IEM_MC_ADVANCE_RIP();
3687 IEM_MC_END();
3688 }
3689 return VINF_SUCCESS;
3690}
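
/*
 * This worker and the SSE2 one below take a small implementation table
 * rather than a bare function pointer, so a single handler can service both
 * encodings of the same instruction.  Judging from the pImpl->pfnU64 and
 * pImpl->pfnU128 uses, the table looks roughly like this (the real typedef
 * lives in IEMInternal.h):
 *
 *     typedef struct IEMOPMEDIAF2
 *     {
 *         PFNIEMAIMPLMEDIAF2U64    pfnU64;     // MMX, 64-bit operands
 *         PFNIEMAIMPLMEDIAF2U128   pfnU128;    // SSE, 128-bit operands
 *     } IEMOPMEDIAF2;
 */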
3691
3692
3693/**
3694 * Common worker for SSE2 instructions of the form:
3695 * pxxx xmm1, xmm2/mem128
3696 *
3697 * Proper alignment of the 128-bit operand is enforced.
3698 * Exceptions type 4. SSE2 cpuid checks.
3699 */
3700FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3701{
3702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3703 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3704 {
3705 /*
3706 * Register, register.
3707 */
3708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3709 IEM_MC_BEGIN(2, 0);
3710 IEM_MC_ARG(uint128_t *, pDst, 0);
3711 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3712 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3713 IEM_MC_PREPARE_SSE_USAGE();
3714 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3715 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3716 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3717 IEM_MC_ADVANCE_RIP();
3718 IEM_MC_END();
3719 }
3720 else
3721 {
3722 /*
3723 * Register, memory.
3724 */
3725 IEM_MC_BEGIN(2, 2);
3726 IEM_MC_ARG(uint128_t *, pDst, 0);
3727 IEM_MC_LOCAL(uint128_t, uSrc);
3728 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3730
3731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3733 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3734 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3735
3736 IEM_MC_PREPARE_SSE_USAGE();
3737 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3738 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3739
3740 IEM_MC_ADVANCE_RIP();
3741 IEM_MC_END();
3742 }
3743 return VINF_SUCCESS;
3744}
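
/*
 * The "exceptions type 4" note above refers to the Intel SDM classification:
 * the legacy SSE encodings of these instructions take #GP(0) when the
 * 128-bit memory operand is not 16 byte aligned, which is what
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces.
 */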
3745
3746
3747/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3748FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3749{
3750 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3751 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3752}
3753
3754/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3755FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3756{
3757 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3758 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3759}
3760
3761/* Opcode 0xf3 0x0f 0x74 - invalid */
3762/* Opcode 0xf2 0x0f 0x74 - invalid */
3763
3764
3765/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3766FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3767{
3768 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3769 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3770}
3771
3772/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3773FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3774{
3775 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3776 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3777}
3778
3779/* Opcode 0xf3 0x0f 0x75 - invalid */
3780/* Opcode 0xf2 0x0f 0x75 - invalid */
3781
3782
3783/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3784FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3785{
3786 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3787 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3788}
3789
3790/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3791FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3792{
3793 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3794 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3795}
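
/*
 * All three pcmpeq variants above compare lane for lane and yield all ones
 * on equality, all zeros otherwise.  For the byte variant this amounts to
 * (a sketch with illustrative names, not the actual iemAImpl code; 8 lanes
 * instead of 16 for the MMX form):
 *
 *     for (unsigned i = 0; i < 16; i++)
 *         puDst->au8[i] = puDst->au8[i] == puSrc->au8[i] ? 0xff : 0x00;
 */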
3796
3797/* Opcode 0xf3 0x0f 0x76 - invalid */
3798/* Opcode 0xf2 0x0f 0x76 - invalid */
3799
3800
3801/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3802FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3803/* Opcode 0x66 0x0f 0x77 - invalid */
3804/* Opcode 0xf3 0x0f 0x77 - invalid */
3805/* Opcode 0xf2 0x0f 0x77 - invalid */
3806
3807/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3808FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3809/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3810FNIEMOP_STUB(iemOp_AmdGrp17);
3811/* Opcode 0xf3 0x0f 0x78 - invalid */
3812/* Opcode 0xf2 0x0f 0x78 - invalid */
3813
3814/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3815FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3816/* Opcode 0x66 0x0f 0x79 - invalid */
3817/* Opcode 0xf3 0x0f 0x79 - invalid */
3818/* Opcode 0xf2 0x0f 0x79 - invalid */
3819
3820/* Opcode 0x0f 0x7a - invalid */
3821/* Opcode 0x66 0x0f 0x7a - invalid */
3822/* Opcode 0xf3 0x0f 0x7a - invalid */
3823/* Opcode 0xf2 0x0f 0x7a - invalid */
3824
3825/* Opcode 0x0f 0x7b - invalid */
3826/* Opcode 0x66 0x0f 0x7b - invalid */
3827/* Opcode 0xf3 0x0f 0x7b - invalid */
3828/* Opcode 0xf2 0x0f 0x7b - invalid */
3829
3830/* Opcode 0x0f 0x7c - invalid */
3831/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3832FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3833/* Opcode 0xf3 0x0f 0x7c - invalid */
3834/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3835FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3836
3837/* Opcode 0x0f 0x7d - invalid */
3838/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3839FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3840/* Opcode 0xf3 0x0f 0x7d - invalid */
3841/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3842FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3843
3844
3845/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3846FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3847{
3848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3849 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3850 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3851 else
3852 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3853 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3854 {
3855 /* greg, MMX */
3856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3857 IEM_MC_BEGIN(0, 1);
3858 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3859 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3860 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3861 {
3862 IEM_MC_LOCAL(uint64_t, u64Tmp);
3863 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3864 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3865 }
3866 else
3867 {
3868 IEM_MC_LOCAL(uint32_t, u32Tmp);
3869 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3870 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3871 }
3872 IEM_MC_ADVANCE_RIP();
3873 IEM_MC_END();
3874 }
3875 else
3876 {
3877 /* [mem], MMX */
3878 IEM_MC_BEGIN(0, 2);
3879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3880 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate bytes follow */
3882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3883 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3884 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3885 {
3886 IEM_MC_LOCAL(uint64_t, u64Tmp);
3887 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3888 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3889 }
3890 else
3891 {
3892 IEM_MC_LOCAL(uint32_t, u32Tmp);
3893 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3894 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3895 }
3896 IEM_MC_ADVANCE_RIP();
3897 IEM_MC_END();
3898 }
3899 return VINF_SUCCESS;
3900}
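
/*
 * So with REX.W (e.g. 48 0f 7e c0 = movq rax,mm0) the full MMX register is
 * copied, while without it (0f 7e c0 = movd eax,mm0) only the low doubleword
 * moves; in 64-bit mode the 32-bit register store then zero extends into the
 * full 64-bit GPR, as all 32-bit GPR writes do.
 */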
3901
3902/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3903FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3904{
3905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3906 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3907 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3908 else
3909 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3910 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3911 {
3912 /* greg, XMM */
3913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3914 IEM_MC_BEGIN(0, 1);
3915 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3916 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3917 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3918 {
3919 IEM_MC_LOCAL(uint64_t, u64Tmp);
3920 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3921 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3922 }
3923 else
3924 {
3925 IEM_MC_LOCAL(uint32_t, u32Tmp);
3926 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3927 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3928 }
3929 IEM_MC_ADVANCE_RIP();
3930 IEM_MC_END();
3931 }
3932 else
3933 {
3934 /* [mem], XMM */
3935 IEM_MC_BEGIN(0, 2);
3936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3937 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate bytes follow */
3939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3940 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3941 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3942 {
3943 IEM_MC_LOCAL(uint64_t, u64Tmp);
3944 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3945 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3946 }
3947 else
3948 {
3949 IEM_MC_LOCAL(uint32_t, u32Tmp);
3950 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3951 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3952 }
3953 IEM_MC_ADVANCE_RIP();
3954 IEM_MC_END();
3955 }
3956 return VINF_SUCCESS;
3957}
3958
3959/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3960FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3961/* Opcode 0xf2 0x0f 0x7e - invalid */
3962
3963
3964/** Opcode 0x0f 0x7f - movq Qq, Pq */
3965FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3966{
3967 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3969 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3970 {
3971 /*
3972 * Register, register.
3973 */
3974 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3975 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3977 IEM_MC_BEGIN(0, 1);
3978 IEM_MC_LOCAL(uint64_t, u64Tmp);
3979 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3980 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3981 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3982 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3983 IEM_MC_ADVANCE_RIP();
3984 IEM_MC_END();
3985 }
3986 else
3987 {
3988 /*
3989 * Register, memory.
3990 */
3991 IEM_MC_BEGIN(0, 2);
3992 IEM_MC_LOCAL(uint64_t, u64Tmp);
3993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3994
3995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3997 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3998 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3999
4000 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4001 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4002
4003 IEM_MC_ADVANCE_RIP();
4004 IEM_MC_END();
4005 }
4006 return VINF_SUCCESS;
4007}
4008
4009/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
4010FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4011{
4012 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
4013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4015 {
4016 /*
4017 * Register, register.
4018 */
4019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4020 IEM_MC_BEGIN(0, 0);
4021 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4022 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4023 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4024 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4025 IEM_MC_ADVANCE_RIP();
4026 IEM_MC_END();
4027 }
4028 else
4029 {
4030 /*
4031 * Register, memory.
4032 */
4033 IEM_MC_BEGIN(0, 2);
4034 IEM_MC_LOCAL(uint128_t, u128Tmp);
4035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4036
4037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4039 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4040 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4041
4042 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4043 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4044
4045 IEM_MC_ADVANCE_RIP();
4046 IEM_MC_END();
4047 }
4048 return VINF_SUCCESS;
4049}
4050
4051/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
4052FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4053{
4054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4055 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
4056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4057 {
4058 /*
4059 * Register, register.
4060 */
4061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4062 IEM_MC_BEGIN(0, 0);
4063 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4064 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4065 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4066 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4067 IEM_MC_ADVANCE_RIP();
4068 IEM_MC_END();
4069 }
4070 else
4071 {
4072 /*
4073 * Register, memory.
4074 */
4075 IEM_MC_BEGIN(0, 2);
4076 IEM_MC_LOCAL(uint128_t, u128Tmp);
4077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4078
4079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4081 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4082 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4083
4084 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4085 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4086
4087 IEM_MC_ADVANCE_RIP();
4088 IEM_MC_END();
4089 }
4090 return VINF_SUCCESS;
4091}
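
/*
 * The only difference between the movdqa and movdqu stores above is the
 * alignment requirement: IEM_MC_STORE_MEM_U128_ALIGN_SSE raises #GP(0) when
 * the effective address isn't 16 byte aligned, while the plain
 * IEM_MC_STORE_MEM_U128 used for movdqu accepts any address.
 */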
4092
4093/* Opcode 0xf2 0x0f 0x7f - invalid */
4094
4095
4096
4097/** Opcode 0x0f 0x80. */
4098FNIEMOP_DEF(iemOp_jo_Jv)
4099{
4100 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4101 IEMOP_HLP_MIN_386();
4102 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4103 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4104 {
4105 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4107
4108 IEM_MC_BEGIN(0, 0);
4109 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4110 IEM_MC_REL_JMP_S16(i16Imm);
4111 } IEM_MC_ELSE() {
4112 IEM_MC_ADVANCE_RIP();
4113 } IEM_MC_ENDIF();
4114 IEM_MC_END();
4115 }
4116 else
4117 {
4118 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120
4121 IEM_MC_BEGIN(0, 0);
4122 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4123 IEM_MC_REL_JMP_S32(i32Imm);
4124 } IEM_MC_ELSE() {
4125 IEM_MC_ADVANCE_RIP();
4126 } IEM_MC_ENDIF();
4127 IEM_MC_END();
4128 }
4129 return VINF_SUCCESS;
4130}
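
/*
 * The rest of the two-byte jcc instructions follow the exact same pattern:
 * a signed word displacement when the effective operand size is 16-bit and
 * a signed doubleword otherwise.  IEMOP_HLP_DEFAULT_64BIT_OP_SIZE lets the
 * doubleword path cover 64-bit mode as well, where the fetched displacement
 * is sign extended to 64 bits when added to RIP.
 */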
4131
4132
4133/** Opcode 0x0f 0x81. */
4134FNIEMOP_DEF(iemOp_jno_Jv)
4135{
4136 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4137 IEMOP_HLP_MIN_386();
4138 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4139 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4140 {
4141 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4143
4144 IEM_MC_BEGIN(0, 0);
4145 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4146 IEM_MC_ADVANCE_RIP();
4147 } IEM_MC_ELSE() {
4148 IEM_MC_REL_JMP_S16(i16Imm);
4149 } IEM_MC_ENDIF();
4150 IEM_MC_END();
4151 }
4152 else
4153 {
4154 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4156
4157 IEM_MC_BEGIN(0, 0);
4158 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4159 IEM_MC_ADVANCE_RIP();
4160 } IEM_MC_ELSE() {
4161 IEM_MC_REL_JMP_S32(i32Imm);
4162 } IEM_MC_ENDIF();
4163 IEM_MC_END();
4164 }
4165 return VINF_SUCCESS;
4166}
4167
4168
4169/** Opcode 0x0f 0x82. */
4170FNIEMOP_DEF(iemOp_jc_Jv)
4171{
4172 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4173 IEMOP_HLP_MIN_386();
4174 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4175 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4176 {
4177 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4179
4180 IEM_MC_BEGIN(0, 0);
4181 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4182 IEM_MC_REL_JMP_S16(i16Imm);
4183 } IEM_MC_ELSE() {
4184 IEM_MC_ADVANCE_RIP();
4185 } IEM_MC_ENDIF();
4186 IEM_MC_END();
4187 }
4188 else
4189 {
4190 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4192
4193 IEM_MC_BEGIN(0, 0);
4194 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4195 IEM_MC_REL_JMP_S32(i32Imm);
4196 } IEM_MC_ELSE() {
4197 IEM_MC_ADVANCE_RIP();
4198 } IEM_MC_ENDIF();
4199 IEM_MC_END();
4200 }
4201 return VINF_SUCCESS;
4202}
4203
4204
4205/** Opcode 0x0f 0x83. */
4206FNIEMOP_DEF(iemOp_jnc_Jv)
4207{
4208 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4209 IEMOP_HLP_MIN_386();
4210 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4211 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4212 {
4213 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4215
4216 IEM_MC_BEGIN(0, 0);
4217 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4218 IEM_MC_ADVANCE_RIP();
4219 } IEM_MC_ELSE() {
4220 IEM_MC_REL_JMP_S16(i16Imm);
4221 } IEM_MC_ENDIF();
4222 IEM_MC_END();
4223 }
4224 else
4225 {
4226 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4228
4229 IEM_MC_BEGIN(0, 0);
4230 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4231 IEM_MC_ADVANCE_RIP();
4232 } IEM_MC_ELSE() {
4233 IEM_MC_REL_JMP_S32(i32Imm);
4234 } IEM_MC_ENDIF();
4235 IEM_MC_END();
4236 }
4237 return VINF_SUCCESS;
4238}
4239
4240
4241/** Opcode 0x0f 0x84. */
4242FNIEMOP_DEF(iemOp_je_Jv)
4243{
4244 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4245 IEMOP_HLP_MIN_386();
4246 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4247 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4248 {
4249 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4251
4252 IEM_MC_BEGIN(0, 0);
4253 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4254 IEM_MC_REL_JMP_S16(i16Imm);
4255 } IEM_MC_ELSE() {
4256 IEM_MC_ADVANCE_RIP();
4257 } IEM_MC_ENDIF();
4258 IEM_MC_END();
4259 }
4260 else
4261 {
4262 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4264
4265 IEM_MC_BEGIN(0, 0);
4266 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4267 IEM_MC_REL_JMP_S32(i32Imm);
4268 } IEM_MC_ELSE() {
4269 IEM_MC_ADVANCE_RIP();
4270 } IEM_MC_ENDIF();
4271 IEM_MC_END();
4272 }
4273 return VINF_SUCCESS;
4274}
4275
4276
4277/** Opcode 0x0f 0x85. */
4278FNIEMOP_DEF(iemOp_jne_Jv)
4279{
4280 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4281 IEMOP_HLP_MIN_386();
4282 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4283 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4284 {
4285 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4287
4288 IEM_MC_BEGIN(0, 0);
4289 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4290 IEM_MC_ADVANCE_RIP();
4291 } IEM_MC_ELSE() {
4292 IEM_MC_REL_JMP_S16(i16Imm);
4293 } IEM_MC_ENDIF();
4294 IEM_MC_END();
4295 }
4296 else
4297 {
4298 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4300
4301 IEM_MC_BEGIN(0, 0);
4302 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4303 IEM_MC_ADVANCE_RIP();
4304 } IEM_MC_ELSE() {
4305 IEM_MC_REL_JMP_S32(i32Imm);
4306 } IEM_MC_ENDIF();
4307 IEM_MC_END();
4308 }
4309 return VINF_SUCCESS;
4310}
4311
4312
4313/** Opcode 0x0f 0x86. */
4314FNIEMOP_DEF(iemOp_jbe_Jv)
4315{
4316 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4317 IEMOP_HLP_MIN_386();
4318 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4319 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4320 {
4321 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4323
4324 IEM_MC_BEGIN(0, 0);
4325 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4326 IEM_MC_REL_JMP_S16(i16Imm);
4327 } IEM_MC_ELSE() {
4328 IEM_MC_ADVANCE_RIP();
4329 } IEM_MC_ENDIF();
4330 IEM_MC_END();
4331 }
4332 else
4333 {
4334 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4336
4337 IEM_MC_BEGIN(0, 0);
4338 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4339 IEM_MC_REL_JMP_S32(i32Imm);
4340 } IEM_MC_ELSE() {
4341 IEM_MC_ADVANCE_RIP();
4342 } IEM_MC_ENDIF();
4343 IEM_MC_END();
4344 }
4345 return VINF_SUCCESS;
4346}
4347
4348
4349/** Opcode 0x0f 0x87. */
4350FNIEMOP_DEF(iemOp_jnbe_Jv)
4351{
4352 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4353 IEMOP_HLP_MIN_386();
4354 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4355 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4356 {
4357 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4359
4360 IEM_MC_BEGIN(0, 0);
4361 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4362 IEM_MC_ADVANCE_RIP();
4363 } IEM_MC_ELSE() {
4364 IEM_MC_REL_JMP_S16(i16Imm);
4365 } IEM_MC_ENDIF();
4366 IEM_MC_END();
4367 }
4368 else
4369 {
4370 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372
4373 IEM_MC_BEGIN(0, 0);
4374 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4375 IEM_MC_ADVANCE_RIP();
4376 } IEM_MC_ELSE() {
4377 IEM_MC_REL_JMP_S32(i32Imm);
4378 } IEM_MC_ENDIF();
4379 IEM_MC_END();
4380 }
4381 return VINF_SUCCESS;
4382}
4383
4384
4385/** Opcode 0x0f 0x88. */
4386FNIEMOP_DEF(iemOp_js_Jv)
4387{
4388 IEMOP_MNEMONIC(js_Jv, "js Jv");
4389 IEMOP_HLP_MIN_386();
4390 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4391 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4392 {
4393 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4395
4396 IEM_MC_BEGIN(0, 0);
4397 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4398 IEM_MC_REL_JMP_S16(i16Imm);
4399 } IEM_MC_ELSE() {
4400 IEM_MC_ADVANCE_RIP();
4401 } IEM_MC_ENDIF();
4402 IEM_MC_END();
4403 }
4404 else
4405 {
4406 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4408
4409 IEM_MC_BEGIN(0, 0);
4410 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4411 IEM_MC_REL_JMP_S32(i32Imm);
4412 } IEM_MC_ELSE() {
4413 IEM_MC_ADVANCE_RIP();
4414 } IEM_MC_ENDIF();
4415 IEM_MC_END();
4416 }
4417 return VINF_SUCCESS;
4418}
4419
4420
4421/** Opcode 0x0f 0x89. */
4422FNIEMOP_DEF(iemOp_jns_Jv)
4423{
4424 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4425 IEMOP_HLP_MIN_386();
4426 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4427 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4428 {
4429 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4431
4432 IEM_MC_BEGIN(0, 0);
4433 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4434 IEM_MC_ADVANCE_RIP();
4435 } IEM_MC_ELSE() {
4436 IEM_MC_REL_JMP_S16(i16Imm);
4437 } IEM_MC_ENDIF();
4438 IEM_MC_END();
4439 }
4440 else
4441 {
4442 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4444
4445 IEM_MC_BEGIN(0, 0);
4446 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4447 IEM_MC_ADVANCE_RIP();
4448 } IEM_MC_ELSE() {
4449 IEM_MC_REL_JMP_S32(i32Imm);
4450 } IEM_MC_ENDIF();
4451 IEM_MC_END();
4452 }
4453 return VINF_SUCCESS;
4454}
4455
4456
4457/** Opcode 0x0f 0x8a. */
4458FNIEMOP_DEF(iemOp_jp_Jv)
4459{
4460 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4461 IEMOP_HLP_MIN_386();
4462 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4463 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4464 {
4465 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4467
4468 IEM_MC_BEGIN(0, 0);
4469 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4470 IEM_MC_REL_JMP_S16(i16Imm);
4471 } IEM_MC_ELSE() {
4472 IEM_MC_ADVANCE_RIP();
4473 } IEM_MC_ENDIF();
4474 IEM_MC_END();
4475 }
4476 else
4477 {
4478 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4480
4481 IEM_MC_BEGIN(0, 0);
4482 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4483 IEM_MC_REL_JMP_S32(i32Imm);
4484 } IEM_MC_ELSE() {
4485 IEM_MC_ADVANCE_RIP();
4486 } IEM_MC_ENDIF();
4487 IEM_MC_END();
4488 }
4489 return VINF_SUCCESS;
4490}
4491
4492
4493/** Opcode 0x0f 0x8b. */
4494FNIEMOP_DEF(iemOp_jnp_Jv)
4495{
4496 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4497 IEMOP_HLP_MIN_386();
4498 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4499 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4500 {
4501 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4503
4504 IEM_MC_BEGIN(0, 0);
4505 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4506 IEM_MC_ADVANCE_RIP();
4507 } IEM_MC_ELSE() {
4508 IEM_MC_REL_JMP_S16(i16Imm);
4509 } IEM_MC_ENDIF();
4510 IEM_MC_END();
4511 }
4512 else
4513 {
4514 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4516
4517 IEM_MC_BEGIN(0, 0);
4518 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4519 IEM_MC_ADVANCE_RIP();
4520 } IEM_MC_ELSE() {
4521 IEM_MC_REL_JMP_S32(i32Imm);
4522 } IEM_MC_ENDIF();
4523 IEM_MC_END();
4524 }
4525 return VINF_SUCCESS;
4526}
4527
4528
4529/** Opcode 0x0f 0x8c. */
4530FNIEMOP_DEF(iemOp_jl_Jv)
4531{
4532 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4533 IEMOP_HLP_MIN_386();
4534 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4535 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4536 {
4537 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4539
4540 IEM_MC_BEGIN(0, 0);
4541 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4542 IEM_MC_REL_JMP_S16(i16Imm);
4543 } IEM_MC_ELSE() {
4544 IEM_MC_ADVANCE_RIP();
4545 } IEM_MC_ENDIF();
4546 IEM_MC_END();
4547 }
4548 else
4549 {
4550 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4552
4553 IEM_MC_BEGIN(0, 0);
4554 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4555 IEM_MC_REL_JMP_S32(i32Imm);
4556 } IEM_MC_ELSE() {
4557 IEM_MC_ADVANCE_RIP();
4558 } IEM_MC_ENDIF();
4559 IEM_MC_END();
4560 }
4561 return VINF_SUCCESS;
4562}
4563
4564
4565/** Opcode 0x0f 0x8d. */
4566FNIEMOP_DEF(iemOp_jnl_Jv)
4567{
4568 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4569 IEMOP_HLP_MIN_386();
4570 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4571 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4572 {
4573 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4575
4576 IEM_MC_BEGIN(0, 0);
4577 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4578 IEM_MC_ADVANCE_RIP();
4579 } IEM_MC_ELSE() {
4580 IEM_MC_REL_JMP_S16(i16Imm);
4581 } IEM_MC_ENDIF();
4582 IEM_MC_END();
4583 }
4584 else
4585 {
4586 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588
4589 IEM_MC_BEGIN(0, 0);
4590 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4591 IEM_MC_ADVANCE_RIP();
4592 } IEM_MC_ELSE() {
4593 IEM_MC_REL_JMP_S32(i32Imm);
4594 } IEM_MC_ENDIF();
4595 IEM_MC_END();
4596 }
4597 return VINF_SUCCESS;
4598}
4599
4600
4601/** Opcode 0x0f 0x8e. */
4602FNIEMOP_DEF(iemOp_jle_Jv)
4603{
4604 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4605 IEMOP_HLP_MIN_386();
4606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4607 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4608 {
4609 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4611
4612 IEM_MC_BEGIN(0, 0);
4613 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4614 IEM_MC_REL_JMP_S16(i16Imm);
4615 } IEM_MC_ELSE() {
4616 IEM_MC_ADVANCE_RIP();
4617 } IEM_MC_ENDIF();
4618 IEM_MC_END();
4619 }
4620 else
4621 {
4622 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4624
4625 IEM_MC_BEGIN(0, 0);
4626 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4627 IEM_MC_REL_JMP_S32(i32Imm);
4628 } IEM_MC_ELSE() {
4629 IEM_MC_ADVANCE_RIP();
4630 } IEM_MC_ENDIF();
4631 IEM_MC_END();
4632 }
4633 return VINF_SUCCESS;
4634}
4635
4636
4637/** Opcode 0x0f 0x8f. */
4638FNIEMOP_DEF(iemOp_jnle_Jv)
4639{
4640 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4641 IEMOP_HLP_MIN_386();
4642 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4643 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4644 {
4645 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4647
4648 IEM_MC_BEGIN(0, 0);
4649 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4650 IEM_MC_ADVANCE_RIP();
4651 } IEM_MC_ELSE() {
4652 IEM_MC_REL_JMP_S16(i16Imm);
4653 } IEM_MC_ENDIF();
4654 IEM_MC_END();
4655 }
4656 else
4657 {
4658 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4660
4661 IEM_MC_BEGIN(0, 0);
4662 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4663 IEM_MC_ADVANCE_RIP();
4664 } IEM_MC_ELSE() {
4665 IEM_MC_REL_JMP_S32(i32Imm);
4666 } IEM_MC_ENDIF();
4667 IEM_MC_END();
4668 }
4669 return VINF_SUCCESS;
4670}
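
/*
 * For quick reference, the signed condition tests used above (and by the
 * setcc family below) are:
 *
 *     jl/jnge     SF != OF
 *     jnl/jge     SF == OF
 *     jle/jng     ZF == 1 || SF != OF
 *     jnle/jg     ZF == 0 && SF == OF
 */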
4671
4672
4673/** Opcode 0x0f 0x90. */
4674FNIEMOP_DEF(iemOp_seto_Eb)
4675{
4676 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4677 IEMOP_HLP_MIN_386();
4678 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4679
4680 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4681 * any way. AMD says it's "unused", whatever that means. We're
4682 * ignoring for now. */
4683 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4684 {
4685 /* register target */
4686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4687 IEM_MC_BEGIN(0, 0);
4688 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4689 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4690 } IEM_MC_ELSE() {
4691 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4692 } IEM_MC_ENDIF();
4693 IEM_MC_ADVANCE_RIP();
4694 IEM_MC_END();
4695 }
4696 else
4697 {
4698 /* memory target */
4699 IEM_MC_BEGIN(0, 1);
4700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4703 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4704 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4705 } IEM_MC_ELSE() {
4706 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4707 } IEM_MC_ENDIF();
4708 IEM_MC_ADVANCE_RIP();
4709 IEM_MC_END();
4710 }
4711 return VINF_SUCCESS;
4712}
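
/*
 * The remaining setcc handlers below only differ in the EFLAGS test; each
 * stores a byte sized 1 when its condition holds and 0 otherwise.  E.g.
 * 0f 90 c0 is seto al and leaves al=1 exactly when OF is set.
 */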
4713
4714
4715/** Opcode 0x0f 0x91. */
4716FNIEMOP_DEF(iemOp_setno_Eb)
4717{
4718 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4719 IEMOP_HLP_MIN_386();
4720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4721
4722 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4723 * any way. AMD says it's "unused", whatever that means. We're
4724 * ignoring for now. */
4725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4726 {
4727 /* register target */
4728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4729 IEM_MC_BEGIN(0, 0);
4730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4731 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4732 } IEM_MC_ELSE() {
4733 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4734 } IEM_MC_ENDIF();
4735 IEM_MC_ADVANCE_RIP();
4736 IEM_MC_END();
4737 }
4738 else
4739 {
4740 /* memory target */
4741 IEM_MC_BEGIN(0, 1);
4742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4745 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4746 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4747 } IEM_MC_ELSE() {
4748 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4749 } IEM_MC_ENDIF();
4750 IEM_MC_ADVANCE_RIP();
4751 IEM_MC_END();
4752 }
4753 return VINF_SUCCESS;
4754}
4755
4756
4757/** Opcode 0x0f 0x92. */
4758FNIEMOP_DEF(iemOp_setc_Eb)
4759{
4760 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4761 IEMOP_HLP_MIN_386();
4762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4763
4764 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4765 * any way. AMD says it's "unused", whatever that means. We're
4766 * ignoring for now. */
4767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4768 {
4769 /* register target */
4770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4771 IEM_MC_BEGIN(0, 0);
4772 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4773 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4774 } IEM_MC_ELSE() {
4775 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4776 } IEM_MC_ENDIF();
4777 IEM_MC_ADVANCE_RIP();
4778 IEM_MC_END();
4779 }
4780 else
4781 {
4782 /* memory target */
4783 IEM_MC_BEGIN(0, 1);
4784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4787 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4788 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4789 } IEM_MC_ELSE() {
4790 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4791 } IEM_MC_ENDIF();
4792 IEM_MC_ADVANCE_RIP();
4793 IEM_MC_END();
4794 }
4795 return VINF_SUCCESS;
4796}
4797
4798
4799/** Opcode 0x0f 0x93. */
4800FNIEMOP_DEF(iemOp_setnc_Eb)
4801{
4802 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4803 IEMOP_HLP_MIN_386();
4804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4805
4806 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4807 * any way. AMD says it's "unused", whatever that means. We're
4808 * ignoring for now. */
4809 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4810 {
4811 /* register target */
4812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4813 IEM_MC_BEGIN(0, 0);
4814 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4815 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4816 } IEM_MC_ELSE() {
4817 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4818 } IEM_MC_ENDIF();
4819 IEM_MC_ADVANCE_RIP();
4820 IEM_MC_END();
4821 }
4822 else
4823 {
4824 /* memory target */
4825 IEM_MC_BEGIN(0, 1);
4826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4830 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4831 } IEM_MC_ELSE() {
4832 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4833 } IEM_MC_ENDIF();
4834 IEM_MC_ADVANCE_RIP();
4835 IEM_MC_END();
4836 }
4837 return VINF_SUCCESS;
4838}
4839
4840
4841/** Opcode 0x0f 0x94. */
4842FNIEMOP_DEF(iemOp_sete_Eb)
4843{
4844 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4845 IEMOP_HLP_MIN_386();
4846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4847
4848 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4849 * any way. AMD says it's "unused", whatever that means. We're
4850 * ignoring for now. */
4851 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4852 {
4853 /* register target */
4854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4855 IEM_MC_BEGIN(0, 0);
4856 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4857 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4858 } IEM_MC_ELSE() {
4859 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4860 } IEM_MC_ENDIF();
4861 IEM_MC_ADVANCE_RIP();
4862 IEM_MC_END();
4863 }
4864 else
4865 {
4866 /* memory target */
4867 IEM_MC_BEGIN(0, 1);
4868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4871 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4872 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4873 } IEM_MC_ELSE() {
4874 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4875 } IEM_MC_ENDIF();
4876 IEM_MC_ADVANCE_RIP();
4877 IEM_MC_END();
4878 }
4879 return VINF_SUCCESS;
4880}
4881
4882
4883/** Opcode 0x0f 0x95. */
4884FNIEMOP_DEF(iemOp_setne_Eb)
4885{
4886 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4887 IEMOP_HLP_MIN_386();
4888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4889
4890 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4891 * any way. AMD says it's "unused", whatever that means. We're
4892 * ignoring for now. */
4893 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4894 {
4895 /* register target */
4896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4897 IEM_MC_BEGIN(0, 0);
4898 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4899 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4900 } IEM_MC_ELSE() {
4901 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4902 } IEM_MC_ENDIF();
4903 IEM_MC_ADVANCE_RIP();
4904 IEM_MC_END();
4905 }
4906 else
4907 {
4908 /* memory target */
4909 IEM_MC_BEGIN(0, 1);
4910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4913 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4914 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4915 } IEM_MC_ELSE() {
4916 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4917 } IEM_MC_ENDIF();
4918 IEM_MC_ADVANCE_RIP();
4919 IEM_MC_END();
4920 }
4921 return VINF_SUCCESS;
4922}
4923
4924
4925/** Opcode 0x0f 0x96. */
4926FNIEMOP_DEF(iemOp_setbe_Eb)
4927{
4928 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4929 IEMOP_HLP_MIN_386();
4930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4931
4932 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4933 * any way. AMD says it's "unused", whatever that means. We're
4934 * ignoring for now. */
4935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4936 {
4937 /* register target */
4938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4939 IEM_MC_BEGIN(0, 0);
4940 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4941 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4942 } IEM_MC_ELSE() {
4943 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4944 } IEM_MC_ENDIF();
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 }
4948 else
4949 {
4950 /* memory target */
4951 IEM_MC_BEGIN(0, 1);
4952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4955 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4956 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4957 } IEM_MC_ELSE() {
4958 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4959 } IEM_MC_ENDIF();
4960 IEM_MC_ADVANCE_RIP();
4961 IEM_MC_END();
4962 }
4963 return VINF_SUCCESS;
4964}
4965
4966
4967/** Opcode 0x0f 0x97. */
4968FNIEMOP_DEF(iemOp_setnbe_Eb)
4969{
4970 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4971 IEMOP_HLP_MIN_386();
4972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4973
4974 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4975 * any way. AMD says it's "unused", whatever that means. We're
4976 * ignoring for now. */
4977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4978 {
4979 /* register target */
4980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4981 IEM_MC_BEGIN(0, 0);
4982 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4983 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4984 } IEM_MC_ELSE() {
4985 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4986 } IEM_MC_ENDIF();
4987 IEM_MC_ADVANCE_RIP();
4988 IEM_MC_END();
4989 }
4990 else
4991 {
4992 /* memory target */
4993 IEM_MC_BEGIN(0, 1);
4994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4997 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4998 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4999 } IEM_MC_ELSE() {
5000 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5001 } IEM_MC_ENDIF();
5002 IEM_MC_ADVANCE_RIP();
5003 IEM_MC_END();
5004 }
5005 return VINF_SUCCESS;
5006}
5007
5008
5009/** Opcode 0x0f 0x98. */
5010FNIEMOP_DEF(iemOp_sets_Eb)
5011{
5012 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5013 IEMOP_HLP_MIN_386();
5014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5015
5016 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5017 * any way. AMD says it's "unused", whatever that means. We're
5018 * ignoring for now. */
5019 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5020 {
5021 /* register target */
5022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5023 IEM_MC_BEGIN(0, 0);
5024 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5025 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5026 } IEM_MC_ELSE() {
5027 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5028 } IEM_MC_ENDIF();
5029 IEM_MC_ADVANCE_RIP();
5030 IEM_MC_END();
5031 }
5032 else
5033 {
5034 /* memory target */
5035 IEM_MC_BEGIN(0, 1);
5036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5039 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5040 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5041 } IEM_MC_ELSE() {
5042 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5043 } IEM_MC_ENDIF();
5044 IEM_MC_ADVANCE_RIP();
5045 IEM_MC_END();
5046 }
5047 return VINF_SUCCESS;
5048}
5049
5050
5051/** Opcode 0x0f 0x99. */
5052FNIEMOP_DEF(iemOp_setns_Eb)
5053{
5054 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5055 IEMOP_HLP_MIN_386();
5056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5057
5058 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5059 * any way. AMD says it's "unused", whatever that means. We're
5060 * ignoring for now. */
5061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5062 {
5063 /* register target */
5064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5065 IEM_MC_BEGIN(0, 0);
5066 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5067 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5068 } IEM_MC_ELSE() {
5069 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5070 } IEM_MC_ENDIF();
5071 IEM_MC_ADVANCE_RIP();
5072 IEM_MC_END();
5073 }
5074 else
5075 {
5076 /* memory target */
5077 IEM_MC_BEGIN(0, 1);
5078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5081 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5082 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5083 } IEM_MC_ELSE() {
5084 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5085 } IEM_MC_ENDIF();
5086 IEM_MC_ADVANCE_RIP();
5087 IEM_MC_END();
5088 }
5089 return VINF_SUCCESS;
5090}
5091
5092
5093/** Opcode 0x0f 0x9a. */
5094FNIEMOP_DEF(iemOp_setp_Eb)
5095{
5096 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5097 IEMOP_HLP_MIN_386();
5098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5099
5100 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5101 * any way. AMD says it's "unused", whatever that means. We're
5102 * ignoring for now. */
5103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5104 {
5105 /* register target */
5106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5107 IEM_MC_BEGIN(0, 0);
5108 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5109 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5110 } IEM_MC_ELSE() {
5111 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5112 } IEM_MC_ENDIF();
5113 IEM_MC_ADVANCE_RIP();
5114 IEM_MC_END();
5115 }
5116 else
5117 {
5118 /* memory target */
5119 IEM_MC_BEGIN(0, 1);
5120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5123 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5124 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5125 } IEM_MC_ELSE() {
5126 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5127 } IEM_MC_ENDIF();
5128 IEM_MC_ADVANCE_RIP();
5129 IEM_MC_END();
5130 }
5131 return VINF_SUCCESS;
5132}
5133
5134
5135/** Opcode 0x0f 0x9b. */
5136FNIEMOP_DEF(iemOp_setnp_Eb)
5137{
5138 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5139 IEMOP_HLP_MIN_386();
5140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5141
5142 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5143 * any way. AMD says it's "unused", whatever that means. We're
5144 * ignoring for now. */
5145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5146 {
5147 /* register target */
5148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5149 IEM_MC_BEGIN(0, 0);
5150 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5151 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5152 } IEM_MC_ELSE() {
5153 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5154 } IEM_MC_ENDIF();
5155 IEM_MC_ADVANCE_RIP();
5156 IEM_MC_END();
5157 }
5158 else
5159 {
5160 /* memory target */
5161 IEM_MC_BEGIN(0, 1);
5162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5166 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5167 } IEM_MC_ELSE() {
5168 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5169 } IEM_MC_ENDIF();
5170 IEM_MC_ADVANCE_RIP();
5171 IEM_MC_END();
5172 }
5173 return VINF_SUCCESS;
5174}
5175
5176
5177/** Opcode 0x0f 0x9c. */
5178FNIEMOP_DEF(iemOp_setl_Eb)
5179{
5180 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5181 IEMOP_HLP_MIN_386();
5182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5183
5184 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5185 * any way. AMD says it's "unused", whatever that means. We're
5186 * ignoring for now. */
5187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5188 {
5189 /* register target */
5190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5191 IEM_MC_BEGIN(0, 0);
5192 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5193 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5194 } IEM_MC_ELSE() {
5195 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5196 } IEM_MC_ENDIF();
5197 IEM_MC_ADVANCE_RIP();
5198 IEM_MC_END();
5199 }
5200 else
5201 {
5202 /* memory target */
5203 IEM_MC_BEGIN(0, 1);
5204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5207 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5208 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5209 } IEM_MC_ELSE() {
5210 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5211 } IEM_MC_ENDIF();
5212 IEM_MC_ADVANCE_RIP();
5213 IEM_MC_END();
5214 }
5215 return VINF_SUCCESS;
5216}
5217
5218
5219/** Opcode 0x0f 0x9d. */
5220FNIEMOP_DEF(iemOp_setnl_Eb)
5221{
5222 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5223 IEMOP_HLP_MIN_386();
5224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5225
5226 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5227 * any way. AMD says it's "unused", whatever that means. We're
5228 * ignoring for now. */
5229 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5230 {
5231 /* register target */
5232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5233 IEM_MC_BEGIN(0, 0);
5234 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5235 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5236 } IEM_MC_ELSE() {
5237 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5238 } IEM_MC_ENDIF();
5239 IEM_MC_ADVANCE_RIP();
5240 IEM_MC_END();
5241 }
5242 else
5243 {
5244 /* memory target */
5245 IEM_MC_BEGIN(0, 1);
5246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5249 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5250 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5251 } IEM_MC_ELSE() {
5252 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5253 } IEM_MC_ENDIF();
5254 IEM_MC_ADVANCE_RIP();
5255 IEM_MC_END();
5256 }
5257 return VINF_SUCCESS;
5258}
5259
5260
5261/** Opcode 0x0f 0x9e. */
5262FNIEMOP_DEF(iemOp_setle_Eb)
5263{
5264 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5265 IEMOP_HLP_MIN_386();
5266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5267
5268 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5269 * any way. AMD says it's "unused", whatever that means. We're
5270 * ignoring for now. */
5271 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5272 {
5273 /* register target */
5274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5275 IEM_MC_BEGIN(0, 0);
5276 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5277 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5278 } IEM_MC_ELSE() {
5279 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5280 } IEM_MC_ENDIF();
5281 IEM_MC_ADVANCE_RIP();
5282 IEM_MC_END();
5283 }
5284 else
5285 {
5286 /* memory target */
5287 IEM_MC_BEGIN(0, 1);
5288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5291 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5292 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5293 } IEM_MC_ELSE() {
5294 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5295 } IEM_MC_ENDIF();
5296 IEM_MC_ADVANCE_RIP();
5297 IEM_MC_END();
5298 }
5299 return VINF_SUCCESS;
5300}
5301
5302
5303/** Opcode 0x0f 0x9f. */
5304FNIEMOP_DEF(iemOp_setnle_Eb)
5305{
5306 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5307 IEMOP_HLP_MIN_386();
5308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5309
5310 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5311 * any way. AMD says it's "unused", whatever that means. We're
5312 * ignoring it for now. */
5313 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5314 {
5315 /* register target */
5316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5317 IEM_MC_BEGIN(0, 0);
5318 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5319 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5320 } IEM_MC_ELSE() {
5321 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5322 } IEM_MC_ENDIF();
5323 IEM_MC_ADVANCE_RIP();
5324 IEM_MC_END();
5325 }
5326 else
5327 {
5328 /* memory target */
5329 IEM_MC_BEGIN(0, 1);
5330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5333 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5334 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5335 } IEM_MC_ELSE() {
5336 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5337 } IEM_MC_ENDIF();
5338 IEM_MC_ADVANCE_RIP();
5339 IEM_MC_END();
5340 }
5341 return VINF_SUCCESS;
5342}
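

/**
 * The setl/setnl/setle/setnle forms above differ only in the EFLAGS condition
 * tested and the polarity of the byte stored. A minimal plain-C sketch of the
 * 'le' condition (illustrative only; the decoder does this through the
 * IEM_MC_IF_EFL_* microcode macros, and the sketch name is made up):
 */
DECLINLINE(uint8_t) iemSketchSetLeValue(uint32_t fEFlags)
{
    bool const fZf = RT_BOOL(fEFlags & X86_EFL_ZF);
    bool const fSf = RT_BOOL(fEFlags & X86_EFL_SF);
    bool const fOf = RT_BOOL(fEFlags & X86_EFL_OF);
    return (uint8_t)(fZf || fSf != fOf);    /* 1 for setle, inverted for setnle */
}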
5343
5344
5345/**
5346 * Common 'push segment-register' helper.
5347 */
5348FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5349{
5350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5351 if (iReg < X86_SREG_FS)
5352 IEMOP_HLP_NO_64BIT();
5353 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5354
5355 switch (pVCpu->iem.s.enmEffOpSize)
5356 {
5357 case IEMMODE_16BIT:
5358 IEM_MC_BEGIN(0, 1);
5359 IEM_MC_LOCAL(uint16_t, u16Value);
5360 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5361 IEM_MC_PUSH_U16(u16Value);
5362 IEM_MC_ADVANCE_RIP();
5363 IEM_MC_END();
5364 break;
5365
5366 case IEMMODE_32BIT:
5367 IEM_MC_BEGIN(0, 1);
5368 IEM_MC_LOCAL(uint32_t, u32Value);
5369 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
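/* Note: pushing a segment register with a 32-bit operand size is special in
   that (at least some) CPUs only write the low 16 bits of the stack slot,
   which is why the dedicated _SREG push microcode op is used here. */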
5370 IEM_MC_PUSH_U32_SREG(u32Value);
5371 IEM_MC_ADVANCE_RIP();
5372 IEM_MC_END();
5373 break;
5374
5375 case IEMMODE_64BIT:
5376 IEM_MC_BEGIN(0, 1);
5377 IEM_MC_LOCAL(uint64_t, u64Value);
5378 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5379 IEM_MC_PUSH_U64(u64Value);
5380 IEM_MC_ADVANCE_RIP();
5381 IEM_MC_END();
5382 break;
5383 }
5384
5385 return VINF_SUCCESS;
5386}
5387
5388
5389/** Opcode 0x0f 0xa0. */
5390FNIEMOP_DEF(iemOp_push_fs)
5391{
5392 IEMOP_MNEMONIC(push_fs, "push fs");
5393 IEMOP_HLP_MIN_386();
5394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5395 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5396}
5397
5398
5399/** Opcode 0x0f 0xa1. */
5400FNIEMOP_DEF(iemOp_pop_fs)
5401{
5402 IEMOP_MNEMONIC(pop_fs, "pop fs");
5403 IEMOP_HLP_MIN_386();
5404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5406}
5407
5408
5409/** Opcode 0x0f 0xa2. */
5410FNIEMOP_DEF(iemOp_cpuid)
5411{
5412 IEMOP_MNEMONIC(cpuid, "cpuid");
5413 IEMOP_HLP_MIN_486(); /* not on all 486s. */
5414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5416}
5417
5418
5419/**
5420 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5421 * iemOp_bts_Ev_Gv.
5422 */
5423FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5424{
5425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5426 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5427
5428 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5429 {
5430 /* register destination. */
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5432 switch (pVCpu->iem.s.enmEffOpSize)
5433 {
5434 case IEMMODE_16BIT:
5435 IEM_MC_BEGIN(3, 0);
5436 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5437 IEM_MC_ARG(uint16_t, u16Src, 1);
5438 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5439
5440 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5441 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5442 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5443 IEM_MC_REF_EFLAGS(pEFlags);
5444 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5445
5446 IEM_MC_ADVANCE_RIP();
5447 IEM_MC_END();
5448 return VINF_SUCCESS;
5449
5450 case IEMMODE_32BIT:
5451 IEM_MC_BEGIN(3, 0);
5452 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5453 IEM_MC_ARG(uint32_t, u32Src, 1);
5454 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5455
5456 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5457 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5458 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5459 IEM_MC_REF_EFLAGS(pEFlags);
5460 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5461
5462 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5463 IEM_MC_ADVANCE_RIP();
5464 IEM_MC_END();
5465 return VINF_SUCCESS;
5466
5467 case IEMMODE_64BIT:
5468 IEM_MC_BEGIN(3, 0);
5469 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5470 IEM_MC_ARG(uint64_t, u64Src, 1);
5471 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5472
5473 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5474 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5475 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5476 IEM_MC_REF_EFLAGS(pEFlags);
5477 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5478
5479 IEM_MC_ADVANCE_RIP();
5480 IEM_MC_END();
5481 return VINF_SUCCESS;
5482
5483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5484 }
5485 }
5486 else
5487 {
5488 /* memory destination. */
5489
5490 uint32_t fAccess;
5491 if (pImpl->pfnLockedU16)
5492 fAccess = IEM_ACCESS_DATA_RW;
5493 else /* BT */
5494 fAccess = IEM_ACCESS_DATA_R;
5495
5496 /** @todo test negative bit offsets! */
5497 switch (pVCpu->iem.s.enmEffOpSize)
5498 {
5499 case IEMMODE_16BIT:
5500 IEM_MC_BEGIN(3, 2);
5501 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5502 IEM_MC_ARG(uint16_t, u16Src, 1);
5503 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5505 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5506
5507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5508 if (pImpl->pfnLockedU16)
5509 IEMOP_HLP_DONE_DECODING();
5510 else
5511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5512 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
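/* Split the signed bit offset into a word index used to adjust the effective
   address (the SAR keeps the sign) and a bit number within that word. */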
5513 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5514 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5515 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5516 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5517 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5518 IEM_MC_FETCH_EFLAGS(EFlags);
5519
5520 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5521 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5523 else
5524 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5525 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5526
5527 IEM_MC_COMMIT_EFLAGS(EFlags);
5528 IEM_MC_ADVANCE_RIP();
5529 IEM_MC_END();
5530 return VINF_SUCCESS;
5531
5532 case IEMMODE_32BIT:
5533 IEM_MC_BEGIN(3, 2);
5534 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5535 IEM_MC_ARG(uint32_t, u32Src, 1);
5536 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5538 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5539
5540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5541 if (pImpl->pfnLockedU16)
5542 IEMOP_HLP_DONE_DECODING();
5543 else
5544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5545 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5546 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5547 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5548 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5549 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5550 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5551 IEM_MC_FETCH_EFLAGS(EFlags);
5552
5553 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5554 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5556 else
5557 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5558 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5559
5560 IEM_MC_COMMIT_EFLAGS(EFlags);
5561 IEM_MC_ADVANCE_RIP();
5562 IEM_MC_END();
5563 return VINF_SUCCESS;
5564
5565 case IEMMODE_64BIT:
5566 IEM_MC_BEGIN(3, 2);
5567 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5568 IEM_MC_ARG(uint64_t, u64Src, 1);
5569 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5571 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5572
5573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5574 if (pImpl->pfnLockedU16)
5575 IEMOP_HLP_DONE_DECODING();
5576 else
5577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5578 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5579 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5580 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5581 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5582 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5583 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5584 IEM_MC_FETCH_EFLAGS(EFlags);
5585
5586 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5587 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5588 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5589 else
5590 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5591 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5592
5593 IEM_MC_COMMIT_EFLAGS(EFlags);
5594 IEM_MC_ADVANCE_RIP();
5595 IEM_MC_END();
5596 return VINF_SUCCESS;
5597
5598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5599 }
5600 }
5601}
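

/**
 * To illustrate the address adjustment done above: with a register source the
 * bit offset is a signed quantity that may address bits outside the operand
 * itself. A plain-C sketch of the 16-bit split (name made up, not used by the
 * decoder):
 */
DECLINLINE(void) iemSketchBitEffAddrU16(RTGCPTR *pGCPtrEff, uint16_t u16BitOfs, uint16_t *puBitNo)
{
    int16_t const i16WordIdx = (int16_t)u16BitOfs >> 4;     /* e.g. 0xfff0 (-16) yields -1 */
    *pGCPtrEff += (int32_t)i16WordIdx * 2;                  /* scale the word index to bytes */
    *puBitNo    = u16BitOfs & 0x0f;                         /* bit 0..15 within that word */
}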
5602
5603
5604/** Opcode 0x0f 0xa3. */
5605FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5606{
5607 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5608 IEMOP_HLP_MIN_386();
5609 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5610}
5611
5612
5613/**
5614 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5615 */
5616FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5617{
5618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5619 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5620
5621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5622 {
5623 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5625
5626 switch (pVCpu->iem.s.enmEffOpSize)
5627 {
5628 case IEMMODE_16BIT:
5629 IEM_MC_BEGIN(4, 0);
5630 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5631 IEM_MC_ARG(uint16_t, u16Src, 1);
5632 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5633 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5634
5635 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5636 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5637 IEM_MC_REF_EFLAGS(pEFlags);
5638 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5639
5640 IEM_MC_ADVANCE_RIP();
5641 IEM_MC_END();
5642 return VINF_SUCCESS;
5643
5644 case IEMMODE_32BIT:
5645 IEM_MC_BEGIN(4, 0);
5646 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5647 IEM_MC_ARG(uint32_t, u32Src, 1);
5648 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5649 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5650
5651 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5652 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5653 IEM_MC_REF_EFLAGS(pEFlags);
5654 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5655
5656 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5657 IEM_MC_ADVANCE_RIP();
5658 IEM_MC_END();
5659 return VINF_SUCCESS;
5660
5661 case IEMMODE_64BIT:
5662 IEM_MC_BEGIN(4, 0);
5663 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5664 IEM_MC_ARG(uint64_t, u64Src, 1);
5665 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5666 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5667
5668 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5669 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5670 IEM_MC_REF_EFLAGS(pEFlags);
5671 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5672
5673 IEM_MC_ADVANCE_RIP();
5674 IEM_MC_END();
5675 return VINF_SUCCESS;
5676
5677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5678 }
5679 }
5680 else
5681 {
5682 switch (pVCpu->iem.s.enmEffOpSize)
5683 {
5684 case IEMMODE_16BIT:
5685 IEM_MC_BEGIN(4, 2);
5686 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5687 IEM_MC_ARG(uint16_t, u16Src, 1);
5688 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5689 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5691
5692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5693 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5694 IEM_MC_ASSIGN(cShiftArg, cShift);
5695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5696 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5697 IEM_MC_FETCH_EFLAGS(EFlags);
5698 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5699 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5700
5701 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5702 IEM_MC_COMMIT_EFLAGS(EFlags);
5703 IEM_MC_ADVANCE_RIP();
5704 IEM_MC_END();
5705 return VINF_SUCCESS;
5706
5707 case IEMMODE_32BIT:
5708 IEM_MC_BEGIN(4, 2);
5709 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5710 IEM_MC_ARG(uint32_t, u32Src, 1);
5711 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5712 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5714
5715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5716 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5717 IEM_MC_ASSIGN(cShiftArg, cShift);
5718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5719 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5720 IEM_MC_FETCH_EFLAGS(EFlags);
5721 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5722 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5723
5724 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5725 IEM_MC_COMMIT_EFLAGS(EFlags);
5726 IEM_MC_ADVANCE_RIP();
5727 IEM_MC_END();
5728 return VINF_SUCCESS;
5729
5730 case IEMMODE_64BIT:
5731 IEM_MC_BEGIN(4, 2);
5732 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5733 IEM_MC_ARG(uint64_t, u64Src, 1);
5734 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5735 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5737
5738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5739 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5740 IEM_MC_ASSIGN(cShiftArg, cShift);
5741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5742 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5743 IEM_MC_FETCH_EFLAGS(EFlags);
5744 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5745 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5746
5747 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5748 IEM_MC_COMMIT_EFLAGS(EFlags);
5749 IEM_MC_ADVANCE_RIP();
5750 IEM_MC_END();
5751 return VINF_SUCCESS;
5752
5753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5754 }
5755 }
5756}
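

/**
 * Both double precision shift helpers pass the masked count straight to the
 * assembly workers. For reference, the core 16-bit shld operation amounts to
 * the following plain-C sketch (assuming 1 <= cShift <= 15; zero counts,
 * counts >= 16 and the flag updates are the cases the real assembly worker
 * behind g_iemAImpl_shld has to deal with; the sketch name is made up):
 */
DECLINLINE(uint16_t) iemSketchShldU16(uint16_t uDst, uint16_t uSrc, uint8_t cShift)
{
    return (uint16_t)((uDst << cShift) | (uSrc >> (16 - cShift)));
}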
5757
5758
5759/**
5760 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5761 */
5762FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5763{
5764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5765 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5766
5767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5768 {
5769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5770
5771 switch (pVCpu->iem.s.enmEffOpSize)
5772 {
5773 case IEMMODE_16BIT:
5774 IEM_MC_BEGIN(4, 0);
5775 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5776 IEM_MC_ARG(uint16_t, u16Src, 1);
5777 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5778 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5779
5780 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5781 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5782 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5783 IEM_MC_REF_EFLAGS(pEFlags);
5784 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5785
5786 IEM_MC_ADVANCE_RIP();
5787 IEM_MC_END();
5788 return VINF_SUCCESS;
5789
5790 case IEMMODE_32BIT:
5791 IEM_MC_BEGIN(4, 0);
5792 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5793 IEM_MC_ARG(uint32_t, u32Src, 1);
5794 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5795 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5796
5797 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5798 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5799 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5800 IEM_MC_REF_EFLAGS(pEFlags);
5801 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5802
5803 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5804 IEM_MC_ADVANCE_RIP();
5805 IEM_MC_END();
5806 return VINF_SUCCESS;
5807
5808 case IEMMODE_64BIT:
5809 IEM_MC_BEGIN(4, 0);
5810 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5811 IEM_MC_ARG(uint64_t, u64Src, 1);
5812 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5813 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5814
5815 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5816 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5817 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5818 IEM_MC_REF_EFLAGS(pEFlags);
5819 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5820
5821 IEM_MC_ADVANCE_RIP();
5822 IEM_MC_END();
5823 return VINF_SUCCESS;
5824
5825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5826 }
5827 }
5828 else
5829 {
5830 switch (pVCpu->iem.s.enmEffOpSize)
5831 {
5832 case IEMMODE_16BIT:
5833 IEM_MC_BEGIN(4, 2);
5834 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5835 IEM_MC_ARG(uint16_t, u16Src, 1);
5836 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5837 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5839
5840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5842 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5843 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5844 IEM_MC_FETCH_EFLAGS(EFlags);
5845 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5846 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5847
5848 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5849 IEM_MC_COMMIT_EFLAGS(EFlags);
5850 IEM_MC_ADVANCE_RIP();
5851 IEM_MC_END();
5852 return VINF_SUCCESS;
5853
5854 case IEMMODE_32BIT:
5855 IEM_MC_BEGIN(4, 2);
5856 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5857 IEM_MC_ARG(uint32_t, u32Src, 1);
5858 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5859 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5861
5862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5864 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5865 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5866 IEM_MC_FETCH_EFLAGS(EFlags);
5867 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5868 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5869
5870 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5871 IEM_MC_COMMIT_EFLAGS(EFlags);
5872 IEM_MC_ADVANCE_RIP();
5873 IEM_MC_END();
5874 return VINF_SUCCESS;
5875
5876 case IEMMODE_64BIT:
5877 IEM_MC_BEGIN(4, 2);
5878 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5879 IEM_MC_ARG(uint64_t, u64Src, 1);
5880 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5881 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5883
5884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5886 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5887 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5888 IEM_MC_FETCH_EFLAGS(EFlags);
5889 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5890 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5891
5892 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5893 IEM_MC_COMMIT_EFLAGS(EFlags);
5894 IEM_MC_ADVANCE_RIP();
5895 IEM_MC_END();
5896 return VINF_SUCCESS;
5897
5898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5899 }
5900 }
5901}
5902
5903
5904
5905/** Opcode 0x0f 0xa4. */
5906FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5907{
5908 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5909 IEMOP_HLP_MIN_386();
5910 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5911}
5912
5913
5914/** Opcode 0x0f 0xa5. */
5915FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5916{
5917 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5918 IEMOP_HLP_MIN_386();
5919 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5920}
5921
5922
5923/** Opcode 0x0f 0xa8. */
5924FNIEMOP_DEF(iemOp_push_gs)
5925{
5926 IEMOP_MNEMONIC(push_gs, "push gs");
5927 IEMOP_HLP_MIN_386();
5928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5929 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5930}
5931
5932
5933/** Opcode 0x0f 0xa9. */
5934FNIEMOP_DEF(iemOp_pop_gs)
5935{
5936 IEMOP_MNEMONIC(pop_gs, "pop gs");
5937 IEMOP_HLP_MIN_386();
5938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5939 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5940}
5941
5942
5943/** Opcode 0x0f 0xaa. */
5944FNIEMOP_STUB(iemOp_rsm);
5945//IEMOP_HLP_MIN_386();
5946
5947
5948/** Opcode 0x0f 0xab. */
5949FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5950{
5951 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5952 IEMOP_HLP_MIN_386();
5953 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5954}
5955
5956
5957/** Opcode 0x0f 0xac. */
5958FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5959{
5960 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5961 IEMOP_HLP_MIN_386();
5962 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5963}
5964
5965
5966/** Opcode 0x0f 0xad. */
5967FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5968{
5969 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5970 IEMOP_HLP_MIN_386();
5971 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5972}
5973
5974
5975/** Opcode 0x0f 0xae mem/0. */
5976FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5977{
5978 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5979 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5980 return IEMOP_RAISE_INVALID_OPCODE();
5981
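/* fxsave/fxrstor operate on a 512 byte memory area; the 16 byte alignment
   check and the actual state save/restore are left to the C implementation
   (iemCImpl_fxsave / iemCImpl_fxrstor). */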
5982 IEM_MC_BEGIN(3, 1);
5983 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5984 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5985 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5988 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5989 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5990 IEM_MC_END();
5991 return VINF_SUCCESS;
5992}
5993
5994
5995/** Opcode 0x0f 0xae mem/1. */
5996FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5997{
5998 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5999 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6000 return IEMOP_RAISE_INVALID_OPCODE();
6001
6002 IEM_MC_BEGIN(3, 1);
6003 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6004 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6005 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6008 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6009 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6010 IEM_MC_END();
6011 return VINF_SUCCESS;
6012}
6013
6014
6015/** Opcode 0x0f 0xae mem/2. */
6016FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
6017
6018/** Opcode 0x0f 0xae mem/3. */
6019FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
6020
6021/** Opcode 0x0f 0xae mem/4. */
6022FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
6023
6024/** Opcode 0x0f 0xae mem/5. */
6025FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
6026
6027/** Opcode 0x0f 0xae mem/6. */
6028FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6029
6030/** Opcode 0x0f 0xae mem/7. */
6031FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
6032
6033
6034/** Opcode 0x0f 0xae 11b/5. */
6035FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6036{
6037 RT_NOREF_PV(bRm);
6038 IEMOP_MNEMONIC(lfence, "lfence");
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6040 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6041 return IEMOP_RAISE_INVALID_OPCODE();
6042
6043 IEM_MC_BEGIN(0, 0);
6044 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6045 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6046 else
6047 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6048 IEM_MC_ADVANCE_RIP();
6049 IEM_MC_END();
6050 return VINF_SUCCESS;
6051}
6052
6053
6054/** Opcode 0x0f 0xae 11b/6. */
6055FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6056{
6057 RT_NOREF_PV(bRm);
6058 IEMOP_MNEMONIC(mfence, "mfence");
6059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6060 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6061 return IEMOP_RAISE_INVALID_OPCODE();
6062
6063 IEM_MC_BEGIN(0, 0);
6064 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6065 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6066 else
6067 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6068 IEM_MC_ADVANCE_RIP();
6069 IEM_MC_END();
6070 return VINF_SUCCESS;
6071}
6072
6073
6074/** Opcode 0x0f 0xae 11b/7. */
6075FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6076{
6077 RT_NOREF_PV(bRm);
6078 IEMOP_MNEMONIC(sfence, "sfence");
6079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6080 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6081 return IEMOP_RAISE_INVALID_OPCODE();
6082
6083 IEM_MC_BEGIN(0, 0);
6084 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6085 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6086 else
6087 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6088 IEM_MC_ADVANCE_RIP();
6089 IEM_MC_END();
6090 return VINF_SUCCESS;
6091}
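

/*
 * All three fence encodings share the fallback above: without SSE2 on the
 * host, iemAImpl_alt_mem_fence is used instead. A classic way to get fence
 * semantics on pre-SSE2 x86 hardware is a LOCKed read-modify-write, sketched
 * below (GCC inline assembly; an assumption about the technique, not a copy
 * of the actual assembly worker):
 */
#if 0 /* illustrative only */
static void iemSketchAltMemFence(void)
{
    uint32_t volatile u32Tmp = 0;
    __asm__ __volatile__("lock; addl $0, %0" : "+m" (u32Tmp)); /* drains the store buffer */
}
#endif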
6092
6093
6094/** Opcode 0xf3 0x0f 0xae 11b/0. */
6095FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6096
6097/** Opcode 0xf3 0x0f 0xae 11b/1. */
6098FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6099
6100/** Opcode 0xf3 0x0f 0xae 11b/2. */
6101FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6102
6103/** Opcode 0xf3 0x0f 0xae 11b/3. */
6104FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6105
6106
6107/** Opcode 0x0f 0xae. */
6108FNIEMOP_DEF(iemOp_Grp15)
6109{
6110 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
6111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6112 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6113 {
6114 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6115 {
6116 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
6117 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
6118 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
6119 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
6120 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
6121 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
6122 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
6123 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
6124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6125 }
6126 }
6127 else
6128 {
6129 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
6130 {
6131 case 0:
6132 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6133 {
6134 case 0: return IEMOP_RAISE_INVALID_OPCODE();
6135 case 1: return IEMOP_RAISE_INVALID_OPCODE();
6136 case 2: return IEMOP_RAISE_INVALID_OPCODE();
6137 case 3: return IEMOP_RAISE_INVALID_OPCODE();
6138 case 4: return IEMOP_RAISE_INVALID_OPCODE();
6139 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
6140 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
6141 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
6142 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6143 }
6144 break;
6145
6146 case IEM_OP_PRF_REPZ:
6147 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6148 {
6149 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
6150 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
6151 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
6152 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
6153 case 4: return IEMOP_RAISE_INVALID_OPCODE();
6154 case 5: return IEMOP_RAISE_INVALID_OPCODE();
6155 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6156 case 7: return IEMOP_RAISE_INVALID_OPCODE();
6157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6158 }
6159 break;
6160
6161 default:
6162 return IEMOP_RAISE_INVALID_OPCODE();
6163 }
6164 }
6165}
6166
6167
6168/** Opcode 0x0f 0xaf. */
6169FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6170{
6171 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6172 IEMOP_HLP_MIN_386();
6173 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6174 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6175}
6176
6177
6178/** Opcode 0x0f 0xb0. */
6179FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6180{
6181 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6182 IEMOP_HLP_MIN_486();
6183 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6184
6185 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6186 {
6187 IEMOP_HLP_DONE_DECODING();
6188 IEM_MC_BEGIN(4, 0);
6189 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6190 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6191 IEM_MC_ARG(uint8_t, u8Src, 2);
6192 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6193
6194 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6195 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6196 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6197 IEM_MC_REF_EFLAGS(pEFlags);
6198 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6199 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6200 else
6201 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6202
6203 IEM_MC_ADVANCE_RIP();
6204 IEM_MC_END();
6205 }
6206 else
6207 {
6208 IEM_MC_BEGIN(4, 3);
6209 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6210 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6211 IEM_MC_ARG(uint8_t, u8Src, 2);
6212 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6214 IEM_MC_LOCAL(uint8_t, u8Al);
6215
6216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6217 IEMOP_HLP_DONE_DECODING();
6218 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6219 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6220 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6221 IEM_MC_FETCH_EFLAGS(EFlags);
6222 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6223 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6224 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6225 else
6226 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6227
6228 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6229 IEM_MC_COMMIT_EFLAGS(EFlags);
6230 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6231 IEM_MC_ADVANCE_RIP();
6232 IEM_MC_END();
6233 }
6234 return VINF_SUCCESS;
6235}
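

/**
 * The compare-and-exchange semantics implemented by the iemAImpl_cmpxchg_u8
 * worker, as a plain-C sketch (illustrative, name made up; the real worker
 * sets all the arithmetic flags of the comparison, not just ZF):
 */
DECLINLINE(void) iemSketchCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
{
    if (*pu8Dst == *pu8Al)
    {
        *pu8Dst    = u8Src;             /* equal: store the source, set ZF */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *pu8Al     = *pu8Dst;           /* not equal: load AL from the destination, clear ZF */
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
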
6236
6237/** Opcode 0x0f 0xb1. */
6238FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6239{
6240 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6241 IEMOP_HLP_MIN_486();
6242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6243
6244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6245 {
6246 IEMOP_HLP_DONE_DECODING();
6247 switch (pVCpu->iem.s.enmEffOpSize)
6248 {
6249 case IEMMODE_16BIT:
6250 IEM_MC_BEGIN(4, 0);
6251 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6252 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6253 IEM_MC_ARG(uint16_t, u16Src, 2);
6254 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6255
6256 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6257 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6258 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6259 IEM_MC_REF_EFLAGS(pEFlags);
6260 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6261 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6262 else
6263 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6264
6265 IEM_MC_ADVANCE_RIP();
6266 IEM_MC_END();
6267 return VINF_SUCCESS;
6268
6269 case IEMMODE_32BIT:
6270 IEM_MC_BEGIN(4, 0);
6271 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6272 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6273 IEM_MC_ARG(uint32_t, u32Src, 2);
6274 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6275
6276 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6277 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6278 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6279 IEM_MC_REF_EFLAGS(pEFlags);
6280 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6281 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6282 else
6283 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6284
6285 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6286 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6287 IEM_MC_ADVANCE_RIP();
6288 IEM_MC_END();
6289 return VINF_SUCCESS;
6290
6291 case IEMMODE_64BIT:
6292 IEM_MC_BEGIN(4, 0);
6293 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6294 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6295#ifdef RT_ARCH_X86
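/* On 32-bit hosts the 64-bit source cannot be passed by value to the
   assembly worker, so it is passed by reference instead. */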
6296 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6297#else
6298 IEM_MC_ARG(uint64_t, u64Src, 2);
6299#endif
6300 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6301
6302 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6303 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6304 IEM_MC_REF_EFLAGS(pEFlags);
6305#ifdef RT_ARCH_X86
6306 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6307 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6308 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6309 else
6310 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6311#else
6312 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6313 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6314 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6315 else
6316 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6317#endif
6318
6319 IEM_MC_ADVANCE_RIP();
6320 IEM_MC_END();
6321 return VINF_SUCCESS;
6322
6323 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6324 }
6325 }
6326 else
6327 {
6328 switch (pVCpu->iem.s.enmEffOpSize)
6329 {
6330 case IEMMODE_16BIT:
6331 IEM_MC_BEGIN(4, 3);
6332 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6333 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6334 IEM_MC_ARG(uint16_t, u16Src, 2);
6335 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6337 IEM_MC_LOCAL(uint16_t, u16Ax);
6338
6339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6340 IEMOP_HLP_DONE_DECODING();
6341 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6342 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6343 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6344 IEM_MC_FETCH_EFLAGS(EFlags);
6345 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6346 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6347 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6348 else
6349 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6350
6351 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6352 IEM_MC_COMMIT_EFLAGS(EFlags);
6353 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6354 IEM_MC_ADVANCE_RIP();
6355 IEM_MC_END();
6356 return VINF_SUCCESS;
6357
6358 case IEMMODE_32BIT:
6359 IEM_MC_BEGIN(4, 3);
6360 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6361 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6362 IEM_MC_ARG(uint32_t, u32Src, 2);
6363 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6365 IEM_MC_LOCAL(uint32_t, u32Eax);
6366
6367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6368 IEMOP_HLP_DONE_DECODING();
6369 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6370 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6371 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6372 IEM_MC_FETCH_EFLAGS(EFlags);
6373 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6374 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6375 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6376 else
6377 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6378
6379 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6380 IEM_MC_COMMIT_EFLAGS(EFlags);
6381 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6382 IEM_MC_ADVANCE_RIP();
6383 IEM_MC_END();
6384 return VINF_SUCCESS;
6385
6386 case IEMMODE_64BIT:
6387 IEM_MC_BEGIN(4, 3);
6388 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6389 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6390#ifdef RT_ARCH_X86
6391 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6392#else
6393 IEM_MC_ARG(uint64_t, u64Src, 2);
6394#endif
6395 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6397 IEM_MC_LOCAL(uint64_t, u64Rax);
6398
6399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6400 IEMOP_HLP_DONE_DECODING();
6401 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6402 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6403 IEM_MC_FETCH_EFLAGS(EFlags);
6404 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6405#ifdef RT_ARCH_X86
6406 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6407 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6408 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6409 else
6410 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6411#else
6412 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6413 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6414 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6415 else
6416 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6417#endif
6418
6419 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6420 IEM_MC_COMMIT_EFLAGS(EFlags);
6421 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6422 IEM_MC_ADVANCE_RIP();
6423 IEM_MC_END();
6424 return VINF_SUCCESS;
6425
6426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6427 }
6428 }
6429}
6430
6431
6432FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6433{
6434 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6435 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6436
6437 switch (pVCpu->iem.s.enmEffOpSize)
6438 {
6439 case IEMMODE_16BIT:
6440 IEM_MC_BEGIN(5, 1);
6441 IEM_MC_ARG(uint16_t, uSel, 0);
6442 IEM_MC_ARG(uint16_t, offSeg, 1);
6443 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6444 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6445 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6446 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6449 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6450 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6451 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6452 IEM_MC_END();
6453 return VINF_SUCCESS;
6454
6455 case IEMMODE_32BIT:
6456 IEM_MC_BEGIN(5, 1);
6457 IEM_MC_ARG(uint16_t, uSel, 0);
6458 IEM_MC_ARG(uint32_t, offSeg, 1);
6459 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6460 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6461 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6462 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6465 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6466 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6467 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6468 IEM_MC_END();
6469 return VINF_SUCCESS;
6470
6471 case IEMMODE_64BIT:
6472 IEM_MC_BEGIN(5, 1);
6473 IEM_MC_ARG(uint16_t, uSel, 0);
6474 IEM_MC_ARG(uint64_t, offSeg, 1);
6475 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6476 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6477 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6478 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6481 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6482 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6483 else
6484 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6485 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6486 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6487 IEM_MC_END();
6488 return VINF_SUCCESS;
6489
6490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6491 }
6492}
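

/**
 * The layout of the far pointer operand read above, offset first and the
 * selector at the end, e.g. for the 32-bit case (illustrative struct, assumes
 * byte packing; not a type the code uses):
 */
#pragma pack(1)
typedef struct IEMSKETCHFARPTR32
{
    uint32_t off;                       /* goes into the general register */
    uint16_t uSel;                      /* goes into the segment register */
} IEMSKETCHFARPTR32;
#pragma pack()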
6493
6494
6495/** Opcode 0x0f 0xb2. */
6496FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6497{
6498 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6499 IEMOP_HLP_MIN_386();
6500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6501 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6502 return IEMOP_RAISE_INVALID_OPCODE();
6503 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6504}
6505
6506
6507/** Opcode 0x0f 0xb3. */
6508FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6509{
6510 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6511 IEMOP_HLP_MIN_386();
6512 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6513}
6514
6515
6516/** Opcode 0x0f 0xb4. */
6517FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6518{
6519 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6520 IEMOP_HLP_MIN_386();
6521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6522 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6523 return IEMOP_RAISE_INVALID_OPCODE();
6524 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6525}
6526
6527
6528/** Opcode 0x0f 0xb5. */
6529FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6530{
6531 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6532 IEMOP_HLP_MIN_386();
6533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6534 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6535 return IEMOP_RAISE_INVALID_OPCODE();
6536 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6537}
6538
6539
6540/** Opcode 0x0f 0xb6. */
6541FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6542{
6543 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6544 IEMOP_HLP_MIN_386();
6545
6546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6547
6548 /*
6549 * If rm is denoting a register, no more instruction bytes.
6550 */
6551 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6552 {
6553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6554 switch (pVCpu->iem.s.enmEffOpSize)
6555 {
6556 case IEMMODE_16BIT:
6557 IEM_MC_BEGIN(0, 1);
6558 IEM_MC_LOCAL(uint16_t, u16Value);
6559 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6560 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6561 IEM_MC_ADVANCE_RIP();
6562 IEM_MC_END();
6563 return VINF_SUCCESS;
6564
6565 case IEMMODE_32BIT:
6566 IEM_MC_BEGIN(0, 1);
6567 IEM_MC_LOCAL(uint32_t, u32Value);
6568 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6569 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6570 IEM_MC_ADVANCE_RIP();
6571 IEM_MC_END();
6572 return VINF_SUCCESS;
6573
6574 case IEMMODE_64BIT:
6575 IEM_MC_BEGIN(0, 1);
6576 IEM_MC_LOCAL(uint64_t, u64Value);
6577 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6578 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6579 IEM_MC_ADVANCE_RIP();
6580 IEM_MC_END();
6581 return VINF_SUCCESS;
6582
6583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6584 }
6585 }
6586 else
6587 {
6588 /*
6589 * We're loading a register from memory.
6590 */
6591 switch (pVCpu->iem.s.enmEffOpSize)
6592 {
6593 case IEMMODE_16BIT:
6594 IEM_MC_BEGIN(0, 2);
6595 IEM_MC_LOCAL(uint16_t, u16Value);
6596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6599 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6600 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6601 IEM_MC_ADVANCE_RIP();
6602 IEM_MC_END();
6603 return VINF_SUCCESS;
6604
6605 case IEMMODE_32BIT:
6606 IEM_MC_BEGIN(0, 2);
6607 IEM_MC_LOCAL(uint32_t, u32Value);
6608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6611 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6612 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6613 IEM_MC_ADVANCE_RIP();
6614 IEM_MC_END();
6615 return VINF_SUCCESS;
6616
6617 case IEMMODE_64BIT:
6618 IEM_MC_BEGIN(0, 2);
6619 IEM_MC_LOCAL(uint64_t, u64Value);
6620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6623 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6624 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6625 IEM_MC_ADVANCE_RIP();
6626 IEM_MC_END();
6627 return VINF_SUCCESS;
6628
6629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6630 }
6631 }
6632}
6633
6634
6635/** Opcode 0x0f 0xb7. */
6636FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6637{
6638 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6639 IEMOP_HLP_MIN_386();
6640
6641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6642
6643 /** @todo Not entirely sure how the operand size prefix is handled here,
6644 * assuming that it will be ignored. Would be nice to have a few
6645 * test for this. */
6646 /*
6647 * If rm is denoting a register, no more instruction bytes.
6648 */
6649 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6650 {
6651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6652 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6653 {
6654 IEM_MC_BEGIN(0, 1);
6655 IEM_MC_LOCAL(uint32_t, u32Value);
6656 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6657 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6658 IEM_MC_ADVANCE_RIP();
6659 IEM_MC_END();
6660 }
6661 else
6662 {
6663 IEM_MC_BEGIN(0, 1);
6664 IEM_MC_LOCAL(uint64_t, u64Value);
6665 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6666 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6667 IEM_MC_ADVANCE_RIP();
6668 IEM_MC_END();
6669 }
6670 }
6671 else
6672 {
6673 /*
6674 * We're loading a register from memory.
6675 */
6676 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6677 {
6678 IEM_MC_BEGIN(0, 2);
6679 IEM_MC_LOCAL(uint32_t, u32Value);
6680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6683 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6684 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6685 IEM_MC_ADVANCE_RIP();
6686 IEM_MC_END();
6687 }
6688 else
6689 {
6690 IEM_MC_BEGIN(0, 2);
6691 IEM_MC_LOCAL(uint64_t, u64Value);
6692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6695 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6696 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6697 IEM_MC_ADVANCE_RIP();
6698 IEM_MC_END();
6699 }
6700 }
6701 return VINF_SUCCESS;
6702}
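

/**
 * For contrast with the movsx forms further down: the only difference between
 * the movzx and movsx families is zero vs. sign extension of the narrow
 * source, e.g. for the word-to-quadword case (plain-C sketch, names made up):
 */
DECLINLINE(uint64_t) iemSketchMovzxU16(uint16_t u16Src)
{
    return u16Src;                              /* implicit zero extension */
}

DECLINLINE(uint64_t) iemSketchMovsxU16(uint16_t u16Src)
{
    return (uint64_t)(int64_t)(int16_t)u16Src;  /* sign extension */
}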
6703
6704
6705/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6706FNIEMOP_UD_STUB(iemOp_jmpe);
6707/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6708FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6709
6710
6711/** Opcode 0x0f 0xb9. */
6712FNIEMOP_DEF(iemOp_Grp10)
6713{
6714 Log(("iemOp_Grp10 -> #UD\n"));
6715 return IEMOP_RAISE_INVALID_OPCODE();
6716}
6717
6718
6719/** Opcode 0x0f 0xba. */
6720FNIEMOP_DEF(iemOp_Grp8)
6721{
6722 IEMOP_HLP_MIN_386();
6723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6724 PCIEMOPBINSIZES pImpl;
6725 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6726 {
6727 case 0: case 1: case 2: case 3:
6728 return IEMOP_RAISE_INVALID_OPCODE();
6729 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6730 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6731 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6732 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6734 }
6735 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6736
6737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6738 {
6739 /* register destination. */
6740 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6742
6743 switch (pVCpu->iem.s.enmEffOpSize)
6744 {
6745 case IEMMODE_16BIT:
6746 IEM_MC_BEGIN(3, 0);
6747 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6748 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6749 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6750
6751 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6752 IEM_MC_REF_EFLAGS(pEFlags);
6753 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6754
6755 IEM_MC_ADVANCE_RIP();
6756 IEM_MC_END();
6757 return VINF_SUCCESS;
6758
6759 case IEMMODE_32BIT:
6760 IEM_MC_BEGIN(3, 0);
6761 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6762 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6763 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6764
6765 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6766 IEM_MC_REF_EFLAGS(pEFlags);
6767 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6768
6769 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6770 IEM_MC_ADVANCE_RIP();
6771 IEM_MC_END();
6772 return VINF_SUCCESS;
6773
6774 case IEMMODE_64BIT:
6775 IEM_MC_BEGIN(3, 0);
6776 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6777 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6778 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6779
6780 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6781 IEM_MC_REF_EFLAGS(pEFlags);
6782 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6783
6784 IEM_MC_ADVANCE_RIP();
6785 IEM_MC_END();
6786 return VINF_SUCCESS;
6787
6788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6789 }
6790 }
6791 else
6792 {
6793 /* memory destination. */
6794
6795 uint32_t fAccess;
6796 if (pImpl->pfnLockedU16)
6797 fAccess = IEM_ACCESS_DATA_RW;
6798 else /* BT */
6799 fAccess = IEM_ACCESS_DATA_R;
6800
6801 /** @todo test negative bit offsets! */
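/* Note: unlike the Gv forms, the immediate bit offset is simply masked to
   the operand width below, so no effective address adjustment is done. */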
6802 switch (pVCpu->iem.s.enmEffOpSize)
6803 {
6804 case IEMMODE_16BIT:
6805 IEM_MC_BEGIN(3, 1);
6806 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6807 IEM_MC_ARG(uint16_t, u16Src, 1);
6808 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6810
6811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6812 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6813 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6814 if (pImpl->pfnLockedU16)
6815 IEMOP_HLP_DONE_DECODING();
6816 else
6817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6818 IEM_MC_FETCH_EFLAGS(EFlags);
6819 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6820 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6821 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6822 else
6823 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6824 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6825
6826 IEM_MC_COMMIT_EFLAGS(EFlags);
6827 IEM_MC_ADVANCE_RIP();
6828 IEM_MC_END();
6829 return VINF_SUCCESS;
6830
6831 case IEMMODE_32BIT:
6832 IEM_MC_BEGIN(3, 1);
6833 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6834 IEM_MC_ARG(uint32_t, u32Src, 1);
6835 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6837
6838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6839 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6840 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6841 if (pImpl->pfnLockedU16)
6842 IEMOP_HLP_DONE_DECODING();
6843 else
6844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6845 IEM_MC_FETCH_EFLAGS(EFlags);
6846 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6847 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6848 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6849 else
6850 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6851 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6852
6853 IEM_MC_COMMIT_EFLAGS(EFlags);
6854 IEM_MC_ADVANCE_RIP();
6855 IEM_MC_END();
6856 return VINF_SUCCESS;
6857
6858 case IEMMODE_64BIT:
6859 IEM_MC_BEGIN(3, 1);
6860 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6861 IEM_MC_ARG(uint64_t, u64Src, 1);
6862 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6864
6865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6866 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6867 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6868 if (pImpl->pfnLockedU16)
6869 IEMOP_HLP_DONE_DECODING();
6870 else
6871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6872 IEM_MC_FETCH_EFLAGS(EFlags);
6873 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6874 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6875 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6876 else
6877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6878 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6879
6880 IEM_MC_COMMIT_EFLAGS(EFlags);
6881 IEM_MC_ADVANCE_RIP();
6882 IEM_MC_END();
6883 return VINF_SUCCESS;
6884
6885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6886 }
6887 }
6888
6889}
6890
6891
6892/** Opcode 0x0f 0xbb. */
6893FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6894{
6895 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6896 IEMOP_HLP_MIN_386();
6897 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6898}
6899
6900
6901/** Opcode 0x0f 0xbc. */
6902FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6903{
6904 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6905 IEMOP_HLP_MIN_386();
6906 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6907 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6908}
6909
6910
6911/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6912FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6913
6914
6915/** Opcode 0x0f 0xbd. */
6916FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6917{
6918 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6919 IEMOP_HLP_MIN_386();
6920 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6921 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6922}
6923
6924
6925/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6926FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6927
6928
6929/** Opcode 0x0f 0xbe. */
6930FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6931{
6932 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6933 IEMOP_HLP_MIN_386();
6934
6935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6936
6937 /*
6938 * If rm is denoting a register, no more instruction bytes.
6939 */
6940 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6941 {
6942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6943 switch (pVCpu->iem.s.enmEffOpSize)
6944 {
6945 case IEMMODE_16BIT:
6946 IEM_MC_BEGIN(0, 1);
6947 IEM_MC_LOCAL(uint16_t, u16Value);
6948 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6949 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6950 IEM_MC_ADVANCE_RIP();
6951 IEM_MC_END();
6952 return VINF_SUCCESS;
6953
6954 case IEMMODE_32BIT:
6955 IEM_MC_BEGIN(0, 1);
6956 IEM_MC_LOCAL(uint32_t, u32Value);
6957 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6958 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6959 IEM_MC_ADVANCE_RIP();
6960 IEM_MC_END();
6961 return VINF_SUCCESS;
6962
6963 case IEMMODE_64BIT:
6964 IEM_MC_BEGIN(0, 1);
6965 IEM_MC_LOCAL(uint64_t, u64Value);
6966 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6967 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6968 IEM_MC_ADVANCE_RIP();
6969 IEM_MC_END();
6970 return VINF_SUCCESS;
6971
6972 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6973 }
6974 }
6975 else
6976 {
6977 /*
6978 * We're loading a register from memory.
6979 */
6980 switch (pVCpu->iem.s.enmEffOpSize)
6981 {
6982 case IEMMODE_16BIT:
6983 IEM_MC_BEGIN(0, 2);
6984 IEM_MC_LOCAL(uint16_t, u16Value);
6985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6988 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6989 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6990 IEM_MC_ADVANCE_RIP();
6991 IEM_MC_END();
6992 return VINF_SUCCESS;
6993
6994 case IEMMODE_32BIT:
6995 IEM_MC_BEGIN(0, 2);
6996 IEM_MC_LOCAL(uint32_t, u32Value);
6997 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7000 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7001 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7002 IEM_MC_ADVANCE_RIP();
7003 IEM_MC_END();
7004 return VINF_SUCCESS;
7005
7006 case IEMMODE_64BIT:
7007 IEM_MC_BEGIN(0, 2);
7008 IEM_MC_LOCAL(uint64_t, u64Value);
7009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7012 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7013 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7014 IEM_MC_ADVANCE_RIP();
7015 IEM_MC_END();
7016 return VINF_SUCCESS;
7017
7018 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7019 }
7020 }
7021}
7022
7023
7024/** Opcode 0x0f 0xbf. */
7025FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7026{
7027 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7028 IEMOP_HLP_MIN_386();
7029
7030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7031
7032 /** @todo Not entirely sure how the operand size prefix is handled here,
7033 * assuming that it will be ignored. Would be nice to have a few
7034 * tests for this. */
7035 /*
7036 * If rm is denoting a register, no more instruction bytes.
7037 */
7038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7039 {
7040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7041 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7042 {
7043 IEM_MC_BEGIN(0, 1);
7044 IEM_MC_LOCAL(uint32_t, u32Value);
7045 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7046 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7047 IEM_MC_ADVANCE_RIP();
7048 IEM_MC_END();
7049 }
7050 else
7051 {
7052 IEM_MC_BEGIN(0, 1);
7053 IEM_MC_LOCAL(uint64_t, u64Value);
7054 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7055 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7056 IEM_MC_ADVANCE_RIP();
7057 IEM_MC_END();
7058 }
7059 }
7060 else
7061 {
7062 /*
7063 * We're loading a register from memory.
7064 */
7065 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7066 {
7067 IEM_MC_BEGIN(0, 2);
7068 IEM_MC_LOCAL(uint32_t, u32Value);
7069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7072 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7073 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7074 IEM_MC_ADVANCE_RIP();
7075 IEM_MC_END();
7076 }
7077 else
7078 {
7079 IEM_MC_BEGIN(0, 2);
7080 IEM_MC_LOCAL(uint64_t, u64Value);
7081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7084 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7085 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7086 IEM_MC_ADVANCE_RIP();
7087 IEM_MC_END();
7088 }
7089 }
7090 return VINF_SUCCESS;
7091}
7092
7093
7094/** Opcode 0x0f 0xc0. */
7095FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7096{
7097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7098 IEMOP_HLP_MIN_486();
7099 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7100
7101 /*
7102 * If rm is denoting a register, no more instruction bytes.
7103 */
7104 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7105 {
7106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7107
7108 IEM_MC_BEGIN(3, 0);
7109 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7110 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7111 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7112
7113 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7114 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7115 IEM_MC_REF_EFLAGS(pEFlags);
7116 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7117
7118 IEM_MC_ADVANCE_RIP();
7119 IEM_MC_END();
7120 }
7121 else
7122 {
7123 /*
7124 * We're accessing memory.
7125 */
7126 IEM_MC_BEGIN(3, 3);
7127 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7128 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7129 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7130 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7132
7133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7134 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7135 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7136 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
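 /* xadd returns the original destination value in the source register; we
    operate on a stack copy so the GREG is only updated once the memory
    commit has succeeded. */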
7137 IEM_MC_FETCH_EFLAGS(EFlags);
7138 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7139 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7140 else
7141 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7142
7143 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7144 IEM_MC_COMMIT_EFLAGS(EFlags);
7145 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7146 IEM_MC_ADVANCE_RIP();
7147 IEM_MC_END();
7148 return VINF_SUCCESS;
7149 }
7150 return VINF_SUCCESS;
7151}
7152
7153
7154/** Opcode 0x0f 0xc1. */
7155FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7156{
7157 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7158 IEMOP_HLP_MIN_486();
7159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7160
7161 /*
7162 * If rm is denoting a register, no more instruction bytes.
7163 */
7164 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7165 {
7166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7167
7168 switch (pVCpu->iem.s.enmEffOpSize)
7169 {
7170 case IEMMODE_16BIT:
7171 IEM_MC_BEGIN(3, 0);
7172 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7173 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7174 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7175
7176 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7177 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7178 IEM_MC_REF_EFLAGS(pEFlags);
7179 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7180
7181 IEM_MC_ADVANCE_RIP();
7182 IEM_MC_END();
7183 return VINF_SUCCESS;
7184
7185 case IEMMODE_32BIT:
7186 IEM_MC_BEGIN(3, 0);
7187 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7188 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7189 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7190
7191 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7192 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7193 IEM_MC_REF_EFLAGS(pEFlags);
7194 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7195
7196 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7197 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
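 /* In 64-bit mode a 32-bit operation zeroes the upper halves of both the
    destination and the (exchanged) source register. */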
7198 IEM_MC_ADVANCE_RIP();
7199 IEM_MC_END();
7200 return VINF_SUCCESS;
7201
7202 case IEMMODE_64BIT:
7203 IEM_MC_BEGIN(3, 0);
7204 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7205 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7206 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7207
7208 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7209 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7210 IEM_MC_REF_EFLAGS(pEFlags);
7211 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7212
7213 IEM_MC_ADVANCE_RIP();
7214 IEM_MC_END();
7215 return VINF_SUCCESS;
7216
7217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7218 }
7219 }
7220 else
7221 {
7222 /*
7223 * We're accessing memory.
7224 */
7225 switch (pVCpu->iem.s.enmEffOpSize)
7226 {
7227 case IEMMODE_16BIT:
7228 IEM_MC_BEGIN(3, 3);
7229 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7230 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7231 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7232 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7234
7235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7236 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7237 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7238 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7239 IEM_MC_FETCH_EFLAGS(EFlags);
7240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7241 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7242 else
7243 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7244
7245 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7246 IEM_MC_COMMIT_EFLAGS(EFlags);
7247 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7248 IEM_MC_ADVANCE_RIP();
7249 IEM_MC_END();
7250 return VINF_SUCCESS;
7251
7252 case IEMMODE_32BIT:
7253 IEM_MC_BEGIN(3, 3);
7254 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7255 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7256 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7257 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7259
7260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7261 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7262 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7263 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7264 IEM_MC_FETCH_EFLAGS(EFlags);
7265 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7266 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7267 else
7268 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7269
7270 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7271 IEM_MC_COMMIT_EFLAGS(EFlags);
7272 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7273 IEM_MC_ADVANCE_RIP();
7274 IEM_MC_END();
7275 return VINF_SUCCESS;
7276
7277 case IEMMODE_64BIT:
7278 IEM_MC_BEGIN(3, 3);
7279 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7280 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7281 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7282 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7284
7285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7286 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7287 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7288 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7289 IEM_MC_FETCH_EFLAGS(EFlags);
7290 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7291 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7292 else
7293 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7294
7295 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7296 IEM_MC_COMMIT_EFLAGS(EFlags);
7297 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7298 IEM_MC_ADVANCE_RIP();
7299 IEM_MC_END();
7300 return VINF_SUCCESS;
7301
7302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7303 }
7304 }
7305}
7306
7307
7308/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7309FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7310/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7311FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7312/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7313FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7314/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7315FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7316
7317
7318/** Opcode 0x0f 0xc3. */
7319FNIEMOP_DEF(iemOp_movnti_My_Gy)
7320{
7321 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7322
7323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7324
7325 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7326 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7327 {
7328 switch (pVCpu->iem.s.enmEffOpSize)
7329 {
7330 case IEMMODE_32BIT:
7331 IEM_MC_BEGIN(0, 2);
7332 IEM_MC_LOCAL(uint32_t, u32Value);
7333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7334
7335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7337 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7338 return IEMOP_RAISE_INVALID_OPCODE();
7339
7340 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7341 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7342 IEM_MC_ADVANCE_RIP();
7343 IEM_MC_END();
7344 break;
7345
7346 case IEMMODE_64BIT:
7347 IEM_MC_BEGIN(0, 2);
7348 IEM_MC_LOCAL(uint64_t, u64Value);
7349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7350
7351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7353 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7354 return IEMOP_RAISE_INVALID_OPCODE();
7355
7356 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7357 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7358 IEM_MC_ADVANCE_RIP();
7359 IEM_MC_END();
7360 break;
7361
7362 case IEMMODE_16BIT:
7363 /** @todo check this form. */
7364 return IEMOP_RAISE_INVALID_OPCODE();
7365 }
7366 }
7367 else
7368 return IEMOP_RAISE_INVALID_OPCODE();
7369 return VINF_SUCCESS;
7370}
7371/* Opcode 0x66 0x0f 0xc3 - invalid */
7372/* Opcode 0xf3 0x0f 0xc3 - invalid */
7373/* Opcode 0xf2 0x0f 0xc3 - invalid */
7374
7375/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7376FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7377/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7378FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7379/* Opcode 0xf3 0x0f 0xc4 - invalid */
7380/* Opcode 0xf2 0x0f 0xc4 - invalid */
7381
7382/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7383FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7384/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7385FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7386/* Opcode 0xf3 0x0f 0xc5 - invalid */
7387/* Opcode 0xf2 0x0f 0xc5 - invalid */
7388
7389/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7390FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7391/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7392FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7393/* Opcode 0xf3 0x0f 0xc6 - invalid */
7394/* Opcode 0xf2 0x0f 0xc6 - invalid */
7395
7396
7397/** Opcode 0x0f 0xc7 !11/1. */
7398FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7399{
7400 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
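 /* cmpxchg8b compares EDX:EAX with the memory qword; on match ZF is set and
    ECX:EBX is stored, otherwise ZF is cleared and the memory value is loaded
    into EDX:EAX. */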
7401
7402 IEM_MC_BEGIN(4, 3);
7403 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7404 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7405 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7406 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7407 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7408 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7410
7411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7412 IEMOP_HLP_DONE_DECODING();
7413 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7414
7415 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7416 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7417 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7418
7419 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7420 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7421 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7422
7423 IEM_MC_FETCH_EFLAGS(EFlags);
7424 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7425 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7426 else
7427 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7428
7429 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7430 IEM_MC_COMMIT_EFLAGS(EFlags);
7431 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7432 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7433 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7434 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7435 IEM_MC_ENDIF();
7436 IEM_MC_ADVANCE_RIP();
7437
7438 IEM_MC_END();
7439 return VINF_SUCCESS;
7440}
7441
7442
7443/** Opcode REX.W 0x0f 0xc7 !11/1. */
7444FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7445{
7446 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7447 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7448 {
7449#if 0
7450 RT_NOREF(bRm);
7451 IEMOP_BITCH_ABOUT_STUB();
7452 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7453#else
7454 IEM_MC_BEGIN(4, 3);
7455 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7456 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7457 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7458 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7459 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7460 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7462
7463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7464 IEMOP_HLP_DONE_DECODING();
7465 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
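 /* cmpxchg16b requires a 16-byte aligned memory operand; misalignment raises
    #GP(0) (checked above) even with alignment checking disabled. */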
7466 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7467
7468 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7469 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7470 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7471
7472 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7473 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7474 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7475
7476 IEM_MC_FETCH_EFLAGS(EFlags);
7477# ifdef RT_ARCH_AMD64
7478 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7479 {
7480 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7481 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7482 else
7483 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7484 }
7485 else
7486# endif
7487 {
7488 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7489 accesses and is thus not atomic, which works fine in a uni-CPU guest
7490 configuration (ignoring DMA). If guest SMP is active we have no choice
7491 but to use a rendezvous callback here. Sigh. */
7492 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7493 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7494 else
7495 {
7496 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7497 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7498 }
7499 }
7500
7501 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7502 IEM_MC_COMMIT_EFLAGS(EFlags);
7503 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7504 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7505 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7506 IEM_MC_ENDIF();
7507 IEM_MC_ADVANCE_RIP();
7508
7509 IEM_MC_END();
7510 return VINF_SUCCESS;
7511#endif
7512 }
7513 Log(("cmpxchg16b -> #UD\n"));
7514 return IEMOP_RAISE_INVALID_OPCODE();
7515}
7516
7517
7518/** Opcode 0x0f 0xc7 11/6. */
7519FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7520
7521/** Opcode 0x0f 0xc7 !11/6. */
7522FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7523
7524/** Opcode 0x66 0x0f 0xc7 !11/6. */
7525FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7526
7527/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7528FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7529
7530/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7531FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7532
7533
7534/** Opcode 0x0f 0xc7. */
7535FNIEMOP_DEF(iemOp_Grp9)
7536{
7537 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
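 /* Group 9 decodes: /1 = cmpxchg8b/16b (memory only), /6 = rdrand (register)
    or vmptrld/vmclear/vmxon (memory, selected by prefix), /7 = vmptrst;
    everything else is #UD here. */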
7539 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7540 {
7541 case 0: case 2: case 3: case 4: case 5:
7542 return IEMOP_RAISE_INVALID_OPCODE();
7543 case 1:
7544 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7545 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7546 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7547 return IEMOP_RAISE_INVALID_OPCODE();
7548 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7549 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7550 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7551 case 6:
7552 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7553 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7554 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7555 {
7556 case 0:
7557 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7558 case IEM_OP_PRF_SIZE_OP:
7559 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7560 case IEM_OP_PRF_REPZ:
7561 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7562 default:
7563 return IEMOP_RAISE_INVALID_OPCODE();
7564 }
7565 case 7:
7566 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7567 {
7568 case 0:
7569 case IEM_OP_PRF_REPZ:
7570 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7571 default:
7572 return IEMOP_RAISE_INVALID_OPCODE();
7573 }
7574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7575 }
7576}
7577
7578
7579/**
7580 * Common 'bswap register' helper.
7581 */
7582FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7583{
7584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7585 switch (pVCpu->iem.s.enmEffOpSize)
7586 {
7587 case IEMMODE_16BIT:
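 /* bswap with a 16-bit operand is undefined on real CPUs; we leave the
    result to the assembly helper. */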
7588 IEM_MC_BEGIN(1, 0);
7589 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7590 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7591 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7592 IEM_MC_ADVANCE_RIP();
7593 IEM_MC_END();
7594 return VINF_SUCCESS;
7595
7596 case IEMMODE_32BIT:
7597 IEM_MC_BEGIN(1, 0);
7598 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7599 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7600 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7601 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7602 IEM_MC_ADVANCE_RIP();
7603 IEM_MC_END();
7604 return VINF_SUCCESS;
7605
7606 case IEMMODE_64BIT:
7607 IEM_MC_BEGIN(1, 0);
7608 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7609 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7610 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7611 IEM_MC_ADVANCE_RIP();
7612 IEM_MC_END();
7613 return VINF_SUCCESS;
7614
7615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7616 }
7617}
7618
7619
7620/** Opcode 0x0f 0xc8. */
7621FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7622{
7623 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7624 /* Note! Intel's manuals state that R8-R15 can be accessed by using a REX.X
7625 prefix. It appears REX.B is actually the correct prefix. For a parallel
7626 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7627 IEMOP_HLP_MIN_486();
7628 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7629}
7630
7631
7632/** Opcode 0x0f 0xc9. */
7633FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7634{
7635 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7636 IEMOP_HLP_MIN_486();
7637 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7638}
7639
7640
7641/** Opcode 0x0f 0xca. */
7642FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7643{
7644 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7645 IEMOP_HLP_MIN_486();
7646 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7647}
7648
7649
7650/** Opcode 0x0f 0xcb. */
7651FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7652{
7653 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7654 IEMOP_HLP_MIN_486();
7655 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7656}
7657
7658
7659/** Opcode 0x0f 0xcc. */
7660FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7661{
7662 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7663 IEMOP_HLP_MIN_486();
7664 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7665}
7666
7667
7668/** Opcode 0x0f 0xcd. */
7669FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7670{
7671 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7672 IEMOP_HLP_MIN_486();
7673 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7674}
7675
7676
7677/** Opcode 0x0f 0xce. */
7678FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7679{
7680 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7681 IEMOP_HLP_MIN_486();
7682 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7683}
7684
7685
7686/** Opcode 0x0f 0xcf. */
7687FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7688{
7689 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7690 IEMOP_HLP_MIN_486();
7691 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7692}
7693
7694
7695/* Opcode 0x0f 0xd0 - invalid */
7696/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7697FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7698/* Opcode 0xf3 0x0f 0xd0 - invalid */
7699/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7700FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7701
7702/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7703FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7704/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7705FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7706/* Opcode 0xf3 0x0f 0xd1 - invalid */
7707/* Opcode 0xf2 0x0f 0xd1 - invalid */
7708
7709/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7710FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7711/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7712FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7713/* Opcode 0xf3 0x0f 0xd2 - invalid */
7714/* Opcode 0xf2 0x0f 0xd2 - invalid */
7715
7716/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7717FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7718/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7719FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7720/* Opcode 0xf3 0x0f 0xd3 - invalid */
7721/* Opcode 0xf2 0x0f 0xd3 - invalid */
7722
7723/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7724FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7725/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7726FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7727/* Opcode 0xf3 0x0f 0xd4 - invalid */
7728/* Opcode 0xf2 0x0f 0xd4 - invalid */
7729
7730/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7731FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7732/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7733FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7734/* Opcode 0xf3 0x0f 0xd5 - invalid */
7735/* Opcode 0xf2 0x0f 0xd5 - invalid */
7736
7737/* Opcode 0x0f 0xd6 - invalid */
7738/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7739FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7740/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7741FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7742/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7743FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7744#if 0
7745FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7746{
7747 /* Docs says register only. */
7748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7749
7750 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7751 {
7752 case IEM_OP_PRF_SIZE_OP: /* SSE */
7753 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7754 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7755 IEM_MC_BEGIN(2, 0);
7756 IEM_MC_ARG(uint64_t *, pDst, 0);
7757 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7758 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7759 IEM_MC_PREPARE_SSE_USAGE();
7760 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7761 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7762 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7763 IEM_MC_ADVANCE_RIP();
7764 IEM_MC_END();
7765 return VINF_SUCCESS;
7766
7767 case 0: /* MMX */
7768 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7769 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7770 IEM_MC_BEGIN(2, 0);
7771 IEM_MC_ARG(uint64_t *, pDst, 0);
7772 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7773 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7774 IEM_MC_PREPARE_FPU_USAGE();
7775 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7776 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7777 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7778 IEM_MC_ADVANCE_RIP();
7779 IEM_MC_END();
7780 return VINF_SUCCESS;
7781
7782 default:
7783 return IEMOP_RAISE_INVALID_OPCODE();
7784 }
7785}
7786#endif
7787
7788
7789/** Opcode 0x0f 0xd7. */
7790FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7791{
7792 /* Docs says register only. */
7793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7794 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7795 return IEMOP_RAISE_INVALID_OPCODE();
7796
7797 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7798 /** @todo testcase: Check that the instruction implicitly clears the high
7799 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7800 * and the opcode is modified to work with the whole width (not
7801 * just 128 bits). */
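 /* pmovmskb gathers the most significant bit of each source byte into the
    low bits of the destination general register. */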
7802 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7803 {
7804 case IEM_OP_PRF_SIZE_OP: /* SSE */
7805 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7806 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7807 IEM_MC_BEGIN(2, 0);
7808 IEM_MC_ARG(uint64_t *, pDst, 0);
7809 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7810 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7811 IEM_MC_PREPARE_SSE_USAGE();
7812 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7813 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7814 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7815 IEM_MC_ADVANCE_RIP();
7816 IEM_MC_END();
7817 return VINF_SUCCESS;
7818
7819 case 0: /* MMX */
7820 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7821 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7822 IEM_MC_BEGIN(2, 0);
7823 IEM_MC_ARG(uint64_t *, pDst, 0);
7824 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7825 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7826 IEM_MC_PREPARE_FPU_USAGE();
7827 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7828 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7829 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7830 IEM_MC_ADVANCE_RIP();
7831 IEM_MC_END();
7832 return VINF_SUCCESS;
7833
7834 default:
7835 return IEMOP_RAISE_INVALID_OPCODE();
7836 }
7837}
7838
7839
7840/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7841FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7842/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7843FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7844/* Opcode 0xf3 0x0f 0xd8 - invalid */
7845/* Opcode 0xf2 0x0f 0xd8 - invalid */
7846
7847/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7848FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7849/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7850FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7851/* Opcode 0xf3 0x0f 0xd9 - invalid */
7852/* Opcode 0xf2 0x0f 0xd9 - invalid */
7853
7854/** Opcode 0x0f 0xda - pminub Pq, Qq */
7855FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7856/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7857FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7858/* Opcode 0xf3 0x0f 0xda - invalid */
7859/* Opcode 0xf2 0x0f 0xda - invalid */
7860
7861/** Opcode 0x0f 0xdb - pand Pq, Qq */
7862FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7863/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7864FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7865/* Opcode 0xf3 0x0f 0xdb - invalid */
7866/* Opcode 0xf2 0x0f 0xdb - invalid */
7867
7868/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7869FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7870/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7871FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7872/* Opcode 0xf3 0x0f 0xdc - invalid */
7873/* Opcode 0xf2 0x0f 0xdc - invalid */
7874
7875/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7876FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7877/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7878FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7879/* Opcode 0xf3 0x0f 0xdd - invalid */
7880/* Opcode 0xf2 0x0f 0xdd - invalid */
7881
7882/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7883FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7884/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7885FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7886/* Opcode 0xf3 0x0f 0xde - invalid */
7887/* Opcode 0xf2 0x0f 0xde - invalid */
7888
7889/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7890FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7891/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7892FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7893/* Opcode 0xf3 0x0f 0xdf - invalid */
7894/* Opcode 0xf2 0x0f 0xdf - invalid */
7895
7896/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7897FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7898/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7899FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7900/* Opcode 0xf3 0x0f 0xe0 - invalid */
7901/* Opcode 0xf2 0x0f 0xe0 - invalid */
7902
7903/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7904FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7905/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7906FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7907/* Opcode 0xf3 0x0f 0xe1 - invalid */
7908/* Opcode 0xf2 0x0f 0xe1 - invalid */
7909
7910/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7911FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7912/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7913FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7914/* Opcode 0xf3 0x0f 0xe2 - invalid */
7915/* Opcode 0xf2 0x0f 0xe2 - invalid */
7916
7917/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7918FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7919/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7920FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7921/* Opcode 0xf3 0x0f 0xe3 - invalid */
7922/* Opcode 0xf2 0x0f 0xe3 - invalid */
7923
7924/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7925FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7926/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7927FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7928/* Opcode 0xf3 0x0f 0xe4 - invalid */
7929/* Opcode 0xf2 0x0f 0xe4 - invalid */
7930
7931/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7932FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7933/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7934FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7935/* Opcode 0xf3 0x0f 0xe5 - invalid */
7936/* Opcode 0xf2 0x0f 0xe5 - invalid */
7937
7938/* Opcode 0x0f 0xe6 - invalid */
7939/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7940FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7941/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7942FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7943/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7944FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7945
7946
7947/** Opcode 0x0f 0xe7. */
7948FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7949{
7950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7951 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7952 {
7953 /*
7954 * Register, memory.
7955 */
7956/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
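/* Both forms are non-temporal stores; the cache hint has no architectural
   effect, so we implement them as ordinary stores. */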
7957 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7958 {
7959
7960 case IEM_OP_PRF_SIZE_OP: /* SSE */
7961 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7962 IEM_MC_BEGIN(0, 2);
7963 IEM_MC_LOCAL(uint128_t, uSrc);
7964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7965
7966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7968 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7969 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7970
7971 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7972 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7973
7974 IEM_MC_ADVANCE_RIP();
7975 IEM_MC_END();
7976 break;
7977
7978 case 0: /* MMX */
7979 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7980 IEM_MC_BEGIN(0, 2);
7981 IEM_MC_LOCAL(uint64_t, uSrc);
7982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7983
7984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7986 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7987 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7988
7989 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7990 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7991
7992 IEM_MC_ADVANCE_RIP();
7993 IEM_MC_END();
7994 break;
7995
7996 default:
7997 return IEMOP_RAISE_INVALID_OPCODE();
7998 }
7999 }
8000 /* The register, register encoding is invalid. */
8001 else
8002 return IEMOP_RAISE_INVALID_OPCODE();
8003 return VINF_SUCCESS;
8004}
8005
8006
8007/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8008FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8009/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
8010FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
8011/* Opcode 0xf3 0x0f 0xe8 - invalid */
8012/* Opcode 0xf2 0x0f 0xe8 - invalid */
8013
8014/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8015FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8016/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
8017FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
8018/* Opcode 0xf3 0x0f 0xe9 - invalid */
8019/* Opcode 0xf2 0x0f 0xe9 - invalid */
8020
8021/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8022FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8023/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
8024FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
8025/* Opcode 0xf3 0x0f 0xea - invalid */
8026/* Opcode 0xf2 0x0f 0xea - invalid */
8027
8028/** Opcode 0x0f 0xeb - por Pq, Qq */
8029FNIEMOP_STUB(iemOp_por_Pq_Qq);
8030/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
8031FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
8032/* Opcode 0xf3 0x0f 0xeb - invalid */
8033/* Opcode 0xf2 0x0f 0xeb - invalid */
8034
8035/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8036FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8037/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
8038FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
8039/* Opcode 0xf3 0x0f 0xec - invalid */
8040/* Opcode 0xf2 0x0f 0xec - invalid */
8041
8042/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8043FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8044/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
8045FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
8046/* Opcode 0xf3 0x0f 0xed - invalid */
8047/* Opcode 0xf2 0x0f 0xed - invalid */
8048
8049/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8050FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8051/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
8052FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
8053/* Opcode 0xf3 0x0f 0xee - invalid */
8054/* Opcode 0xf2 0x0f 0xee - invalid */
8055
8056
8057/** Opcode 0x0f 0xef - pxor Pq, Qq */
8058FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8059{
8060 IEMOP_MNEMONIC(pxor, "pxor");
8061 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8062}
8063
8064/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
8065FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
8066{
8067 IEMOP_MNEMONIC(vpxor, "vpxor");
8068 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8069}
8070
8071/* Opcode 0xf3 0x0f 0xef - invalid */
8072/* Opcode 0xf2 0x0f 0xef - invalid */
8073
8074/* Opcode 0x0f 0xf0 - invalid */
8075/* Opcode 0x66 0x0f 0xf0 - invalid */
8076/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
8077FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
8078
8079/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8080FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8081/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
8082FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
8083/* Opcode 0xf2 0x0f 0xf1 - invalid */
8084
8085/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8086FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8087/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
8088FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
8089/* Opcode 0xf2 0x0f 0xf2 - invalid */
8090
8091/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8092FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8093/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
8094FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
8095/* Opcode 0xf2 0x0f 0xf3 - invalid */
8096
8097/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8098FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8099/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
8100FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
8101/* Opcode 0xf2 0x0f 0xf4 - invalid */
8102
8103/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8104FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8105/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
8106FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
8107/* Opcode 0xf2 0x0f 0xf5 - invalid */
8108
8109/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8110FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8111/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
8112FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
8113/* Opcode 0xf2 0x0f 0xf6 - invalid */
8114
8115/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8116FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8117/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
8118FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
8119/* Opcode 0xf2 0x0f 0xf7 - invalid */
8120
8121/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8122FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8123/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
8124FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
8125/* Opcode 0xf2 0x0f 0xf8 - invalid */
8126
8127/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8128FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8129/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
8130FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
8131/* Opcode 0xf2 0x0f 0xf9 - invalid */
8132
8133/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8134FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8135/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
8136FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
8137/* Opcode 0xf2 0x0f 0xfa - invalid */
8138
8139/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8140FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8141/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
8142FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
8143/* Opcode 0xf2 0x0f 0xfb - invalid */
8144
8145/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8146FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8147/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
8148FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
8149/* Opcode 0xf2 0x0f 0xfc - invalid */
8150
8151/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8152FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8153/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
8154FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
8155/* Opcode 0xf2 0x0f 0xfd - invalid */
8156
8157/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8158FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8159/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
8160FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
8161/* Opcode 0xf2 0x0f 0xfe - invalid */
8162
8163
8164/** Opcode **** 0x0f 0xff - UD0 */
8165FNIEMOP_DEF(iemOp_ud0)
8166{
8167 IEMOP_MNEMONIC(ud0, "ud0");
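 /* Intel CPUs consume a ModR/M byte (and decode any memory operand) before
    raising #UD for ud0, so we do the same; for other vendors we fault on the
    opcode alone. */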
8168 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8169 {
8170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8171#ifndef TST_IEM_CHECK_MC
8172 RTGCPTR GCPtrEff;
8173 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8174 if (rcStrict != VINF_SUCCESS)
8175 return rcStrict;
8176#endif
8177 IEMOP_HLP_DONE_DECODING();
8178 }
8179 return IEMOP_RAISE_INVALID_OPCODE();
8180}
8181
8182
8183
8184/** Repeats a_fn four times. For decoding tables. */
8185#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
8186
8187IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8188{
8189 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8190 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8191 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8192 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8193 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8194 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8195 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8196 /* 0x06 */ IEMOP_X4(iemOp_clts),
8197 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8198 /* 0x08 */ IEMOP_X4(iemOp_invd),
8199 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8200 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8201 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8202 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8203 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8204 /* 0x0e */ IEMOP_X4(iemOp_femms),
8205 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8206
8207 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8208 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8209 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8210 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8211 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8212 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8213 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8214 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8215 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8216 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8217 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8218 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8219 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8220 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8221 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8222 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8223
8224 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8225 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8226 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8227 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8228 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8229 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8230 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8231 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8232 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8233 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
8234 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8235 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8236 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8237 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8238 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8239 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8240
8241 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8242 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8243 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8244 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8245 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8246 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8247 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8248 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8249 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8250 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8251 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8252 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8253 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8254 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8255 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8256 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8257
8258 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8259 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8260 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8261 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8262 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8263 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8264 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8265 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8266 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8267 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8268 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8269 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8270 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8271 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8272 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8273 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8274
8275 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8276 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8277 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8278 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8279 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8280 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8281 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8282 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8283 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8284 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8285 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8286 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8287 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8288 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8289 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8290 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8291
8292 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8293 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8294 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8295 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8296 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8297 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8298 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8299 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8300 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8301 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8302 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8303 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8304 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8305 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8306 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8307 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8308
8309 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8310 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8311 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8312 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8313 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8314 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8315 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8316 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8317
8318 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8319 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8320 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8321 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8322 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8323 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8324 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8325 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8326
8327 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8328 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8329 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8330 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8331 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8332 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8333 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8334 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8335 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8336 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8337 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8338 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8339 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8340 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8341 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8342 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8343
8344 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8345 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8346 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8347 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8348 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8349 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8350 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8351 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8352 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8353 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8354 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8355 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8356 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8357 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8358 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8359 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8360
8361 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8362 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8363 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8364 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8365 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8366 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8367 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8368 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8369 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8370 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8371 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8372 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8373 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8374 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8375 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8376 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8377
8378 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8379 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8380 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8381 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8382 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8383 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8384 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8385 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8386 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8387 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8388 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8389 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8390 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8391 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8392 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8393 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8394
8395 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8396 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8397 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8398 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8399 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8400 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8401 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
8402 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8403 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8404 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8405 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8406 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8407 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8408 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8409 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8410 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8411
8412 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8413 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8414 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8415 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8416 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8417 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8418 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8419 /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
8420 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8421 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8422 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8423 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8424 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8425 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8426 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8427 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8428
8429 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8430 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8431 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8432 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8433 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8434 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8435 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8436 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8437 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8438 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8439 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8440 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8441 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8442 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8443 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8444 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8445
8446 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8447 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8448 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8449 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8450 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8451 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8452 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8453 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8454 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8455 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8456 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8457 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8458 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8459 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8460 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8461 /* 0xff */ IEMOP_X4(iemOp_ud0),
8462};
8463AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8464/** @} */
8465
8466
8467/** @name One byte opcodes.
8468 *
8469 * @{
8470 */
8471
8472/** Opcode 0x00. */
8473FNIEMOP_DEF(iemOp_add_Eb_Gb)
8474{
8475 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
8476 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
8477}
8478
8479
8480/** Opcode 0x01. */
8481FNIEMOP_DEF(iemOp_add_Ev_Gv)
8482{
8483 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
8484 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
8485}
8486
8487
8488/** Opcode 0x02. */
8489FNIEMOP_DEF(iemOp_add_Gb_Eb)
8490{
8491 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
8492 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
8493}
8494
8495
8496/** Opcode 0x03. */
8497FNIEMOP_DEF(iemOp_add_Gv_Ev)
8498{
8499 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
8500 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
8501}
8502
8503
8504/** Opcode 0x04. */
8505FNIEMOP_DEF(iemOp_add_Al_Ib)
8506{
8507 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
8508 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
8509}
8510
8511
8512/** Opcode 0x05. */
8513FNIEMOP_DEF(iemOp_add_eAX_Iz)
8514{
8515 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
8516 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
8517}
8518
8519
8520/** Opcode 0x06. */
8521FNIEMOP_DEF(iemOp_push_ES)
8522{
8523 IEMOP_MNEMONIC(push_es, "push es");
8524 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
8525}
8526
8527
8528/** Opcode 0x07. */
8529FNIEMOP_DEF(iemOp_pop_ES)
8530{
8531 IEMOP_MNEMONIC(pop_es, "pop es");
8532 IEMOP_HLP_NO_64BIT();
8533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8534 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
8535}
8536
8537
8538/** Opcode 0x08. */
8539FNIEMOP_DEF(iemOp_or_Eb_Gb)
8540{
8541 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
8542 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8543 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
8544}
8545
8546
8547/** Opcode 0x09. */
8548FNIEMOP_DEF(iemOp_or_Ev_Gv)
8549{
8550 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
8551 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8552 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
8553}
8554
8555
8556/** Opcode 0x0a. */
8557FNIEMOP_DEF(iemOp_or_Gb_Eb)
8558{
8559 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
8560 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8561 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
8562}
8563
8564
8565/** Opcode 0x0b. */
8566FNIEMOP_DEF(iemOp_or_Gv_Ev)
8567{
8568 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
8569 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8570 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
8571}
8572
8573
8574/** Opcode 0x0c. */
8575FNIEMOP_DEF(iemOp_or_Al_Ib)
8576{
8577 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
8578 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8579 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
8580}
8581
8582
8583/** Opcode 0x0d. */
8584FNIEMOP_DEF(iemOp_or_eAX_Iz)
8585{
8586 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
8587 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8588 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
8589}
8590
8591
8592/** Opcode 0x0e. */
8593FNIEMOP_DEF(iemOp_push_CS)
8594{
8595 IEMOP_MNEMONIC(push_cs, "push cs");
8596 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
8597}
8598
8599
8600/** Opcode 0x0f. */
8601FNIEMOP_DEF(iemOp_2byteEscape)
8602{
8603#ifdef VBOX_STRICT
8604 static bool s_fTested = false;
8605 if (RT_LIKELY(s_fTested)) { /* likely */ }
8606 else
8607 {
8608 s_fTested = true;
8609 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
8610 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
8611 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
8612 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
8613 }
8614#endif
8615
8616 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8617
8618 /** @todo POP CS on 8086, undefined on 80186. */
8619 IEMOP_HLP_MIN_286();
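 /* Each two byte opcode has four map entries selected by the last SIMD
    prefix seen: index 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 (hence
    the 1024 entry AssertCompile on the table above). */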
8620 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
8621}
8622
8623/** Opcode 0x10. */
8624FNIEMOP_DEF(iemOp_adc_Eb_Gb)
8625{
8626 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
8627 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
8628}
8629
8630
8631/** Opcode 0x11. */
8632FNIEMOP_DEF(iemOp_adc_Ev_Gv)
8633{
8634 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
8635 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
8636}
8637
8638
8639/** Opcode 0x12. */
8640FNIEMOP_DEF(iemOp_adc_Gb_Eb)
8641{
8642 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
8643 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
8644}
8645
8646
8647/** Opcode 0x13. */
8648FNIEMOP_DEF(iemOp_adc_Gv_Ev)
8649{
8650 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
8651 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
8652}
8653
8654
8655/** Opcode 0x14. */
8656FNIEMOP_DEF(iemOp_adc_Al_Ib)
8657{
8658 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
8659 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
8660}
8661
8662
8663/** Opcode 0x15. */
8664FNIEMOP_DEF(iemOp_adc_eAX_Iz)
8665{
8666 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
8667 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
8668}
8669
8670
8671/** Opcode 0x16. */
8672FNIEMOP_DEF(iemOp_push_SS)
8673{
8674 IEMOP_MNEMONIC(push_ss, "push ss");
8675 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
8676}
8677
8678
8679/** Opcode 0x17. */
8680FNIEMOP_DEF(iemOp_pop_SS)
8681{
8682 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
8683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8684 IEMOP_HLP_NO_64BIT();
8685 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
8686}
8687
8688
8689/** Opcode 0x18. */
8690FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
8691{
8692 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
8693 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
8694}
8695
8696
8697/** Opcode 0x19. */
8698FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
8699{
8700 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
8701 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
8702}
8703
8704
8705/** Opcode 0x1a. */
8706FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
8707{
8708 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
8709 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
8710}
8711
8712
8713/** Opcode 0x1b. */
8714FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
8715{
8716 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
8717 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
8718}
8719
8720
8721/** Opcode 0x1c. */
8722FNIEMOP_DEF(iemOp_sbb_Al_Ib)
8723{
8724 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
8725 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
8726}
8727
8728
8729/** Opcode 0x1d. */
8730FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
8731{
8732 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
8733 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
8734}
8735
8736
8737/** Opcode 0x1e. */
8738FNIEMOP_DEF(iemOp_push_DS)
8739{
8740 IEMOP_MNEMONIC(push_ds, "push ds");
8741 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
8742}
8743
8744
8745/** Opcode 0x1f. */
8746FNIEMOP_DEF(iemOp_pop_DS)
8747{
8748 IEMOP_MNEMONIC(pop_ds, "pop ds");
8749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8750 IEMOP_HLP_NO_64BIT();
8751 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
8752}
8753
8754
8755/** Opcode 0x20. */
8756FNIEMOP_DEF(iemOp_and_Eb_Gb)
8757{
8758 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
8759 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8760 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
8761}
8762
8763
8764/** Opcode 0x21. */
8765FNIEMOP_DEF(iemOp_and_Ev_Gv)
8766{
8767 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
8768 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8769 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
8770}
8771
8772
8773/** Opcode 0x22. */
8774FNIEMOP_DEF(iemOp_and_Gb_Eb)
8775{
8776 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
8777 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8778 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
8779}
8780
8781
8782/** Opcode 0x23. */
8783FNIEMOP_DEF(iemOp_and_Gv_Ev)
8784{
8785 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
8786 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8787 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
8788}
8789
8790
8791/** Opcode 0x24. */
8792FNIEMOP_DEF(iemOp_and_Al_Ib)
8793{
8794 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
8795 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8796 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
8797}
8798
8799
8800/** Opcode 0x25. */
8801FNIEMOP_DEF(iemOp_and_eAX_Iz)
8802{
8803 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
8804 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8805 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
8806}
8807
8808
8809/** Opcode 0x26. */
8810FNIEMOP_DEF(iemOp_seg_ES)
8811{
8812 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
8813 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
8814 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
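 /* iEffSeg selects the segment for the instruction's explicit memory
    operands; overrides never apply to implicit stack accesses or to the
    ES:rDI destination of string instructions. */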
8815
8816 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8817 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8818}
8819
8820
8821/** Opcode 0x27. */
8822FNIEMOP_DEF(iemOp_daa)
8823{
8824 IEMOP_MNEMONIC(daa_AL, "daa AL");
8825 IEMOP_HLP_NO_64BIT();
8826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8827 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8828 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
8829}
8830
8831
8832/** Opcode 0x28. */
8833FNIEMOP_DEF(iemOp_sub_Eb_Gb)
8834{
8835 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
8836 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
8837}
8838
8839
8840/** Opcode 0x29. */
8841FNIEMOP_DEF(iemOp_sub_Ev_Gv)
8842{
8843 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
8844 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
8845}
8846
8847
8848/** Opcode 0x2a. */
8849FNIEMOP_DEF(iemOp_sub_Gb_Eb)
8850{
8851 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
8852 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
8853}
8854
8855
8856/** Opcode 0x2b. */
8857FNIEMOP_DEF(iemOp_sub_Gv_Ev)
8858{
8859 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
8860 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
8861}
8862
8863
8864/** Opcode 0x2c. */
8865FNIEMOP_DEF(iemOp_sub_Al_Ib)
8866{
8867 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
8868 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
8869}
8870
8871
8872/** Opcode 0x2d. */
8873FNIEMOP_DEF(iemOp_sub_eAX_Iz)
8874{
8875 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
8876 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
8877}
8878
8879
8880/** Opcode 0x2e. */
8881FNIEMOP_DEF(iemOp_seg_CS)
8882{
8883 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
8884 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
8885 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
8886
8887 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8888 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8889}
8890
8891
8892/** Opcode 0x2f. */
8893FNIEMOP_DEF(iemOp_das)
8894{
8895 IEMOP_MNEMONIC(das_AL, "das AL");
8896 IEMOP_HLP_NO_64BIT();
8897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8898 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8899 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
8900}
8901
8902
8903/** Opcode 0x30. */
8904FNIEMOP_DEF(iemOp_xor_Eb_Gb)
8905{
8906 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
8907 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8908 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
8909}
8910
8911
8912/** Opcode 0x31. */
8913FNIEMOP_DEF(iemOp_xor_Ev_Gv)
8914{
8915 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
8916 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8917 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
8918}
8919
8920
8921/** Opcode 0x32. */
8922FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8923{
8924 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8925 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8926 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8927}
8928
8929
8930/** Opcode 0x33. */
8931FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8932{
8933 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8934 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8935 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8936}
8937
8938
8939/** Opcode 0x34. */
8940FNIEMOP_DEF(iemOp_xor_Al_Ib)
8941{
8942 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8943 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8944 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8945}
8946
8947
8948/** Opcode 0x35. */
8949FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8950{
8951 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8952 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8953 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8954}
8955
8956
8957/** Opcode 0x36. */
8958FNIEMOP_DEF(iemOp_seg_SS)
8959{
8960 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8961 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8962 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8963
8964 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8965 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8966}
8967
8968
8969/** Opcode 0x37. */
8970FNIEMOP_STUB(iemOp_aaa);
8971
8972
8973/** Opcode 0x38. */
8974FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8975{
8976 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8977 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8978}
8979
8980
8981/** Opcode 0x39. */
8982FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8983{
8984 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8985 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8986}
8987
8988
8989/** Opcode 0x3a. */
8990FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8991{
8992 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8993 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8994}
8995
8996
8997/** Opcode 0x3b. */
8998FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8999{
9000 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
9001 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
9002}
9003
9004
9005/** Opcode 0x3c. */
9006FNIEMOP_DEF(iemOp_cmp_Al_Ib)
9007{
9008 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
9009 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
9010}
9011
9012
9013/** Opcode 0x3d. */
9014FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
9015{
9016 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
9017 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
9018}
9019
9020
9021/** Opcode 0x3e. */
9022FNIEMOP_DEF(iemOp_seg_DS)
9023{
9024 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
9025 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
9026 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
9027
9028 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9029 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9030}
9031
9032
9033/** Opcode 0x3f. */
9034FNIEMOP_STUB(iemOp_aas);
9035
9036/**
9037 * Common 'inc/dec/not/neg register' helper.
9038 */
9039FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
9040{
9041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9042 switch (pVCpu->iem.s.enmEffOpSize)
9043 {
9044 case IEMMODE_16BIT:
9045 IEM_MC_BEGIN(2, 0);
9046 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9047 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9048 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9049 IEM_MC_REF_EFLAGS(pEFlags);
9050 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
9051 IEM_MC_ADVANCE_RIP();
9052 IEM_MC_END();
9053 return VINF_SUCCESS;
9054
9055 case IEMMODE_32BIT:
9056 IEM_MC_BEGIN(2, 0);
9057 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9058 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9059 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9060 IEM_MC_REF_EFLAGS(pEFlags);
9061 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
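 /* In 64-bit mode a 32-bit destination write zeroes the upper half of
    the full register. */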
9062 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9063 IEM_MC_ADVANCE_RIP();
9064 IEM_MC_END();
9065 return VINF_SUCCESS;
9066
9067 case IEMMODE_64BIT:
9068 IEM_MC_BEGIN(2, 0);
9069 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9070 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9071 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9072 IEM_MC_REF_EFLAGS(pEFlags);
9073 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
9074 IEM_MC_ADVANCE_RIP();
9075 IEM_MC_END();
9076 return VINF_SUCCESS;
9077 }
9078 return VINF_SUCCESS;
9079}
9080
9081
9082/** Opcode 0x40. */
9083FNIEMOP_DEF(iemOp_inc_eAX)
9084{
9085 /*
9086 * This is a REX prefix in 64-bit mode.
9087 */
9088 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9089 {
9090 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
9091 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
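 /* Prefixes simply restart decoding at the following byte; a REX prefix
    only takes effect when it is the last prefix before the opcode, which
    is why stale REX state is cleared by the helper above. */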
9092
9093 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9094 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9095 }
9096
9097 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
9098 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
9099}
9100
9101
9102/** Opcode 0x41. */
9103FNIEMOP_DEF(iemOp_inc_eCX)
9104{
9105 /*
9106 * This is a REX prefix in 64-bit mode.
9107 */
9108 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9109 {
9110 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
9111 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
9112 pVCpu->iem.s.uRexB = 1 << 3;
9113
9114 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9115 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9116 }
9117
9118 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
9119 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
9120}
9121
9122
9123/** Opcode 0x42. */
9124FNIEMOP_DEF(iemOp_inc_eDX)
9125{
9126 /*
9127 * This is a REX prefix in 64-bit mode.
9128 */
9129 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9130 {
9131 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
9132 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
9133 pVCpu->iem.s.uRexIndex = 1 << 3;
9134
9135 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9136 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9137 }
9138
9139 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
9140 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
9141}
9142
9143
9144
9145/** Opcode 0x43. */
9146FNIEMOP_DEF(iemOp_inc_eBX)
9147{
9148 /*
9149 * This is a REX prefix in 64-bit mode.
9150 */
9151 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9152 {
9153 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
9154 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
9155 pVCpu->iem.s.uRexB = 1 << 3;
9156 pVCpu->iem.s.uRexIndex = 1 << 3;
9157
9158 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9159 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9160 }
9161
9162 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
9163 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
9164}
9165
9166
9167/** Opcode 0x44. */
9168FNIEMOP_DEF(iemOp_inc_eSP)
9169{
9170 /*
9171 * This is a REX prefix in 64-bit mode.
9172 */
9173 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9174 {
9175 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
9176 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
9177 pVCpu->iem.s.uRexReg = 1 << 3;
9178
9179 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9180 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9181 }
9182
9183 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
9184 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
9185}
9186
9187
9188/** Opcode 0x45. */
9189FNIEMOP_DEF(iemOp_inc_eBP)
9190{
9191 /*
9192 * This is a REX prefix in 64-bit mode.
9193 */
9194 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9195 {
9196 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
9197 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
9198 pVCpu->iem.s.uRexReg = 1 << 3;
9199 pVCpu->iem.s.uRexB = 1 << 3;
9200
9201 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9202 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9203 }
9204
9205 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
9206 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
9207}
9208
9209
9210/** Opcode 0x46. */
9211FNIEMOP_DEF(iemOp_inc_eSI)
9212{
9213 /*
9214 * This is a REX prefix in 64-bit mode.
9215 */
9216 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9217 {
9218 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
9219 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
9220 pVCpu->iem.s.uRexReg = 1 << 3;
9221 pVCpu->iem.s.uRexIndex = 1 << 3;
9222
9223 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9224 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9225 }
9226
9227 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
9228 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
9229}
9230
9231
9232/** Opcode 0x47. */
9233FNIEMOP_DEF(iemOp_inc_eDI)
9234{
9235 /*
9236 * This is a REX prefix in 64-bit mode.
9237 */
9238 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9239 {
9240 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
9241 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
9242 pVCpu->iem.s.uRexReg = 1 << 3;
9243 pVCpu->iem.s.uRexB = 1 << 3;
9244 pVCpu->iem.s.uRexIndex = 1 << 3;
9245
9246 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9247 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9248 }
9249
9250 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
9251 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
9252}
9253
9254
9255/** Opcode 0x48. */
9256FNIEMOP_DEF(iemOp_dec_eAX)
9257{
9258 /*
9259 * This is a REX prefix in 64-bit mode.
9260 */
9261 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9262 {
9263 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
9264 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
9265 iemRecalEffOpSize(pVCpu);
9266
9267 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9268 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9269 }
9270
9271 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
9272 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
9273}
9274
9275
9276/** Opcode 0x49. */
9277FNIEMOP_DEF(iemOp_dec_eCX)
9278{
9279 /*
9280 * This is a REX prefix in 64-bit mode.
9281 */
9282 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9283 {
9284 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
9285 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9286 pVCpu->iem.s.uRexB = 1 << 3;
9287 iemRecalEffOpSize(pVCpu);
9288
9289 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9290 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9291 }
9292
9293 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
9294 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
9295}
9296
9297
9298/** Opcode 0x4a. */
9299FNIEMOP_DEF(iemOp_dec_eDX)
9300{
9301 /*
9302 * This is a REX prefix in 64-bit mode.
9303 */
9304 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9305 {
9306 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
9307 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9308 pVCpu->iem.s.uRexIndex = 1 << 3;
9309 iemRecalEffOpSize(pVCpu);
9310
9311 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9312 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9313 }
9314
9315 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
9316 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
9317}
9318
9319
9320/** Opcode 0x4b. */
9321FNIEMOP_DEF(iemOp_dec_eBX)
9322{
9323 /*
9324 * This is a REX prefix in 64-bit mode.
9325 */
9326 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9327 {
9328 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
9329 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9330 pVCpu->iem.s.uRexB = 1 << 3;
9331 pVCpu->iem.s.uRexIndex = 1 << 3;
9332 iemRecalEffOpSize(pVCpu);
9333
9334 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9335 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9336 }
9337
9338 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
9339 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
9340}
9341
9342
9343/** Opcode 0x4c. */
9344FNIEMOP_DEF(iemOp_dec_eSP)
9345{
9346 /*
9347 * This is a REX prefix in 64-bit mode.
9348 */
9349 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9350 {
9351 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
9352 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
9353 pVCpu->iem.s.uRexReg = 1 << 3;
9354 iemRecalEffOpSize(pVCpu);
9355
9356 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9357 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9358 }
9359
9360 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
9361 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
9362}
9363
9364
9365/** Opcode 0x4d. */
9366FNIEMOP_DEF(iemOp_dec_eBP)
9367{
9368 /*
9369 * This is a REX prefix in 64-bit mode.
9370 */
9371 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9372 {
9373 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
9374 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9375 pVCpu->iem.s.uRexReg = 1 << 3;
9376 pVCpu->iem.s.uRexB = 1 << 3;
9377 iemRecalEffOpSize(pVCpu);
9378
9379 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9380 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9381 }
9382
9383 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
9384 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
9385}
9386
9387
9388/** Opcode 0x4e. */
9389FNIEMOP_DEF(iemOp_dec_eSI)
9390{
9391 /*
9392 * This is a REX prefix in 64-bit mode.
9393 */
9394 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9395 {
9396 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
9397 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9398 pVCpu->iem.s.uRexReg = 1 << 3;
9399 pVCpu->iem.s.uRexIndex = 1 << 3;
9400 iemRecalEffOpSize(pVCpu);
9401
9402 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9403 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9404 }
9405
9406 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
9407 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
9408}
9409
9410
9411/** Opcode 0x4f. */
9412FNIEMOP_DEF(iemOp_dec_eDI)
9413{
9414 /*
9415 * This is a REX prefix in 64-bit mode.
9416 */
9417 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9418 {
9419 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
9420 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9421 pVCpu->iem.s.uRexReg = 1 << 3;
9422 pVCpu->iem.s.uRexB = 1 << 3;
9423 pVCpu->iem.s.uRexIndex = 1 << 3;
9424 iemRecalEffOpSize(pVCpu);
9425
9426 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9427 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9428 }
9429
9430 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
9431 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
9432}
9433
9434
9435/**
9436 * Common 'push register' helper.
9437 */
9438FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9439{
9440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9441 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9442 {
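 /* Pushes default to a 64-bit operand size in 64-bit mode and a 32-bit
    push cannot be encoded; the 0x66 prefix gives a 16-bit push. */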
9443 iReg |= pVCpu->iem.s.uRexB;
9444 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9445 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9446 }
9447
9448 switch (pVCpu->iem.s.enmEffOpSize)
9449 {
9450 case IEMMODE_16BIT:
9451 IEM_MC_BEGIN(0, 1);
9452 IEM_MC_LOCAL(uint16_t, u16Value);
9453 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9454 IEM_MC_PUSH_U16(u16Value);
9455 IEM_MC_ADVANCE_RIP();
9456 IEM_MC_END();
9457 break;
9458
9459 case IEMMODE_32BIT:
9460 IEM_MC_BEGIN(0, 1);
9461 IEM_MC_LOCAL(uint32_t, u32Value);
9462 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9463 IEM_MC_PUSH_U32(u32Value);
9464 IEM_MC_ADVANCE_RIP();
9465 IEM_MC_END();
9466 break;
9467
9468 case IEMMODE_64BIT:
9469 IEM_MC_BEGIN(0, 1);
9470 IEM_MC_LOCAL(uint64_t, u64Value);
9471 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9472 IEM_MC_PUSH_U64(u64Value);
9473 IEM_MC_ADVANCE_RIP();
9474 IEM_MC_END();
9475 break;
9476 }
9477
9478 return VINF_SUCCESS;
9479}
9480
9481
9482/** Opcode 0x50. */
9483FNIEMOP_DEF(iemOp_push_eAX)
9484{
9485 IEMOP_MNEMONIC(push_rAX, "push rAX");
9486 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
9487}
9488
9489
9490/** Opcode 0x51. */
9491FNIEMOP_DEF(iemOp_push_eCX)
9492{
9493 IEMOP_MNEMONIC(push_rCX, "push rCX");
9494 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
9495}
9496
9497
9498/** Opcode 0x52. */
9499FNIEMOP_DEF(iemOp_push_eDX)
9500{
9501 IEMOP_MNEMONIC(push_rDX, "push rDX");
9502 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
9503}
9504
9505
9506/** Opcode 0x53. */
9507FNIEMOP_DEF(iemOp_push_eBX)
9508{
9509 IEMOP_MNEMONIC(push_rBX, "push rBX");
9510 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
9511}
9512
9513
9514/** Opcode 0x54. */
9515FNIEMOP_DEF(iemOp_push_eSP)
9516{
9517 IEMOP_MNEMONIC(push_rSP, "push rSP");
9518 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9519 {
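 /* Quirk: the 8086/8088 pushes the new, already decremented SP value,
    whereas the 80286 and later push the original value. */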
9520 IEM_MC_BEGIN(0, 1);
9521 IEM_MC_LOCAL(uint16_t, u16Value);
9522 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9523 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9524 IEM_MC_PUSH_U16(u16Value);
9525 IEM_MC_ADVANCE_RIP();
9526 IEM_MC_END();
9527 }
9528 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9529}
9530
9531
9532/** Opcode 0x55. */
9533FNIEMOP_DEF(iemOp_push_eBP)
9534{
9535 IEMOP_MNEMONIC(push_rBP, "push rBP");
9536 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
9537}
9538
9539
9540/** Opcode 0x56. */
9541FNIEMOP_DEF(iemOp_push_eSI)
9542{
9543 IEMOP_MNEMONIC(push_rSI, "push rSI");
9544 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
9545}
9546
9547
9548/** Opcode 0x57. */
9549FNIEMOP_DEF(iemOp_push_eDI)
9550{
9551 IEMOP_MNEMONIC(push_rDI, "push rDI");
9552 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
9553}
9554
9555
9556/**
9557 * Common 'pop register' helper.
9558 */
9559FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9560{
9561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9562 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9563 {
9564 iReg |= pVCpu->iem.s.uRexB;
9565 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9566 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9567 }
9568
9569 switch (pVCpu->iem.s.enmEffOpSize)
9570 {
9571 case IEMMODE_16BIT:
9572 IEM_MC_BEGIN(0, 1);
9573 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9574 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9575 IEM_MC_POP_U16(pu16Dst);
9576 IEM_MC_ADVANCE_RIP();
9577 IEM_MC_END();
9578 break;
9579
9580 case IEMMODE_32BIT:
9581 IEM_MC_BEGIN(0, 1);
9582 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9583 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9584 IEM_MC_POP_U32(pu32Dst);
9585 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase */
9586 IEM_MC_ADVANCE_RIP();
9587 IEM_MC_END();
9588 break;
9589
9590 case IEMMODE_64BIT:
9591 IEM_MC_BEGIN(0, 1);
9592 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9593 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9594 IEM_MC_POP_U64(pu64Dst);
9595 IEM_MC_ADVANCE_RIP();
9596 IEM_MC_END();
9597 break;
9598 }
9599
9600 return VINF_SUCCESS;
9601}
9602
9603
9604/** Opcode 0x58. */
9605FNIEMOP_DEF(iemOp_pop_eAX)
9606{
9607 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
9608 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
9609}
9610
9611
9612/** Opcode 0x59. */
9613FNIEMOP_DEF(iemOp_pop_eCX)
9614{
9615 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
9616 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
9617}
9618
9619
9620/** Opcode 0x5a. */
9621FNIEMOP_DEF(iemOp_pop_eDX)
9622{
9623 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
9624 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
9625}
9626
9627
9628/** Opcode 0x5b. */
9629FNIEMOP_DEF(iemOp_pop_eBX)
9630{
9631 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
9632 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
9633}
9634
9635
9636/** Opcode 0x5c. */
9637FNIEMOP_DEF(iemOp_pop_eSP)
9638{
9639 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
9640 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9641 {
9642 if (pVCpu->iem.s.uRexB)
9643 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
9644 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9645 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9646 }
9647
9648 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
9649 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
9650 /** @todo add testcase for this instruction. */
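 /* The stack pointer is incremented before the popped value is stored,
    so for 'pop rSP' the popped value must win; popping into a local and
    storing it afterwards models that ordering. */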
9651 switch (pVCpu->iem.s.enmEffOpSize)
9652 {
9653 case IEMMODE_16BIT:
9654 IEM_MC_BEGIN(0, 1);
9655 IEM_MC_LOCAL(uint16_t, u16Dst);
9656 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
9657 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
9658 IEM_MC_ADVANCE_RIP();
9659 IEM_MC_END();
9660 break;
9661
9662 case IEMMODE_32BIT:
9663 IEM_MC_BEGIN(0, 1);
9664 IEM_MC_LOCAL(uint32_t, u32Dst);
9665 IEM_MC_POP_U32(&u32Dst);
9666 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
9667 IEM_MC_ADVANCE_RIP();
9668 IEM_MC_END();
9669 break;
9670
9671 case IEMMODE_64BIT:
9672 IEM_MC_BEGIN(0, 1);
9673 IEM_MC_LOCAL(uint64_t, u64Dst);
9674 IEM_MC_POP_U64(&u64Dst);
9675 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
9676 IEM_MC_ADVANCE_RIP();
9677 IEM_MC_END();
9678 break;
9679 }
9680
9681 return VINF_SUCCESS;
9682}
9683
9684
9685/** Opcode 0x5d. */
9686FNIEMOP_DEF(iemOp_pop_eBP)
9687{
9688 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
9689 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
9690}
9691
9692
9693/** Opcode 0x5e. */
9694FNIEMOP_DEF(iemOp_pop_eSI)
9695{
9696 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
9697 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
9698}
9699
9700
9701/** Opcode 0x5f. */
9702FNIEMOP_DEF(iemOp_pop_eDI)
9703{
9704 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
9705 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
9706}
9707
9708
9709/** Opcode 0x60. */
9710FNIEMOP_DEF(iemOp_pusha)
9711{
9712 IEMOP_MNEMONIC(pusha, "pusha");
9713 IEMOP_HLP_MIN_186();
9714 IEMOP_HLP_NO_64BIT();
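 /* Pushes AX, CX, DX, BX, the original SP, BP, SI and DI, in that order. */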
9715 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9716 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
9717 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9718 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
9719}
9720
9721
9722/** Opcode 0x61. */
9723FNIEMOP_DEF(iemOp_popa)
9724{
9725 IEMOP_MNEMONIC(popa, "popa");
9726 IEMOP_HLP_MIN_186();
9727 IEMOP_HLP_NO_64BIT();
9728 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9729 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
9730 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9731 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
9732}
9733
9734
9735/** Opcode 0x62. */
9736FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
9737// IEMOP_HLP_MIN_186();
9738
9739
9740/** Opcode 0x63 - non-64-bit modes. */
9741FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
9742{
9743 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
9744 IEMOP_HLP_MIN_286();
9745 IEMOP_HLP_NO_REAL_OR_V86_MODE();
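 /* ARPL: when the RPL field (bits 1:0) of the destination selector is
    lower than that of the source, it is raised to match and ZF is set;
    otherwise ZF is cleared and the destination is left untouched. */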
9746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9747
9748 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9749 {
9750 /* Register */
9751 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9752 IEM_MC_BEGIN(3, 0);
9753 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9754 IEM_MC_ARG(uint16_t, u16Src, 1);
9755 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9756
9757 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9758 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
9759 IEM_MC_REF_EFLAGS(pEFlags);
9760 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9761
9762 IEM_MC_ADVANCE_RIP();
9763 IEM_MC_END();
9764 }
9765 else
9766 {
9767 /* Memory */
9768 IEM_MC_BEGIN(3, 2);
9769 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9770 IEM_MC_ARG(uint16_t, u16Src, 1);
9771 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9773
9774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9775 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9776 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9777 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9778 IEM_MC_FETCH_EFLAGS(EFlags);
9779 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9780
9781 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9782 IEM_MC_COMMIT_EFLAGS(EFlags);
9783 IEM_MC_ADVANCE_RIP();
9784 IEM_MC_END();
9785 }
9786 return VINF_SUCCESS;
9788}
9789
9790
9791/** Opcode 0x63.
9792 * @note This is a weird one. It works like a regular move instruction if
9793 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
9794 * @todo This definitely needs a testcase to verify the odd cases. */
9795FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9796{
9797 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
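 /* With REX.W the 32-bit source is sign-extended: e.g. 'movsxd rax, ecx'
    (REX.W 63 /r) turns ECX=0x80000000 into RAX=0xffffffff80000000. */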
9798
9799 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
9800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9801
9802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9803 {
9804 /*
9805 * Register to register.
9806 */
9807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9808 IEM_MC_BEGIN(0, 1);
9809 IEM_MC_LOCAL(uint64_t, u64Value);
9810 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9811 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9812 IEM_MC_ADVANCE_RIP();
9813 IEM_MC_END();
9814 }
9815 else
9816 {
9817 /*
9818 * We're loading a register from memory.
9819 */
9820 IEM_MC_BEGIN(0, 2);
9821 IEM_MC_LOCAL(uint64_t, u64Value);
9822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9825 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9826 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9827 IEM_MC_ADVANCE_RIP();
9828 IEM_MC_END();
9829 }
9830 return VINF_SUCCESS;
9831}
9832
9833
9834/** Opcode 0x64. */
9835FNIEMOP_DEF(iemOp_seg_FS)
9836{
9837 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9838 IEMOP_HLP_MIN_386();
9839
9840 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9841 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9842
9843 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9844 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9845}
9846
9847
9848/** Opcode 0x65. */
9849FNIEMOP_DEF(iemOp_seg_GS)
9850{
9851 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9852 IEMOP_HLP_MIN_386();
9853
9854 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9855 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9856
9857 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9858 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9859}
9860
9861
9862/** Opcode 0x66. */
9863FNIEMOP_DEF(iemOp_op_size)
9864{
9865 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9866 IEMOP_HLP_MIN_386();
9867
9868 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9869 iemRecalEffOpSize(pVCpu);
9870
9871 /* For the 4 entry opcode tables, the operand size prefix doesn't count
9872 when REPZ or REPNZ are present. */
9873 if (pVCpu->iem.s.idxPrefix == 0)
9874 pVCpu->iem.s.idxPrefix = 1;
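 /* So e.g. '66 f3 0f xx' and 'f3 66 0f xx' both end up decoding via the
    0xf3 column of the four entry tables. */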
9875
9876 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9877 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9878}
9879
9880
9881/** Opcode 0x67. */
9882FNIEMOP_DEF(iemOp_addr_size)
9883{
9884 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9885 IEMOP_HLP_MIN_386();
9886
9887 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
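 /* The prefix toggles between the two address sizes available in the
    current mode; 64-bit code can only drop to 32-bit addressing, 16-bit
    addressing is not encodable there. */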
9888 switch (pVCpu->iem.s.enmDefAddrMode)
9889 {
9890 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9891 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9892 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9893 default: AssertFailed();
9894 }
9895
9896 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9897 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9898}
9899
9900
9901/** Opcode 0x68. */
9902FNIEMOP_DEF(iemOp_push_Iz)
9903{
9904 IEMOP_MNEMONIC(push_Iz, "push Iz");
9905 IEMOP_HLP_MIN_186();
9906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9907 switch (pVCpu->iem.s.enmEffOpSize)
9908 {
9909 case IEMMODE_16BIT:
9910 {
9911 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9913 IEM_MC_BEGIN(0,0);
9914 IEM_MC_PUSH_U16(u16Imm);
9915 IEM_MC_ADVANCE_RIP();
9916 IEM_MC_END();
9917 return VINF_SUCCESS;
9918 }
9919
9920 case IEMMODE_32BIT:
9921 {
9922 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9924 IEM_MC_BEGIN(0,0);
9925 IEM_MC_PUSH_U32(u32Imm);
9926 IEM_MC_ADVANCE_RIP();
9927 IEM_MC_END();
9928 return VINF_SUCCESS;
9929 }
9930
9931 case IEMMODE_64BIT:
9932 {
9933 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9935 IEM_MC_BEGIN(0,0);
9936 IEM_MC_PUSH_U64(u64Imm);
9937 IEM_MC_ADVANCE_RIP();
9938 IEM_MC_END();
9939 return VINF_SUCCESS;
9940 }
9941
9942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9943 }
9944}
9945
9946
9947/** Opcode 0x69. */
9948FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9949{
9950 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9951 IEMOP_HLP_MIN_186();
9952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9953 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
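 /* CF and OF are set when the signed result had to be truncated to the
    destination width; SF, ZF, AF and PF are architecturally undefined. */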
9954
9955 switch (pVCpu->iem.s.enmEffOpSize)
9956 {
9957 case IEMMODE_16BIT:
9958 {
9959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9960 {
9961 /* register operand */
9962 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9964
9965 IEM_MC_BEGIN(3, 1);
9966 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9967 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9968 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9969 IEM_MC_LOCAL(uint16_t, u16Tmp);
9970
9971 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9972 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9973 IEM_MC_REF_EFLAGS(pEFlags);
9974 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9975 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9976
9977 IEM_MC_ADVANCE_RIP();
9978 IEM_MC_END();
9979 }
9980 else
9981 {
9982 /* memory operand */
9983 IEM_MC_BEGIN(3, 2);
9984 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9985 IEM_MC_ARG(uint16_t, u16Src, 1);
9986 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9987 IEM_MC_LOCAL(uint16_t, u16Tmp);
9988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9989
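 /* The third parameter is the number of immediate bytes following the
    ModR/M encoding (0, 1, 2 or 4 in this file); it is needed to resolve
    RIP-relative operands against the correct instruction end. */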
9990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9991 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9992 IEM_MC_ASSIGN(u16Src, u16Imm);
9993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9994 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9995 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9996 IEM_MC_REF_EFLAGS(pEFlags);
9997 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9998 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9999
10000 IEM_MC_ADVANCE_RIP();
10001 IEM_MC_END();
10002 }
10003 return VINF_SUCCESS;
10004 }
10005
10006 case IEMMODE_32BIT:
10007 {
10008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10009 {
10010 /* register operand */
10011 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10013
10014 IEM_MC_BEGIN(3, 1);
10015 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10016 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
10017 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10018 IEM_MC_LOCAL(uint32_t, u32Tmp);
10019
10020 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10021 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10022 IEM_MC_REF_EFLAGS(pEFlags);
10023 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10024 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10025
10026 IEM_MC_ADVANCE_RIP();
10027 IEM_MC_END();
10028 }
10029 else
10030 {
10031 /* memory operand */
10032 IEM_MC_BEGIN(3, 2);
10033 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10034 IEM_MC_ARG(uint32_t, u32Src, 1);
10035 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10036 IEM_MC_LOCAL(uint32_t, u32Tmp);
10037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10038
10039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10040 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10041 IEM_MC_ASSIGN(u32Src, u32Imm);
10042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10043 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10044 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10045 IEM_MC_REF_EFLAGS(pEFlags);
10046 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10047 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10048
10049 IEM_MC_ADVANCE_RIP();
10050 IEM_MC_END();
10051 }
10052 return VINF_SUCCESS;
10053 }
10054
10055 case IEMMODE_64BIT:
10056 {
10057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10058 {
10059 /* register operand */
10060 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10062
10063 IEM_MC_BEGIN(3, 1);
10064 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10065 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
10066 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10067 IEM_MC_LOCAL(uint64_t, u64Tmp);
10068
10069 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10070 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10071 IEM_MC_REF_EFLAGS(pEFlags);
10072 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10073 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10074
10075 IEM_MC_ADVANCE_RIP();
10076 IEM_MC_END();
10077 }
10078 else
10079 {
10080 /* memory operand */
10081 IEM_MC_BEGIN(3, 2);
10082 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10083 IEM_MC_ARG(uint64_t, u64Src, 1);
10084 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10085 IEM_MC_LOCAL(uint64_t, u64Tmp);
10086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10087
10088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10089 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10090 IEM_MC_ASSIGN(u64Src, u64Imm);
10091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10092 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10093 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10094 IEM_MC_REF_EFLAGS(pEFlags);
10095 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10096 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10097
10098 IEM_MC_ADVANCE_RIP();
10099 IEM_MC_END();
10100 }
10101 return VINF_SUCCESS;
10102 }
10103 }
10104 AssertFailedReturn(VERR_IEM_IPE_9);
10105}
10106
10107
10108/** Opcode 0x6a. */
10109FNIEMOP_DEF(iemOp_push_Ib)
10110{
10111 IEMOP_MNEMONIC(push_Ib, "push Ib");
10112 IEMOP_HLP_MIN_186();
10113 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10115 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10116
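 /* The byte immediate is sign-extended to the effective operand size;
    the implicit int8_t conversions in the push macros below do that. */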
10117 IEM_MC_BEGIN(0,0);
10118 switch (pVCpu->iem.s.enmEffOpSize)
10119 {
10120 case IEMMODE_16BIT:
10121 IEM_MC_PUSH_U16(i8Imm);
10122 break;
10123 case IEMMODE_32BIT:
10124 IEM_MC_PUSH_U32(i8Imm);
10125 break;
10126 case IEMMODE_64BIT:
10127 IEM_MC_PUSH_U64(i8Imm);
10128 break;
10129 }
10130 IEM_MC_ADVANCE_RIP();
10131 IEM_MC_END();
10132 return VINF_SUCCESS;
10133}
10134
10135
10136/** Opcode 0x6b. */
10137FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
10138{
10139 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
10140 IEMOP_HLP_MIN_186();
10141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10142 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
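 /* The byte immediate is sign-extended to the operand size before the
    multiplication, hence the (int8_t) cast and _S8_SX_ fetchers below. */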
10143
10144 switch (pVCpu->iem.s.enmEffOpSize)
10145 {
10146 case IEMMODE_16BIT:
10147 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10148 {
10149 /* register operand */
10150 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10152
10153 IEM_MC_BEGIN(3, 1);
10154 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10155 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
10156 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10157 IEM_MC_LOCAL(uint16_t, u16Tmp);
10158
10159 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10160 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10161 IEM_MC_REF_EFLAGS(pEFlags);
10162 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10163 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10164
10165 IEM_MC_ADVANCE_RIP();
10166 IEM_MC_END();
10167 }
10168 else
10169 {
10170 /* memory operand */
10171 IEM_MC_BEGIN(3, 2);
10172 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10173 IEM_MC_ARG(uint16_t, u16Src, 1);
10174 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10175 IEM_MC_LOCAL(uint16_t, u16Tmp);
10176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10177
10178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10179 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
10180 IEM_MC_ASSIGN(u16Src, u16Imm);
10181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10182 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10183 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10184 IEM_MC_REF_EFLAGS(pEFlags);
10185 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10186 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10187
10188 IEM_MC_ADVANCE_RIP();
10189 IEM_MC_END();
10190 }
10191 return VINF_SUCCESS;
10192
10193 case IEMMODE_32BIT:
10194 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10195 {
10196 /* register operand */
10197 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10199
10200 IEM_MC_BEGIN(3, 1);
10201 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10202 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
10203 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10204 IEM_MC_LOCAL(uint32_t, u32Tmp);
10205
10206 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10207 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10208 IEM_MC_REF_EFLAGS(pEFlags);
10209 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10210 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10211
10212 IEM_MC_ADVANCE_RIP();
10213 IEM_MC_END();
10214 }
10215 else
10216 {
10217 /* memory operand */
10218 IEM_MC_BEGIN(3, 2);
10219 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10220 IEM_MC_ARG(uint32_t, u32Src, 1);
10221 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10222 IEM_MC_LOCAL(uint32_t, u32Tmp);
10223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10224
10225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10226 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
10227 IEM_MC_ASSIGN(u32Src, u32Imm);
10228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10229 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10230 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10231 IEM_MC_REF_EFLAGS(pEFlags);
10232 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10233 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10234
10235 IEM_MC_ADVANCE_RIP();
10236 IEM_MC_END();
10237 }
10238 return VINF_SUCCESS;
10239
10240 case IEMMODE_64BIT:
10241 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10242 {
10243 /* register operand */
10244 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10246
10247 IEM_MC_BEGIN(3, 1);
10248 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10249 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
10250 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10251 IEM_MC_LOCAL(uint64_t, u64Tmp);
10252
10253 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10254 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10255 IEM_MC_REF_EFLAGS(pEFlags);
10256 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10257 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10258
10259 IEM_MC_ADVANCE_RIP();
10260 IEM_MC_END();
10261 }
10262 else
10263 {
10264 /* memory operand */
10265 IEM_MC_BEGIN(3, 2);
10266 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10267 IEM_MC_ARG(uint64_t, u64Src, 1);
10268 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10269 IEM_MC_LOCAL(uint64_t, u64Tmp);
10270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10271
10272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10273 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
10274 IEM_MC_ASSIGN(u64Src, u64Imm);
10275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10276 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10277 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10278 IEM_MC_REF_EFLAGS(pEFlags);
10279 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10280 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10281
10282 IEM_MC_ADVANCE_RIP();
10283 IEM_MC_END();
10284 }
10285 return VINF_SUCCESS;
10286 }
10287 AssertFailedReturn(VERR_IEM_IPE_8);
10288}
10289
10290
10291/** Opcode 0x6c. */
10292FNIEMOP_DEF(iemOp_insb_Yb_DX)
10293{
10294 IEMOP_HLP_MIN_186();
10295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10296 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10297 {
10298 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10299 switch (pVCpu->iem.s.enmEffAddrMode)
10300 {
10301 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10302 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10303 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10305 }
10306 }
10307 else
10308 {
10309 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10310 switch (pVCpu->iem.s.enmEffAddrMode)
10311 {
10312 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10313 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10314 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10316 }
10317 }
10318}
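/*
 * Note: the prefix check above treats both 0xf3 (REP/REPE) and 0xf2 (REPNE)
 * as a plain REP; the E/NE distinction only affects CMPS and SCAS, so for
 * the string I/O instructions either prefix repeats the operation.
 */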
10319
10320
10321/** Opcode 0x6d. */
10322FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10323{
10324 IEMOP_HLP_MIN_186();
10325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10326 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10327 {
10328 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10329 switch (pVCpu->iem.s.enmEffOpSize)
10330 {
10331 case IEMMODE_16BIT:
10332 switch (pVCpu->iem.s.enmEffAddrMode)
10333 {
10334 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10335 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10336 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10338 }
10339 break;
10340 case IEMMODE_64BIT:
10341 case IEMMODE_32BIT:
10342 switch (pVCpu->iem.s.enmEffAddrMode)
10343 {
10344 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10345 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10346 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10348 }
10349 break;
10350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10351 }
10352 }
10353 else
10354 {
10355 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
10356 switch (pVCpu->iem.s.enmEffOpSize)
10357 {
10358 case IEMMODE_16BIT:
10359 switch (pVCpu->iem.s.enmEffAddrMode)
10360 {
10361 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
10362 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
10363 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
10364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10365 }
10366 break;
10367 case IEMMODE_64BIT:
10368 case IEMMODE_32BIT:
10369 switch (pVCpu->iem.s.enmEffAddrMode)
10370 {
10371 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
10372 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
10373 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
10374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10375 }
10376 break;
10377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10378 }
10379 }
10380}
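/*
 * Note: in the operand size switches above, the IEMMODE_64BIT case falls
 * through to IEMMODE_32BIT on purpose; INS/OUTS have no 64-bit operand
 * size, so a REX.W prefix still yields the 32-bit operation.
 */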
10381
10382
10383/** Opcode 0x6e. */
10384FNIEMOP_DEF(iemOp_outsb_Yb_DX)
10385{
10386 IEMOP_HLP_MIN_186();
10387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10388 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10389 {
10390 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
10391 switch (pVCpu->iem.s.enmEffAddrMode)
10392 {
10393 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10394 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10395 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10396 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10397 }
10398 }
10399 else
10400 {
10401 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
10402 switch (pVCpu->iem.s.enmEffAddrMode)
10403 {
10404 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10405 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10406 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10408 }
10409 }
10410}
10411
10412
10413/** Opcode 0x6f. */
10414FNIEMOP_DEF(iemOp_outswd_Yv_DX)
10415{
10416 IEMOP_HLP_MIN_186();
10417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10418 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10419 {
10420 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
10421 switch (pVCpu->iem.s.enmEffOpSize)
10422 {
10423 case IEMMODE_16BIT:
10424 switch (pVCpu->iem.s.enmEffAddrMode)
10425 {
10426 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10427 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10428 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10430 }
10431 break;
10432 case IEMMODE_64BIT:
10433 case IEMMODE_32BIT:
10434 switch (pVCpu->iem.s.enmEffAddrMode)
10435 {
10436 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10437 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10438 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10440 }
10441 break;
10442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10443 }
10444 }
10445 else
10446 {
10447 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
10448 switch (pVCpu->iem.s.enmEffOpSize)
10449 {
10450 case IEMMODE_16BIT:
10451 switch (pVCpu->iem.s.enmEffAddrMode)
10452 {
10453 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10454 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10455 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10457 }
10458 break;
10459 case IEMMODE_64BIT:
10460 case IEMMODE_32BIT:
10461 switch (pVCpu->iem.s.enmEffAddrMode)
10462 {
10463 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10464 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10465 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10467 }
10468 break;
10469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10470 }
10471 }
10472}
10473
10474
10475/** Opcode 0x70. */
10476FNIEMOP_DEF(iemOp_jo_Jb)
10477{
10478 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
10479 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10481 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10482
10483 IEM_MC_BEGIN(0, 0);
10484 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10485 IEM_MC_REL_JMP_S8(i8Imm);
10486 } IEM_MC_ELSE() {
10487 IEM_MC_ADVANCE_RIP();
10488 } IEM_MC_ENDIF();
10489 IEM_MC_END();
10490 return VINF_SUCCESS;
10491}
10492
10493
10494/** Opcode 0x71. */
10495FNIEMOP_DEF(iemOp_jno_Jb)
10496{
10497 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
10498 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10500 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10501
10502 IEM_MC_BEGIN(0, 0);
10503 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10504 IEM_MC_ADVANCE_RIP();
10505 } IEM_MC_ELSE() {
10506 IEM_MC_REL_JMP_S8(i8Imm);
10507 } IEM_MC_ENDIF();
10508 IEM_MC_END();
10509 return VINF_SUCCESS;
10510}
10511
10512/** Opcode 0x72. */
10513FNIEMOP_DEF(iemOp_jc_Jb)
10514{
10515 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
10516 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10518 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10519
10520 IEM_MC_BEGIN(0, 0);
10521 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10522 IEM_MC_REL_JMP_S8(i8Imm);
10523 } IEM_MC_ELSE() {
10524 IEM_MC_ADVANCE_RIP();
10525 } IEM_MC_ENDIF();
10526 IEM_MC_END();
10527 return VINF_SUCCESS;
10528}
10529
10530
10531/** Opcode 0x73. */
10532FNIEMOP_DEF(iemOp_jnc_Jb)
10533{
10534 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
10535 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10537 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10538
10539 IEM_MC_BEGIN(0, 0);
10540 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10541 IEM_MC_ADVANCE_RIP();
10542 } IEM_MC_ELSE() {
10543 IEM_MC_REL_JMP_S8(i8Imm);
10544 } IEM_MC_ENDIF();
10545 IEM_MC_END();
10546 return VINF_SUCCESS;
10547}
10548
10549
10550/** Opcode 0x74. */
10551FNIEMOP_DEF(iemOp_je_Jb)
10552{
10553 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
10554 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10556 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10557
10558 IEM_MC_BEGIN(0, 0);
10559 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10560 IEM_MC_REL_JMP_S8(i8Imm);
10561 } IEM_MC_ELSE() {
10562 IEM_MC_ADVANCE_RIP();
10563 } IEM_MC_ENDIF();
10564 IEM_MC_END();
10565 return VINF_SUCCESS;
10566}
10567
10568
10569/** Opcode 0x75. */
10570FNIEMOP_DEF(iemOp_jne_Jb)
10571{
10572 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
10573 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10575 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10576
10577 IEM_MC_BEGIN(0, 0);
10578 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10579 IEM_MC_ADVANCE_RIP();
10580 } IEM_MC_ELSE() {
10581 IEM_MC_REL_JMP_S8(i8Imm);
10582 } IEM_MC_ENDIF();
10583 IEM_MC_END();
10584 return VINF_SUCCESS;
10585}
10586
10587
10588/** Opcode 0x76. */
10589FNIEMOP_DEF(iemOp_jbe_Jb)
10590{
10591 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
10592 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10594 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10595
10596 IEM_MC_BEGIN(0, 0);
10597 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10598 IEM_MC_REL_JMP_S8(i8Imm);
10599 } IEM_MC_ELSE() {
10600 IEM_MC_ADVANCE_RIP();
10601 } IEM_MC_ENDIF();
10602 IEM_MC_END();
10603 return VINF_SUCCESS;
10604}
10605
10606
10607/** Opcode 0x77. */
10608FNIEMOP_DEF(iemOp_jnbe_Jb)
10609{
10610 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
10611 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10613 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10614
10615 IEM_MC_BEGIN(0, 0);
10616 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10617 IEM_MC_ADVANCE_RIP();
10618 } IEM_MC_ELSE() {
10619 IEM_MC_REL_JMP_S8(i8Imm);
10620 } IEM_MC_ENDIF();
10621 IEM_MC_END();
10622 return VINF_SUCCESS;
10623}
10624
10625
10626/** Opcode 0x78. */
10627FNIEMOP_DEF(iemOp_js_Jb)
10628{
10629 IEMOP_MNEMONIC(js_Jb, "js Jb");
10630 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10632 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10633
10634 IEM_MC_BEGIN(0, 0);
10635 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10636 IEM_MC_REL_JMP_S8(i8Imm);
10637 } IEM_MC_ELSE() {
10638 IEM_MC_ADVANCE_RIP();
10639 } IEM_MC_ENDIF();
10640 IEM_MC_END();
10641 return VINF_SUCCESS;
10642}
10643
10644
10645/** Opcode 0x79. */
10646FNIEMOP_DEF(iemOp_jns_Jb)
10647{
10648 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
10649 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10651 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10652
10653 IEM_MC_BEGIN(0, 0);
10654 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10655 IEM_MC_ADVANCE_RIP();
10656 } IEM_MC_ELSE() {
10657 IEM_MC_REL_JMP_S8(i8Imm);
10658 } IEM_MC_ENDIF();
10659 IEM_MC_END();
10660 return VINF_SUCCESS;
10661}
10662
10663
10664/** Opcode 0x7a. */
10665FNIEMOP_DEF(iemOp_jp_Jb)
10666{
10667 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10668 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10670 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10671
10672 IEM_MC_BEGIN(0, 0);
10673 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10674 IEM_MC_REL_JMP_S8(i8Imm);
10675 } IEM_MC_ELSE() {
10676 IEM_MC_ADVANCE_RIP();
10677 } IEM_MC_ENDIF();
10678 IEM_MC_END();
10679 return VINF_SUCCESS;
10680}
10681
10682
10683/** Opcode 0x7b. */
10684FNIEMOP_DEF(iemOp_jnp_Jb)
10685{
10686 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10687 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10689 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10690
10691 IEM_MC_BEGIN(0, 0);
10692 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10693 IEM_MC_ADVANCE_RIP();
10694 } IEM_MC_ELSE() {
10695 IEM_MC_REL_JMP_S8(i8Imm);
10696 } IEM_MC_ENDIF();
10697 IEM_MC_END();
10698 return VINF_SUCCESS;
10699}
10700
10701
10702/** Opcode 0x7c. */
10703FNIEMOP_DEF(iemOp_jl_Jb)
10704{
10705 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
10706 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10708 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10709
10710 IEM_MC_BEGIN(0, 0);
10711 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10712 IEM_MC_REL_JMP_S8(i8Imm);
10713 } IEM_MC_ELSE() {
10714 IEM_MC_ADVANCE_RIP();
10715 } IEM_MC_ENDIF();
10716 IEM_MC_END();
10717 return VINF_SUCCESS;
10718}
10719
10720
10721/** Opcode 0x7d. */
10722FNIEMOP_DEF(iemOp_jnl_Jb)
10723{
10724 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
10725 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10727 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10728
10729 IEM_MC_BEGIN(0, 0);
10730 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10731 IEM_MC_ADVANCE_RIP();
10732 } IEM_MC_ELSE() {
10733 IEM_MC_REL_JMP_S8(i8Imm);
10734 } IEM_MC_ENDIF();
10735 IEM_MC_END();
10736 return VINF_SUCCESS;
10737}
10738
10739
10740/** Opcode 0x7e. */
10741FNIEMOP_DEF(iemOp_jle_Jb)
10742{
10743 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
10744 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10746 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10747
10748 IEM_MC_BEGIN(0, 0);
10749 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10750 IEM_MC_REL_JMP_S8(i8Imm);
10751 } IEM_MC_ELSE() {
10752 IEM_MC_ADVANCE_RIP();
10753 } IEM_MC_ENDIF();
10754 IEM_MC_END();
10755 return VINF_SUCCESS;
10756}
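/*
 * A minimal sketch (illustration only, hypothetical helpers) of the signed
 * conditions tested by jl/jnge and jle/jng above: "less" is SF != OF, and
 * "less or equal" additionally branches when ZF is set.
 */
#if 0 /* example only */
static bool iemExampleCondJl(uint32_t fEFlags)  /* hypothetical helper */
{
    return !!(fEFlags & X86_EFL_SF) != !!(fEFlags & X86_EFL_OF);
}

static bool iemExampleCondJle(uint32_t fEFlags) /* hypothetical helper */
{
    return (fEFlags & X86_EFL_ZF) || iemExampleCondJl(fEFlags);
}
#endif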
10757
10758
10759/** Opcode 0x7f. */
10760FNIEMOP_DEF(iemOp_jnle_Jb)
10761{
10762 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
10763 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10765 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10766
10767 IEM_MC_BEGIN(0, 0);
10768 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10769 IEM_MC_ADVANCE_RIP();
10770 } IEM_MC_ELSE() {
10771 IEM_MC_REL_JMP_S8(i8Imm);
10772 } IEM_MC_ENDIF();
10773 IEM_MC_END();
10774 return VINF_SUCCESS;
10775}
10776
10777
10778/** Opcode 0x80. */
10779FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
10780{
10781 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10782 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10783 {
10784 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
10785 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
10786 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
10787 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
10788 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
10789 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
10790 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
10791 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
10792 }
10793 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10794
10795 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10796 {
10797 /* register target */
10798 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10800 IEM_MC_BEGIN(3, 0);
10801 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10802 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10803 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10804
10805 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10806 IEM_MC_REF_EFLAGS(pEFlags);
10807 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10808
10809 IEM_MC_ADVANCE_RIP();
10810 IEM_MC_END();
10811 }
10812 else
10813 {
10814 /* memory target */
10815 uint32_t fAccess;
10816 if (pImpl->pfnLockedU8)
10817 fAccess = IEM_ACCESS_DATA_RW;
10818 else /* CMP */
10819 fAccess = IEM_ACCESS_DATA_R;
10820 IEM_MC_BEGIN(3, 2);
10821 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10822 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10824
10825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10826 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10827 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10828 if (pImpl->pfnLockedU8)
10829 IEMOP_HLP_DONE_DECODING();
10830 else
10831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10832
10833 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10834 IEM_MC_FETCH_EFLAGS(EFlags);
10835 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10836 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10837 else
10838 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
10839
10840 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
10841 IEM_MC_COMMIT_EFLAGS(EFlags);
10842 IEM_MC_ADVANCE_RIP();
10843 IEM_MC_END();
10844 }
10845 return VINF_SUCCESS;
10846}
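/*
 * Note on the memory form above: a LOCK prefix is only accepted for the
 * read-modify-write operations (those with a pfnLockedU8 worker); for /7
 * (CMP) the destination is mapped read-only and the
 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX path rejects the prefix.
 */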
10847
10848
10849/** Opcode 0x81. */
10850FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
10851{
10852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10853 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10854 {
10855 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
10856 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
10857 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
10858 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
10859 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
10860 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
10861 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
10862 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
10863 }
10864 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10865
10866 switch (pVCpu->iem.s.enmEffOpSize)
10867 {
10868 case IEMMODE_16BIT:
10869 {
10870 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10871 {
10872 /* register target */
10873 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10875 IEM_MC_BEGIN(3, 0);
10876 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10877 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
10878 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10879
10880 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10881 IEM_MC_REF_EFLAGS(pEFlags);
10882 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10883
10884 IEM_MC_ADVANCE_RIP();
10885 IEM_MC_END();
10886 }
10887 else
10888 {
10889 /* memory target */
10890 uint32_t fAccess;
10891 if (pImpl->pfnLockedU16)
10892 fAccess = IEM_ACCESS_DATA_RW;
10893 else /* CMP */
10894 fAccess = IEM_ACCESS_DATA_R;
10895 IEM_MC_BEGIN(3, 2);
10896 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10897 IEM_MC_ARG(uint16_t, u16Src, 1);
10898 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10900
10901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10902 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10903 IEM_MC_ASSIGN(u16Src, u16Imm);
10904 if (pImpl->pfnLockedU16)
10905 IEMOP_HLP_DONE_DECODING();
10906 else
10907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10908 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10909 IEM_MC_FETCH_EFLAGS(EFlags);
10910 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10911 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10912 else
10913 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10914
10915 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10916 IEM_MC_COMMIT_EFLAGS(EFlags);
10917 IEM_MC_ADVANCE_RIP();
10918 IEM_MC_END();
10919 }
10920 break;
10921 }
10922
10923 case IEMMODE_32BIT:
10924 {
10925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10926 {
10927 /* register target */
10928 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10930 IEM_MC_BEGIN(3, 0);
10931 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10932 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10933 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10934
10935 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10936 IEM_MC_REF_EFLAGS(pEFlags);
10937 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10938 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10939
10940 IEM_MC_ADVANCE_RIP();
10941 IEM_MC_END();
10942 }
10943 else
10944 {
10945 /* memory target */
10946 uint32_t fAccess;
10947 if (pImpl->pfnLockedU32)
10948 fAccess = IEM_ACCESS_DATA_RW;
10949 else /* CMP */
10950 fAccess = IEM_ACCESS_DATA_R;
10951 IEM_MC_BEGIN(3, 2);
10952 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10953 IEM_MC_ARG(uint32_t, u32Src, 1);
10954 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10956
10957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10958 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10959 IEM_MC_ASSIGN(u32Src, u32Imm);
10960 if (pImpl->pfnLockedU32)
10961 IEMOP_HLP_DONE_DECODING();
10962 else
10963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10964 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10965 IEM_MC_FETCH_EFLAGS(EFlags);
10966 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10967 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10968 else
10969 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10970
10971 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10972 IEM_MC_COMMIT_EFLAGS(EFlags);
10973 IEM_MC_ADVANCE_RIP();
10974 IEM_MC_END();
10975 }
10976 break;
10977 }
10978
10979 case IEMMODE_64BIT:
10980 {
10981 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10982 {
10983 /* register target */
10984 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10986 IEM_MC_BEGIN(3, 0);
10987 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10988 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10989 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10990
10991 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10992 IEM_MC_REF_EFLAGS(pEFlags);
10993 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10994
10995 IEM_MC_ADVANCE_RIP();
10996 IEM_MC_END();
10997 }
10998 else
10999 {
11000 /* memory target */
11001 uint32_t fAccess;
11002 if (pImpl->pfnLockedU64)
11003 fAccess = IEM_ACCESS_DATA_RW;
11004 else /* CMP */
11005 fAccess = IEM_ACCESS_DATA_R;
11006 IEM_MC_BEGIN(3, 2);
11007 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11008 IEM_MC_ARG(uint64_t, u64Src, 1);
11009 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11011
11012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
11013 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
11014 if (pImpl->pfnLockedU64)
11015 IEMOP_HLP_DONE_DECODING();
11016 else
11017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11018 IEM_MC_ASSIGN(u64Src, u64Imm);
11019 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11020 IEM_MC_FETCH_EFLAGS(EFlags);
11021 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11022 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11023 else
11024 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
11025
11026 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
11027 IEM_MC_COMMIT_EFLAGS(EFlags);
11028 IEM_MC_ADVANCE_RIP();
11029 IEM_MC_END();
11030 }
11031 break;
11032 }
11033 }
11034 return VINF_SUCCESS;
11035}
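/*
 * Note on the 64-bit cases above: group 1 has no 64-bit immediate form, so
 * Iz is fetched as a 32-bit immediate and sign-extended to 64 bits.  A
 * minimal sketch of that conversion (illustration only, hypothetical helper
 * and values):
 */
#if 0 /* example only */
static uint64_t iemExampleSxImm32(uint32_t uImm32)
{
    return (uint64_t)(int64_t)(int32_t)uImm32;  /* 0x80000000 -> 0xffffffff80000000 */
}
#endif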
11036
11037
11038/** Opcode 0x82. */
11039FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
11040{
11041 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
11042 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
11043}
11044
11045
11046/** Opcode 0x83. */
11047FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
11048{
11049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11050 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11051 {
11052 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
11053 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
11054 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
11055 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
11056 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
11057 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
11058 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
11059 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
11060 }
11061 /* Note! The OR, AND and XOR forms appear to be present on CPUs prior
11062 to the 386, even though they are absent from the Intel reference
11063 manuals and some 3rd party opcode listings. */
11064 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
11065
11066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11067 {
11068 /*
11069 * Register target
11070 */
11071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11072 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11073 switch (pVCpu->iem.s.enmEffOpSize)
11074 {
11075 case IEMMODE_16BIT:
11076 {
11077 IEM_MC_BEGIN(3, 0);
11078 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11079 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
11080 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11081
11082 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11083 IEM_MC_REF_EFLAGS(pEFlags);
11084 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11085
11086 IEM_MC_ADVANCE_RIP();
11087 IEM_MC_END();
11088 break;
11089 }
11090
11091 case IEMMODE_32BIT:
11092 {
11093 IEM_MC_BEGIN(3, 0);
11094 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11095 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
11096 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11097
11098 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11099 IEM_MC_REF_EFLAGS(pEFlags);
11100 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11101 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11102
11103 IEM_MC_ADVANCE_RIP();
11104 IEM_MC_END();
11105 break;
11106 }
11107
11108 case IEMMODE_64BIT:
11109 {
11110 IEM_MC_BEGIN(3, 0);
11111 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11112 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
11113 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11114
11115 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11116 IEM_MC_REF_EFLAGS(pEFlags);
11117 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11118
11119 IEM_MC_ADVANCE_RIP();
11120 IEM_MC_END();
11121 break;
11122 }
11123 }
11124 }
11125 else
11126 {
11127 /*
11128 * Memory target.
11129 */
11130 uint32_t fAccess;
11131 if (pImpl->pfnLockedU16)
11132 fAccess = IEM_ACCESS_DATA_RW;
11133 else /* CMP */
11134 fAccess = IEM_ACCESS_DATA_R;
11135
11136 switch (pVCpu->iem.s.enmEffOpSize)
11137 {
11138 case IEMMODE_16BIT:
11139 {
11140 IEM_MC_BEGIN(3, 2);
11141 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11142 IEM_MC_ARG(uint16_t, u16Src, 1);
11143 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11145
11146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11147 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11148 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
11149 if (pImpl->pfnLockedU16)
11150 IEMOP_HLP_DONE_DECODING();
11151 else
11152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11153 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11154 IEM_MC_FETCH_EFLAGS(EFlags);
11155 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11156 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11157 else
11158 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
11159
11160 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
11161 IEM_MC_COMMIT_EFLAGS(EFlags);
11162 IEM_MC_ADVANCE_RIP();
11163 IEM_MC_END();
11164 break;
11165 }
11166
11167 case IEMMODE_32BIT:
11168 {
11169 IEM_MC_BEGIN(3, 2);
11170 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11171 IEM_MC_ARG(uint32_t, u32Src, 1);
11172 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11174
11175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11176 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11177 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
11178 if (pImpl->pfnLockedU32)
11179 IEMOP_HLP_DONE_DECODING();
11180 else
11181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11182 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11183 IEM_MC_FETCH_EFLAGS(EFlags);
11184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11186 else
11187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
11188
11189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
11190 IEM_MC_COMMIT_EFLAGS(EFlags);
11191 IEM_MC_ADVANCE_RIP();
11192 IEM_MC_END();
11193 break;
11194 }
11195
11196 case IEMMODE_64BIT:
11197 {
11198 IEM_MC_BEGIN(3, 2);
11199 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11200 IEM_MC_ARG(uint64_t, u64Src, 1);
11201 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11203
11204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11205 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11206 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
11207 if (pImpl->pfnLockedU64)
11208 IEMOP_HLP_DONE_DECODING();
11209 else
11210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11211 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11212 IEM_MC_FETCH_EFLAGS(EFlags);
11213 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11214 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11215 else
11216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
11217
11218 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
11219 IEM_MC_COMMIT_EFLAGS(EFlags);
11220 IEM_MC_ADVANCE_RIP();
11221 IEM_MC_END();
11222 break;
11223 }
11224 }
11225 }
11226 return VINF_SUCCESS;
11227}
11228
11229
11230/** Opcode 0x84. */
11231FNIEMOP_DEF(iemOp_test_Eb_Gb)
11232{
11233 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
11234 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11235 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
11236}
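/*
 * A minimal sketch (illustration only, not the real g_iemAImpl_test worker)
 * of what TEST computes: the AND result is only used for the flags and is
 * never written back, which is why the binary operator helper maps the
 * destination read-only for it.
 */
#if 0 /* example only */
static uint32_t iemExampleTestU8(uint8_t uDst, uint8_t uSrc) /* hypothetical helper */
{
    uint8_t const uResult = uDst & uSrc;    /* result is discarded, flags kept */
    uint32_t      fEFlags = 0;              /* CF and OF are cleared */
    if (!uResult)
        fEFlags |= X86_EFL_ZF;
    if (uResult & 0x80)
        fEFlags |= X86_EFL_SF;
    uint8_t bPar = uResult;                 /* PF is set on even parity */
    bPar ^= bPar >> 4; bPar ^= bPar >> 2; bPar ^= bPar >> 1;
    if (!(bPar & 1))
        fEFlags |= X86_EFL_PF;
    return fEFlags;
}
#endif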
11237
11238
11239/** Opcode 0x85. */
11240FNIEMOP_DEF(iemOp_test_Ev_Gv)
11241{
11242 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
11243 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11244 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
11245}
11246
11247
11248/** Opcode 0x86. */
11249FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
11250{
11251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11252 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
11253
11254 /*
11255 * If rm is denoting a register, no more instruction bytes.
11256 */
11257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11258 {
11259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11260
11261 IEM_MC_BEGIN(0, 2);
11262 IEM_MC_LOCAL(uint8_t, uTmp1);
11263 IEM_MC_LOCAL(uint8_t, uTmp2);
11264
11265 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11266 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11267 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11268 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11269
11270 IEM_MC_ADVANCE_RIP();
11271 IEM_MC_END();
11272 }
11273 else
11274 {
11275 /*
11276 * We're accessing memory.
11277 */
11278/** @todo the register must be committed separately! */
11279 IEM_MC_BEGIN(2, 2);
11280 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
11281 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11283
11284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11285 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11286 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11287 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
11288 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
11289
11290 IEM_MC_ADVANCE_RIP();
11291 IEM_MC_END();
11292 }
11293 return VINF_SUCCESS;
11294}
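/*
 * Note on the memory form above: on real hardware XCHG with a memory
 * operand is implicitly locked whether or not a LOCK prefix is present.
 * A minimal host-side sketch of that semantic (illustration only, assuming
 * a C11 compiler; not how IEM performs the access):
 */
#if 0 /* example only */
#include <stdatomic.h>
static uint8_t iemExampleXchgU8(_Atomic uint8_t *pu8Mem, uint8_t u8New)
{
    return atomic_exchange(pu8Mem, u8New);  /* one atomic read-modify-write */
}
#endif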
11295
11296
11297/** Opcode 0x87. */
11298FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
11299{
11300 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
11301 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11302
11303 /*
11304 * If rm is denoting a register, no more instruction bytes.
11305 */
11306 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11307 {
11308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11309
11310 switch (pVCpu->iem.s.enmEffOpSize)
11311 {
11312 case IEMMODE_16BIT:
11313 IEM_MC_BEGIN(0, 2);
11314 IEM_MC_LOCAL(uint16_t, uTmp1);
11315 IEM_MC_LOCAL(uint16_t, uTmp2);
11316
11317 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11318 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11319 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11320 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11321
11322 IEM_MC_ADVANCE_RIP();
11323 IEM_MC_END();
11324 return VINF_SUCCESS;
11325
11326 case IEMMODE_32BIT:
11327 IEM_MC_BEGIN(0, 2);
11328 IEM_MC_LOCAL(uint32_t, uTmp1);
11329 IEM_MC_LOCAL(uint32_t, uTmp2);
11330
11331 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11332 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11333 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11334 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11335
11336 IEM_MC_ADVANCE_RIP();
11337 IEM_MC_END();
11338 return VINF_SUCCESS;
11339
11340 case IEMMODE_64BIT:
11341 IEM_MC_BEGIN(0, 2);
11342 IEM_MC_LOCAL(uint64_t, uTmp1);
11343 IEM_MC_LOCAL(uint64_t, uTmp2);
11344
11345 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11346 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11347 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11348 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11349
11350 IEM_MC_ADVANCE_RIP();
11351 IEM_MC_END();
11352 return VINF_SUCCESS;
11353
11354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11355 }
11356 }
11357 else
11358 {
11359 /*
11360 * We're accessing memory.
11361 */
11362 switch (pVCpu->iem.s.enmEffOpSize)
11363 {
11364/** @todo the register must be committed separately! */
11365 case IEMMODE_16BIT:
11366 IEM_MC_BEGIN(2, 2);
11367 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
11368 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11370
11371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11372 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11373 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11374 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
11375 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
11376
11377 IEM_MC_ADVANCE_RIP();
11378 IEM_MC_END();
11379 return VINF_SUCCESS;
11380
11381 case IEMMODE_32BIT:
11382 IEM_MC_BEGIN(2, 2);
11383 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
11384 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11386
11387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11388 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11389 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11390 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
11391 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
11392
11393 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11394 IEM_MC_ADVANCE_RIP();
11395 IEM_MC_END();
11396 return VINF_SUCCESS;
11397
11398 case IEMMODE_64BIT:
11399 IEM_MC_BEGIN(2, 2);
11400 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
11401 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11403
11404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11405 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11406 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11407 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
11408 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
11409
11410 IEM_MC_ADVANCE_RIP();
11411 IEM_MC_END();
11412 return VINF_SUCCESS;
11413
11414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11415 }
11416 }
11417}
11418
11419
11420/** Opcode 0x88. */
11421FNIEMOP_DEF(iemOp_mov_Eb_Gb)
11422{
11423 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
11424
11425 uint8_t bRm;
11426 IEM_OPCODE_GET_NEXT_U8(&bRm);
11427
11428 /*
11429 * If rm is denoting a register, no more instruction bytes.
11430 */
11431 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11432 {
11433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11434 IEM_MC_BEGIN(0, 1);
11435 IEM_MC_LOCAL(uint8_t, u8Value);
11436 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11437 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
11438 IEM_MC_ADVANCE_RIP();
11439 IEM_MC_END();
11440 }
11441 else
11442 {
11443 /*
11444 * We're writing a register to memory.
11445 */
11446 IEM_MC_BEGIN(0, 2);
11447 IEM_MC_LOCAL(uint8_t, u8Value);
11448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11451 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11452 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
11453 IEM_MC_ADVANCE_RIP();
11454 IEM_MC_END();
11455 }
11456 return VINF_SUCCESS;
11457
11458}
11459
11460
11461/** Opcode 0x89. */
11462FNIEMOP_DEF(iemOp_mov_Ev_Gv)
11463{
11464 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
11465
11466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11467
11468 /*
11469 * If rm is denoting a register, no more instruction bytes.
11470 */
11471 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11472 {
11473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11474 switch (pVCpu->iem.s.enmEffOpSize)
11475 {
11476 case IEMMODE_16BIT:
11477 IEM_MC_BEGIN(0, 1);
11478 IEM_MC_LOCAL(uint16_t, u16Value);
11479 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11480 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11481 IEM_MC_ADVANCE_RIP();
11482 IEM_MC_END();
11483 break;
11484
11485 case IEMMODE_32BIT:
11486 IEM_MC_BEGIN(0, 1);
11487 IEM_MC_LOCAL(uint32_t, u32Value);
11488 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11489 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11490 IEM_MC_ADVANCE_RIP();
11491 IEM_MC_END();
11492 break;
11493
11494 case IEMMODE_64BIT:
11495 IEM_MC_BEGIN(0, 1);
11496 IEM_MC_LOCAL(uint64_t, u64Value);
11497 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11498 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11499 IEM_MC_ADVANCE_RIP();
11500 IEM_MC_END();
11501 break;
11502 }
11503 }
11504 else
11505 {
11506 /*
11507 * We're writing a register to memory.
11508 */
11509 switch (pVCpu->iem.s.enmEffOpSize)
11510 {
11511 case IEMMODE_16BIT:
11512 IEM_MC_BEGIN(0, 2);
11513 IEM_MC_LOCAL(uint16_t, u16Value);
11514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11517 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11518 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11519 IEM_MC_ADVANCE_RIP();
11520 IEM_MC_END();
11521 break;
11522
11523 case IEMMODE_32BIT:
11524 IEM_MC_BEGIN(0, 2);
11525 IEM_MC_LOCAL(uint32_t, u32Value);
11526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11529 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11530 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11531 IEM_MC_ADVANCE_RIP();
11532 IEM_MC_END();
11533 break;
11534
11535 case IEMMODE_64BIT:
11536 IEM_MC_BEGIN(0, 2);
11537 IEM_MC_LOCAL(uint64_t, u64Value);
11538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11541 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11542 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11543 IEM_MC_ADVANCE_RIP();
11544 IEM_MC_END();
11545 break;
11546 }
11547 }
11548 return VINF_SUCCESS;
11549}
11550
11551
11552/** Opcode 0x8a. */
11553FNIEMOP_DEF(iemOp_mov_Gb_Eb)
11554{
11555 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
11556
11557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11558
11559 /*
11560 * If rm is denoting a register, no more instruction bytes.
11561 */
11562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11563 {
11564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11565 IEM_MC_BEGIN(0, 1);
11566 IEM_MC_LOCAL(uint8_t, u8Value);
11567 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11568 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11569 IEM_MC_ADVANCE_RIP();
11570 IEM_MC_END();
11571 }
11572 else
11573 {
11574 /*
11575 * We're loading a register from memory.
11576 */
11577 IEM_MC_BEGIN(0, 2);
11578 IEM_MC_LOCAL(uint8_t, u8Value);
11579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11582 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11583 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11584 IEM_MC_ADVANCE_RIP();
11585 IEM_MC_END();
11586 }
11587 return VINF_SUCCESS;
11588}
11589
11590
11591/** Opcode 0x8b. */
11592FNIEMOP_DEF(iemOp_mov_Gv_Ev)
11593{
11594 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
11595
11596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11597
11598 /*
11599 * If rm is denoting a register, no more instruction bytes.
11600 */
11601 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11602 {
11603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11604 switch (pVCpu->iem.s.enmEffOpSize)
11605 {
11606 case IEMMODE_16BIT:
11607 IEM_MC_BEGIN(0, 1);
11608 IEM_MC_LOCAL(uint16_t, u16Value);
11609 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11610 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11611 IEM_MC_ADVANCE_RIP();
11612 IEM_MC_END();
11613 break;
11614
11615 case IEMMODE_32BIT:
11616 IEM_MC_BEGIN(0, 1);
11617 IEM_MC_LOCAL(uint32_t, u32Value);
11618 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11619 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11620 IEM_MC_ADVANCE_RIP();
11621 IEM_MC_END();
11622 break;
11623
11624 case IEMMODE_64BIT:
11625 IEM_MC_BEGIN(0, 1);
11626 IEM_MC_LOCAL(uint64_t, u64Value);
11627 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11628 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11629 IEM_MC_ADVANCE_RIP();
11630 IEM_MC_END();
11631 break;
11632 }
11633 }
11634 else
11635 {
11636 /*
11637 * We're loading a register from memory.
11638 */
11639 switch (pVCpu->iem.s.enmEffOpSize)
11640 {
11641 case IEMMODE_16BIT:
11642 IEM_MC_BEGIN(0, 2);
11643 IEM_MC_LOCAL(uint16_t, u16Value);
11644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11647 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11648 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11649 IEM_MC_ADVANCE_RIP();
11650 IEM_MC_END();
11651 break;
11652
11653 case IEMMODE_32BIT:
11654 IEM_MC_BEGIN(0, 2);
11655 IEM_MC_LOCAL(uint32_t, u32Value);
11656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11659 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11660 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11661 IEM_MC_ADVANCE_RIP();
11662 IEM_MC_END();
11663 break;
11664
11665 case IEMMODE_64BIT:
11666 IEM_MC_BEGIN(0, 2);
11667 IEM_MC_LOCAL(uint64_t, u64Value);
11668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11671 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11672 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11673 IEM_MC_ADVANCE_RIP();
11674 IEM_MC_END();
11675 break;
11676 }
11677 }
11678 return VINF_SUCCESS;
11679}
11680
11681
11682/** Opcode 0x63. */
11683FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
11684{
11685 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
11686 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
11687 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11688 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
11689 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
11690}
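/*
 * Note: opcode 0x63 is ARPL outside 64-bit mode and MOVSXD in 64-bit mode,
 * as the dispatch above shows; without REX.W the MOVSXD encoding behaves
 * like a plain 32-bit mov, which is why it reuses iemOp_mov_Gv_Ev.
 */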
11691
11692
11693/** Opcode 0x8c. */
11694FNIEMOP_DEF(iemOp_mov_Ev_Sw)
11695{
11696 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
11697
11698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11699
11700 /*
11701 * Check that the source segment register exists. The REX.R prefix is ignored.
11702 */
11703 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11704 if (iSegReg > X86_SREG_GS)
11705 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11706
11707 /*
11708 * If rm is denoting a register, no more instruction bytes.
11709 * In that case, the operand size is respected and the upper bits are
11710 * cleared (starting with some pentium).
11711 */
11712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11713 {
11714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11715 switch (pVCpu->iem.s.enmEffOpSize)
11716 {
11717 case IEMMODE_16BIT:
11718 IEM_MC_BEGIN(0, 1);
11719 IEM_MC_LOCAL(uint16_t, u16Value);
11720 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11721 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11722 IEM_MC_ADVANCE_RIP();
11723 IEM_MC_END();
11724 break;
11725
11726 case IEMMODE_32BIT:
11727 IEM_MC_BEGIN(0, 1);
11728 IEM_MC_LOCAL(uint32_t, u32Value);
11729 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
11730 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11731 IEM_MC_ADVANCE_RIP();
11732 IEM_MC_END();
11733 break;
11734
11735 case IEMMODE_64BIT:
11736 IEM_MC_BEGIN(0, 1);
11737 IEM_MC_LOCAL(uint64_t, u64Value);
11738 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
11739 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11740 IEM_MC_ADVANCE_RIP();
11741 IEM_MC_END();
11742 break;
11743 }
11744 }
11745 else
11746 {
11747 /*
11748 * We're saving the register to memory. The access is word sized
11749 * regardless of operand size prefixes.
11750 */
11751#if 0 /* not necessary */
11752 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11753#endif
11754 IEM_MC_BEGIN(0, 2);
11755 IEM_MC_LOCAL(uint16_t, u16Value);
11756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11759 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11760 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11761 IEM_MC_ADVANCE_RIP();
11762 IEM_MC_END();
11763 }
11764 return VINF_SUCCESS;
11765}
11766
11767
11768
11769
11770/** Opcode 0x8d. */
11771FNIEMOP_DEF(iemOp_lea_Gv_M)
11772{
11773 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
11774 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11775 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11776 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
11777
11778 switch (pVCpu->iem.s.enmEffOpSize)
11779 {
11780 case IEMMODE_16BIT:
11781 IEM_MC_BEGIN(0, 2);
11782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11783 IEM_MC_LOCAL(uint16_t, u16Cast);
11784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11786 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
11787 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
11788 IEM_MC_ADVANCE_RIP();
11789 IEM_MC_END();
11790 return VINF_SUCCESS;
11791
11792 case IEMMODE_32BIT:
11793 IEM_MC_BEGIN(0, 2);
11794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11795 IEM_MC_LOCAL(uint32_t, u32Cast);
11796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11798 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
11799 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
11800 IEM_MC_ADVANCE_RIP();
11801 IEM_MC_END();
11802 return VINF_SUCCESS;
11803
11804 case IEMMODE_64BIT:
11805 IEM_MC_BEGIN(0, 1);
11806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11809 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
11810 IEM_MC_ADVANCE_RIP();
11811 IEM_MC_END();
11812 return VINF_SUCCESS;
11813 }
11814 AssertFailedReturn(VERR_IEM_IPE_7);
11815}
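/*
 * Note: as the cases above show, LEA stores the effective address itself and
 * never touches memory (hence the register form raising #UD); the address is
 * truncated to the operand size, e.g. 0x12345 becomes 0x2345 with a 16-bit
 * operand size (illustrative values only).
 */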
11816
11817
11818/** Opcode 0x8e. */
11819FNIEMOP_DEF(iemOp_mov_Sw_Ev)
11820{
11821 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
11822
11823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11824
11825 /*
11826 * The practical operand size is 16-bit.
11827 */
11828#if 0 /* not necessary */
11829 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11830#endif
11831
11832 /*
11833 * Check that the destination register exists and can be used with this
11834 * instruction. The REX.R prefix is ignored.
11835 */
11836 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11837 if ( iSegReg == X86_SREG_CS
11838 || iSegReg > X86_SREG_GS)
11839 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11840
11841 /*
11842 * If rm is denoting a register, no more instruction bytes.
11843 */
11844 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11845 {
11846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11847 IEM_MC_BEGIN(2, 0);
11848 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11849 IEM_MC_ARG(uint16_t, u16Value, 1);
11850 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11851 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11852 IEM_MC_END();
11853 }
11854 else
11855 {
11856 /*
11857 * We're loading the register from memory. The access is word sized
11858 * regardless of operand size prefixes.
11859 */
11860 IEM_MC_BEGIN(2, 1);
11861 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11862 IEM_MC_ARG(uint16_t, u16Value, 1);
11863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11866 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11867 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11868 IEM_MC_END();
11869 }
11870 return VINF_SUCCESS;
11871}
11872
11873
11874/** Opcode 0x8f /0. */
11875FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
11876{
11877 /* This bugger is rather annoying as it requires rSP to be updated before
11878 doing the effective address calculations. Will eventually require a
11879 split between the R/M+SIB decoding and the effective address
11880 calculation - which is something that is required for any attempt at
11881 reusing this code for a recompiler. It may also be good to have if we
11882 need to delay #UD exception caused by invalid lock prefixes.
11883
11884 For now, we'll do a mostly safe interpreter-only implementation here. */
11885 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
11886 * now until tests show it's checked. */
11887 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
11888
11889 /* Register access is relatively easy and can share code. */
11890 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11891 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11892
11893 /*
11894 * Memory target.
11895 *
11896 * Intel says that RSP is incremented before it's used in any effective
11897 * address calculations. This means some serious extra annoyance here since
11898 * we decode and calculate the effective address in one step and like to
11899 * delay committing registers till everything is done.
11900 *
11901 * So, we'll decode and calculate the effective address twice. This will
11902 * require some recoding if turned into a recompiler.
11903 */
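 /* Illustration: for 'pop qword [rsp+8]' the value is read from the old RSP,
    while the destination address is formed from the incremented RSP, so the
    store lands at old RSP + 8 (the pop) + 8 (the displacement). */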
11904 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
11905
11906#ifndef TST_IEM_CHECK_MC
11907 /* Calc effective address with modified ESP. */
11908/** @todo testcase */
11909 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
11910 RTGCPTR GCPtrEff;
11911 VBOXSTRICTRC rcStrict;
11912 switch (pVCpu->iem.s.enmEffOpSize)
11913 {
11914 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
11915 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
11916 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
11917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11918 }
11919 if (rcStrict != VINF_SUCCESS)
11920 return rcStrict;
11921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11922
11923 /* Perform the operation - this should be CImpl. */
11924 RTUINT64U TmpRsp;
11925 TmpRsp.u = pCtx->rsp;
11926 switch (pVCpu->iem.s.enmEffOpSize)
11927 {
11928 case IEMMODE_16BIT:
11929 {
11930 uint16_t u16Value;
11931 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
11932 if (rcStrict == VINF_SUCCESS)
11933 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
11934 break;
11935 }
11936
11937 case IEMMODE_32BIT:
11938 {
11939 uint32_t u32Value;
11940 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
11941 if (rcStrict == VINF_SUCCESS)
11942 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
11943 break;
11944 }
11945
11946 case IEMMODE_64BIT:
11947 {
11948 uint64_t u64Value;
11949 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
11950 if (rcStrict == VINF_SUCCESS)
11951 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
11952 break;
11953 }
11954
11955 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11956 }
11957 if (rcStrict == VINF_SUCCESS)
11958 {
11959 pCtx->rsp = TmpRsp.u;
11960 iemRegUpdateRipAndClearRF(pVCpu);
11961 }
11962 return rcStrict;
11963
11964#else
11965 return VERR_IEM_IPE_2;
11966#endif
11967}
11968
11969
11970/** Opcode 0x8f. */
11971FNIEMOP_DEF(iemOp_Grp1A)
11972{
11973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11974 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11975 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11976
11977 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11978 /** @todo XOP decoding. */
11979 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11980 return IEMOP_RAISE_INVALID_OPCODE();
11981}
11982
11983
11984/**
11985 * Common 'xchg reg,rAX' helper.
11986 */
11987FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11988{
11989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11990
11991 iReg |= pVCpu->iem.s.uRexB;
11992 switch (pVCpu->iem.s.enmEffOpSize)
11993 {
11994 case IEMMODE_16BIT:
11995 IEM_MC_BEGIN(0, 2);
11996 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11997 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11998 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11999 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
12000 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
12001 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
12002 IEM_MC_ADVANCE_RIP();
12003 IEM_MC_END();
12004 return VINF_SUCCESS;
12005
12006 case IEMMODE_32BIT:
12007 IEM_MC_BEGIN(0, 2);
12008 IEM_MC_LOCAL(uint32_t, u32Tmp1);
12009 IEM_MC_LOCAL(uint32_t, u32Tmp2);
12010 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
12011 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
12012 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
12013 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
12014 IEM_MC_ADVANCE_RIP();
12015 IEM_MC_END();
12016 return VINF_SUCCESS;
12017
12018 case IEMMODE_64BIT:
12019 IEM_MC_BEGIN(0, 2);
12020 IEM_MC_LOCAL(uint64_t, u64Tmp1);
12021 IEM_MC_LOCAL(uint64_t, u64Tmp2);
12022 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
12023 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
12024 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
12025 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
12026 IEM_MC_ADVANCE_RIP();
12027 IEM_MC_END();
12028 return VINF_SUCCESS;
12029
12030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12031 }
12032}
12033
12034
12035/** Opcode 0x90. */
12036FNIEMOP_DEF(iemOp_nop)
12037{
12038 /* R8/R8D and RAX/EAX can be exchanged. */
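 /* Note: plain 0x90 formally encodes 'xchg rAX,rAX' but must remain a true
    no-op; only the REX.B prefix turns it into a real exchange with r8. */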
12039 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
12040 {
12041 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
12042 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
12043 }
12044
12045 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
12046 IEMOP_MNEMONIC(pause, "pause");
12047 else
12048 IEMOP_MNEMONIC(nop, "nop");
12049 IEM_MC_BEGIN(0, 0);
12050 IEM_MC_ADVANCE_RIP();
12051 IEM_MC_END();
12052 return VINF_SUCCESS;
12053}
12054
12055
12056/** Opcode 0x91. */
12057FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
12058{
12059 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
12060 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
12061}
12062
12063
12064/** Opcode 0x92. */
12065FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
12066{
12067 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
12068 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
12069}
12070
12071
12072/** Opcode 0x93. */
12073FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
12074{
12075 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
12076 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
12077}
12078
12079
12080/** Opcode 0x94. */
12081FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
12082{
12083 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
12084 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
12085}
12086
12087
12088/** Opcode 0x95. */
12089FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
12090{
12091 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
12092 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
12093}
12094
12095
12096/** Opcode 0x96. */
12097FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
12098{
12099 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
12100 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
12101}
12102
12103
12104/** Opcode 0x97. */
12105FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
12106{
12107 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
12108 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
12109}
12110
12111
12112/** Opcode 0x98. */
12113FNIEMOP_DEF(iemOp_cbw)
12114{
12115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
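 /* The sign extension below is done with masks rather than a shift: when the
    sign bit of the narrow value is set, the upper half is ORed to all ones,
    otherwise it is ANDed to zero. */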
12116 switch (pVCpu->iem.s.enmEffOpSize)
12117 {
12118 case IEMMODE_16BIT:
12119 IEMOP_MNEMONIC(cbw, "cbw");
12120 IEM_MC_BEGIN(0, 1);
12121 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
12122 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
12123 } IEM_MC_ELSE() {
12124 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
12125 } IEM_MC_ENDIF();
12126 IEM_MC_ADVANCE_RIP();
12127 IEM_MC_END();
12128 return VINF_SUCCESS;
12129
12130 case IEMMODE_32BIT:
12131 IEMOP_MNEMONIC(cwde, "cwde");
12132 IEM_MC_BEGIN(0, 1);
12133 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
12134 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
12135 } IEM_MC_ELSE() {
12136 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
12137 } IEM_MC_ENDIF();
12138 IEM_MC_ADVANCE_RIP();
12139 IEM_MC_END();
12140 return VINF_SUCCESS;
12141
12142 case IEMMODE_64BIT:
12143 IEMOP_MNEMONIC(cdqe, "cdqe");
12144 IEM_MC_BEGIN(0, 1);
12145 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
12146 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
12147 } IEM_MC_ELSE() {
12148 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
12149 } IEM_MC_ENDIF();
12150 IEM_MC_ADVANCE_RIP();
12151 IEM_MC_END();
12152 return VINF_SUCCESS;
12153
12154 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12155 }
12156}
12157
12158
12159/** Opcode 0x99. */
12160FNIEMOP_DEF(iemOp_cwd)
12161{
12162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
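 /* These forms replicate the sign bit of rAX into every bit of the rDX sized
    half of the result, i.e. rDX = (rAX negative) ? all ones : 0. */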
12163 switch (pVCpu->iem.s.enmEffOpSize)
12164 {
12165 case IEMMODE_16BIT:
12166 IEMOP_MNEMONIC(cwd, "cwd");
12167 IEM_MC_BEGIN(0, 1);
12168 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
12169 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
12170 } IEM_MC_ELSE() {
12171 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
12172 } IEM_MC_ENDIF();
12173 IEM_MC_ADVANCE_RIP();
12174 IEM_MC_END();
12175 return VINF_SUCCESS;
12176
12177 case IEMMODE_32BIT:
12178 IEMOP_MNEMONIC(cdq, "cdq");
12179 IEM_MC_BEGIN(0, 1);
12180 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
12181 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
12182 } IEM_MC_ELSE() {
12183 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
12184 } IEM_MC_ENDIF();
12185 IEM_MC_ADVANCE_RIP();
12186 IEM_MC_END();
12187 return VINF_SUCCESS;
12188
12189 case IEMMODE_64BIT:
12190 IEMOP_MNEMONIC(cqo, "cqo");
12191 IEM_MC_BEGIN(0, 1);
12192 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
12193 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
12194 } IEM_MC_ELSE() {
12195 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
12196 } IEM_MC_ENDIF();
12197 IEM_MC_ADVANCE_RIP();
12198 IEM_MC_END();
12199 return VINF_SUCCESS;
12200
12201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12202 }
12203}
12204
12205
12206/** Opcode 0x9a. */
12207FNIEMOP_DEF(iemOp_call_Ap)
12208{
12209 IEMOP_MNEMONIC(call_Ap, "call Ap");
12210 IEMOP_HLP_NO_64BIT();
12211
12212 /* Decode the far pointer address and pass it on to the far call C implementation. */
12213 uint32_t offSeg;
12214 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12215 IEM_OPCODE_GET_NEXT_U32(&offSeg);
12216 else
12217 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
12218 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
12219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12220 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
12221}
12222
12223
12224/** Opcode 0x9b. (aka fwait) */
12225FNIEMOP_DEF(iemOp_wait)
12226{
12227 IEMOP_MNEMONIC(wait, "wait");
12228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12229
12230 IEM_MC_BEGIN(0, 0);
12231 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
12232 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12233 IEM_MC_ADVANCE_RIP();
12234 IEM_MC_END();
12235 return VINF_SUCCESS;
12236}
12237
12238
12239/** Opcode 0x9c. */
12240FNIEMOP_DEF(iemOp_pushf_Fv)
12241{
12242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12243 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12244 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
12245}
12246
12247
12248/** Opcode 0x9d. */
12249FNIEMOP_DEF(iemOp_popf_Fv)
12250{
12251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12252 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12253 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
12254}
12255
12256
12257/** Opcode 0x9e. */
12258FNIEMOP_DEF(iemOp_sahf)
12259{
12260 IEMOP_MNEMONIC(sahf, "sahf");
12261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12262 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12263 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12264 return IEMOP_RAISE_INVALID_OPCODE();
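 /* AH supplies SF, ZF, AF, PF and CF; the reserved bit 1 is forced to one and
    the remaining AH bits are discarded before merging into EFLAGS. */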
12265 IEM_MC_BEGIN(0, 2);
12266 IEM_MC_LOCAL(uint32_t, u32Flags);
12267 IEM_MC_LOCAL(uint32_t, EFlags);
12268 IEM_MC_FETCH_EFLAGS(EFlags);
12269 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
12270 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
12271 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
12272 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
12273 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
12274 IEM_MC_COMMIT_EFLAGS(EFlags);
12275 IEM_MC_ADVANCE_RIP();
12276 IEM_MC_END();
12277 return VINF_SUCCESS;
12278}
12279
12280
12281/** Opcode 0x9f. */
12282FNIEMOP_DEF(iemOp_lahf)
12283{
12284 IEMOP_MNEMONIC(lahf, "lahf");
12285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12286 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12287 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12288 return IEMOP_RAISE_INVALID_OPCODE();
12289 IEM_MC_BEGIN(0, 1);
12290 IEM_MC_LOCAL(uint8_t, u8Flags);
12291 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
12292 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
12293 IEM_MC_ADVANCE_RIP();
12294 IEM_MC_END();
12295 return VINF_SUCCESS;
12296}
12297
12298
12299/**
12300 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
12301 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
12302 * prefixes. Will return on failures.
12303 * @param a_GCPtrMemOff The variable to store the offset in.
12304 */
12305#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
12306 do \
12307 { \
12308 switch (pVCpu->iem.s.enmEffAddrMode) \
12309 { \
12310 case IEMMODE_16BIT: \
12311 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
12312 break; \
12313 case IEMMODE_32BIT: \
12314 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
12315 break; \
12316 case IEMMODE_64BIT: \
12317 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
12318 break; \
12319 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12320 } \
12321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12322 } while (0)
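/* Example: in 32-bit address mode 'mov al,[0x1234]' (0xa0) is followed by a
   4-byte absolute offset; the macro reads 2, 4 or 8 offset bytes depending on
   the effective address mode. */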
12323
12324/** Opcode 0xa0. */
12325FNIEMOP_DEF(iemOp_mov_Al_Ob)
12326{
12327 /*
12328 * Get the offset and fend off lock prefixes.
12329 */
12330 RTGCPTR GCPtrMemOff;
12331 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12332
12333 /*
12334 * Fetch AL.
12335 */
12336 IEM_MC_BEGIN(0,1);
12337 IEM_MC_LOCAL(uint8_t, u8Tmp);
12338 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12339 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12340 IEM_MC_ADVANCE_RIP();
12341 IEM_MC_END();
12342 return VINF_SUCCESS;
12343}
12344
12345
12346/** Opcode 0xa1. */
12347FNIEMOP_DEF(iemOp_mov_rAX_Ov)
12348{
12349 /*
12350 * Get the offset and fend off lock prefixes.
12351 */
12352 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
12353 RTGCPTR GCPtrMemOff;
12354 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12355
12356 /*
12357 * Fetch rAX.
12358 */
12359 switch (pVCpu->iem.s.enmEffOpSize)
12360 {
12361 case IEMMODE_16BIT:
12362 IEM_MC_BEGIN(0,1);
12363 IEM_MC_LOCAL(uint16_t, u16Tmp);
12364 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12365 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
12366 IEM_MC_ADVANCE_RIP();
12367 IEM_MC_END();
12368 return VINF_SUCCESS;
12369
12370 case IEMMODE_32BIT:
12371 IEM_MC_BEGIN(0,1);
12372 IEM_MC_LOCAL(uint32_t, u32Tmp);
12373 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12374 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
12375 IEM_MC_ADVANCE_RIP();
12376 IEM_MC_END();
12377 return VINF_SUCCESS;
12378
12379 case IEMMODE_64BIT:
12380 IEM_MC_BEGIN(0,1);
12381 IEM_MC_LOCAL(uint64_t, u64Tmp);
12382 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12383 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
12384 IEM_MC_ADVANCE_RIP();
12385 IEM_MC_END();
12386 return VINF_SUCCESS;
12387
12388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12389 }
12390}
12391
12392
12393/** Opcode 0xa2. */
12394FNIEMOP_DEF(iemOp_mov_Ob_AL)
12395{
12396 /*
12397 * Get the offset and fend off lock prefixes.
12398 */
12399 RTGCPTR GCPtrMemOff;
12400 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12401
12402 /*
12403 * Store AL.
12404 */
12405 IEM_MC_BEGIN(0,1);
12406 IEM_MC_LOCAL(uint8_t, u8Tmp);
12407 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12408 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12409 IEM_MC_ADVANCE_RIP();
12410 IEM_MC_END();
12411 return VINF_SUCCESS;
12412}
12413
12414
12415/** Opcode 0xa3. */
12416FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12417{
12418 /*
12419 * Get the offset and fend off lock prefixes.
12420 */
12421 RTGCPTR GCPtrMemOff;
12422 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12423
12424 /*
12425 * Store rAX.
12426 */
12427 switch (pVCpu->iem.s.enmEffOpSize)
12428 {
12429 case IEMMODE_16BIT:
12430 IEM_MC_BEGIN(0,1);
12431 IEM_MC_LOCAL(uint16_t, u16Tmp);
12432 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12433 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12434 IEM_MC_ADVANCE_RIP();
12435 IEM_MC_END();
12436 return VINF_SUCCESS;
12437
12438 case IEMMODE_32BIT:
12439 IEM_MC_BEGIN(0,1);
12440 IEM_MC_LOCAL(uint32_t, u32Tmp);
12441 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12442 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12443 IEM_MC_ADVANCE_RIP();
12444 IEM_MC_END();
12445 return VINF_SUCCESS;
12446
12447 case IEMMODE_64BIT:
12448 IEM_MC_BEGIN(0,1);
12449 IEM_MC_LOCAL(uint64_t, u64Tmp);
12450 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12451 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12452 IEM_MC_ADVANCE_RIP();
12453 IEM_MC_END();
12454 return VINF_SUCCESS;
12455
12456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12457 }
12458}
12459
12460/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
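/* The generated body loads from the effective segment at rSI into a local,
   stores it to ES:rDI, and then advances or retreats both index registers by
   the operand width according to EFLAGS.DF. */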
12461#define IEM_MOVS_CASE(ValBits, AddrBits) \
12462 IEM_MC_BEGIN(0, 2); \
12463 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12464 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12465 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12466 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12467 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12468 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12469 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12470 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12471 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12472 } IEM_MC_ELSE() { \
12473 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12474 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12475 } IEM_MC_ENDIF(); \
12476 IEM_MC_ADVANCE_RIP(); \
12477 IEM_MC_END();
12478
12479/** Opcode 0xa4. */
12480FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
12481{
12482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12483
12484 /*
12485 * Use the C implementation if a repeat prefix is encountered.
12486 */
12487 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12488 {
12489 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
12490 switch (pVCpu->iem.s.enmEffAddrMode)
12491 {
12492 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
12493 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
12494 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
12495 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12496 }
12497 }
12498 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
12499
12500 /*
12501 * Sharing case implementation with movs[wdq] below.
12502 */
12503 switch (pVCpu->iem.s.enmEffAddrMode)
12504 {
12505 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
12506 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
12507 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
12508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12509 }
12510 return VINF_SUCCESS;
12511}
12512
12513
12514/** Opcode 0xa5. */
12515FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12516{
12517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12518
12519 /*
12520 * Use the C implementation if a repeat prefix is encountered.
12521 */
12522 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12523 {
12524 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12525 switch (pVCpu->iem.s.enmEffOpSize)
12526 {
12527 case IEMMODE_16BIT:
12528 switch (pVCpu->iem.s.enmEffAddrMode)
12529 {
12530 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12531 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12532 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12534 }
12535 break;
12536 case IEMMODE_32BIT:
12537 switch (pVCpu->iem.s.enmEffAddrMode)
12538 {
12539 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12540 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12541 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12543 }
12544 case IEMMODE_64BIT:
12545 switch (pVCpu->iem.s.enmEffAddrMode)
12546 {
12547 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12548 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12549 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12551 }
12552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12553 }
12554 }
12555 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12556
12557 /*
12558 * Annoying double switch here.
12559 * Using ugly macro for implementing the cases, sharing it with movsb.
12560 */
12561 switch (pVCpu->iem.s.enmEffOpSize)
12562 {
12563 case IEMMODE_16BIT:
12564 switch (pVCpu->iem.s.enmEffAddrMode)
12565 {
12566 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12567 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12568 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12570 }
12571 break;
12572
12573 case IEMMODE_32BIT:
12574 switch (pVCpu->iem.s.enmEffAddrMode)
12575 {
12576 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12577 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12578 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12580 }
12581 break;
12582
12583 case IEMMODE_64BIT:
12584 switch (pVCpu->iem.s.enmEffAddrMode)
12585 {
12586 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12587 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12588 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12590 }
12591 break;
12592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12593 }
12594 return VINF_SUCCESS;
12595}
12596
12597#undef IEM_MOVS_CASE
12598
12599/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
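/* The generated body compares the datum at rSI (effective segment) against
   the one at ES:rDI using the regular CMP worker so that all arithmetic flags
   get set, then steps both index registers according to EFLAGS.DF. */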
12600#define IEM_CMPS_CASE(ValBits, AddrBits) \
12601 IEM_MC_BEGIN(3, 3); \
12602 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
12603 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
12604 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12605 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
12606 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12607 \
12608 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12609 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
12610 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12611 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
12612 IEM_MC_REF_LOCAL(puValue1, uValue1); \
12613 IEM_MC_REF_EFLAGS(pEFlags); \
12614 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
12615 \
12616 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12617 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12618 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12619 } IEM_MC_ELSE() { \
12620 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12621 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12622 } IEM_MC_ENDIF(); \
12623 IEM_MC_ADVANCE_RIP(); \
12624 IEM_MC_END(); \
12625
12626/** Opcode 0xa6. */
12627FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
12628{
12629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12630
12631 /*
12632 * Use the C implementation if a repeat prefix is encountered.
12633 */
12634 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12635 {
12636 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
12637 switch (pVCpu->iem.s.enmEffAddrMode)
12638 {
12639 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12640 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12641 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12643 }
12644 }
12645 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12646 {
12647 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
12648 switch (pVCpu->iem.s.enmEffAddrMode)
12649 {
12650 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12651 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12652 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12654 }
12655 }
12656 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
12657
12658 /*
12659 * Sharing case implementation with cmps[wdq] below.
12660 */
12661 switch (pVCpu->iem.s.enmEffAddrMode)
12662 {
12663 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
12664 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
12665 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
12666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12667 }
12668 return VINF_SUCCESS;
12669
12670}
12671
12672
12673/** Opcode 0xa7. */
12674FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12675{
12676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12677
12678 /*
12679 * Use the C implementation if a repeat prefix is encountered.
12680 */
12681 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12682 {
12683 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12684 switch (pVCpu->iem.s.enmEffOpSize)
12685 {
12686 case IEMMODE_16BIT:
12687 switch (pVCpu->iem.s.enmEffAddrMode)
12688 {
12689 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12690 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12691 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12693 }
12694 break;
12695 case IEMMODE_32BIT:
12696 switch (pVCpu->iem.s.enmEffAddrMode)
12697 {
12698 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12699 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12700 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12701 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12702 }
12703 case IEMMODE_64BIT:
12704 switch (pVCpu->iem.s.enmEffAddrMode)
12705 {
12706 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12707 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12708 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12710 }
12711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12712 }
12713 }
12714
12715 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12716 {
12717 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12718 switch (pVCpu->iem.s.enmEffOpSize)
12719 {
12720 case IEMMODE_16BIT:
12721 switch (pVCpu->iem.s.enmEffAddrMode)
12722 {
12723 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12724 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12725 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12726 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12727 }
12728 break;
12729 case IEMMODE_32BIT:
12730 switch (pVCpu->iem.s.enmEffAddrMode)
12731 {
12732 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12733 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12734 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12736 }
12737 case IEMMODE_64BIT:
12738 switch (pVCpu->iem.s.enmEffAddrMode)
12739 {
12740 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12741 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12742 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12744 }
12745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12746 }
12747 }
12748
12749 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12750
12751 /*
12752 * Annoying double switch here.
12753 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12754 */
12755 switch (pVCpu->iem.s.enmEffOpSize)
12756 {
12757 case IEMMODE_16BIT:
12758 switch (pVCpu->iem.s.enmEffAddrMode)
12759 {
12760 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12761 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12762 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12764 }
12765 break;
12766
12767 case IEMMODE_32BIT:
12768 switch (pVCpu->iem.s.enmEffAddrMode)
12769 {
12770 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12771 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12772 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12774 }
12775 break;
12776
12777 case IEMMODE_64BIT:
12778 switch (pVCpu->iem.s.enmEffAddrMode)
12779 {
12780 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12781 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12782 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12784 }
12785 break;
12786 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12787 }
12788 return VINF_SUCCESS;
12789
12790}
12791
12792#undef IEM_CMPS_CASE
12793
12794/** Opcode 0xa8. */
12795FNIEMOP_DEF(iemOp_test_AL_Ib)
12796{
12797 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
12798 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12799 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
12800}
12801
12802
12803/** Opcode 0xa9. */
12804FNIEMOP_DEF(iemOp_test_eAX_Iz)
12805{
12806 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
12807 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12808 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
12809}
12810
12811
12812/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
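/* The generated body stores the low ValBits of rAX to ES:rDI and steps rDI;
   STOS never touches rSI and the ES destination cannot be overridden. */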
12813#define IEM_STOS_CASE(ValBits, AddrBits) \
12814 IEM_MC_BEGIN(0, 2); \
12815 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12816 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12817 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
12818 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12819 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12820 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12821 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12822 } IEM_MC_ELSE() { \
12823 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12824 } IEM_MC_ENDIF(); \
12825 IEM_MC_ADVANCE_RIP(); \
12826 IEM_MC_END(); \
12827
12828/** Opcode 0xaa. */
12829FNIEMOP_DEF(iemOp_stosb_Yb_AL)
12830{
12831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12832
12833 /*
12834 * Use the C implementation if a repeat prefix is encountered.
12835 */
12836 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12837 {
12838 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
12839 switch (pVCpu->iem.s.enmEffAddrMode)
12840 {
12841 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
12842 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
12843 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
12844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12845 }
12846 }
12847 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
12848
12849 /*
12850 * Sharing case implementation with stos[wdq] below.
12851 */
12852 switch (pVCpu->iem.s.enmEffAddrMode)
12853 {
12854 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
12855 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
12856 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
12857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12858 }
12859 return VINF_SUCCESS;
12860}
12861
12862
12863/** Opcode 0xab. */
12864FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12865{
12866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12867
12868 /*
12869 * Use the C implementation if a repeat prefix is encountered.
12870 */
12871 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12872 {
12873 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12874 switch (pVCpu->iem.s.enmEffOpSize)
12875 {
12876 case IEMMODE_16BIT:
12877 switch (pVCpu->iem.s.enmEffAddrMode)
12878 {
12879 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12880 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12881 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12882 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12883 }
12884 break;
12885 case IEMMODE_32BIT:
12886 switch (pVCpu->iem.s.enmEffAddrMode)
12887 {
12888 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12889 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12890 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12892 }
12893 case IEMMODE_64BIT:
12894 switch (pVCpu->iem.s.enmEffAddrMode)
12895 {
12896 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12897 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12898 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12900 }
12901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12902 }
12903 }
12904 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12905
12906 /*
12907 * Annoying double switch here.
12908 * Using ugly macro for implementing the cases, sharing it with stosb.
12909 */
12910 switch (pVCpu->iem.s.enmEffOpSize)
12911 {
12912 case IEMMODE_16BIT:
12913 switch (pVCpu->iem.s.enmEffAddrMode)
12914 {
12915 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12916 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12917 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12919 }
12920 break;
12921
12922 case IEMMODE_32BIT:
12923 switch (pVCpu->iem.s.enmEffAddrMode)
12924 {
12925 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12926 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12927 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12928 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12929 }
12930 break;
12931
12932 case IEMMODE_64BIT:
12933 switch (pVCpu->iem.s.enmEffAddrMode)
12934 {
12935 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12936 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12937 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12939 }
12940 break;
12941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12942 }
12943 return VINF_SUCCESS;
12944}
12945
12946#undef IEM_STOS_CASE
12947
12948/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
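/* The generated body loads the low ValBits of rAX from the effective segment
   at rSI and steps rSI; rDI is not used by LODS. */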
12949#define IEM_LODS_CASE(ValBits, AddrBits) \
12950 IEM_MC_BEGIN(0, 2); \
12951 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12952 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12953 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12954 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12955 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
12956 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12957 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12958 } IEM_MC_ELSE() { \
12959 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12960 } IEM_MC_ENDIF(); \
12961 IEM_MC_ADVANCE_RIP(); \
12962 IEM_MC_END();
12963
12964/** Opcode 0xac. */
12965FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
12966{
12967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12968
12969 /*
12970 * Use the C implementation if a repeat prefix is encountered.
12971 */
12972 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12973 {
12974 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
12975 switch (pVCpu->iem.s.enmEffAddrMode)
12976 {
12977 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
12978 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
12979 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
12980 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12981 }
12982 }
12983 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
12984
12985 /*
12986 * Sharing case implementation with lods[wdq] below.
12987 */
12988 switch (pVCpu->iem.s.enmEffAddrMode)
12989 {
12990 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
12991 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
12992 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
12993 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12994 }
12995 return VINF_SUCCESS;
12996}
12997
12998
12999/** Opcode 0xad. */
13000FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
13001{
13002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13003
13004 /*
13005 * Use the C implementation if a repeat prefix is encountered.
13006 */
13007 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
13008 {
13009 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
13010 switch (pVCpu->iem.s.enmEffOpSize)
13011 {
13012 case IEMMODE_16BIT:
13013 switch (pVCpu->iem.s.enmEffAddrMode)
13014 {
13015 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
13016 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
13017 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
13018 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13019 }
13020 break;
13021 case IEMMODE_32BIT:
13022 switch (pVCpu->iem.s.enmEffAddrMode)
13023 {
13024 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
13025 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
13026 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
13027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13028 }
13029 case IEMMODE_64BIT:
13030 switch (pVCpu->iem.s.enmEffAddrMode)
13031 {
13032 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
13033 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
13034 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
13035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13036 }
13037 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13038 }
13039 }
13040 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
13041
13042 /*
13043 * Annoying double switch here.
13044 * Using ugly macro for implementing the cases, sharing it with lodsb.
13045 */
13046 switch (pVCpu->iem.s.enmEffOpSize)
13047 {
13048 case IEMMODE_16BIT:
13049 switch (pVCpu->iem.s.enmEffAddrMode)
13050 {
13051 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
13052 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
13053 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
13054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13055 }
13056 break;
13057
13058 case IEMMODE_32BIT:
13059 switch (pVCpu->iem.s.enmEffAddrMode)
13060 {
13061 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
13062 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
13063 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
13064 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13065 }
13066 break;
13067
13068 case IEMMODE_64BIT:
13069 switch (pVCpu->iem.s.enmEffAddrMode)
13070 {
13071 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13072 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
13073 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
13074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13075 }
13076 break;
13077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13078 }
13079 return VINF_SUCCESS;
13080}
13081
13082#undef IEM_LODS_CASE
13083
13084/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
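/* The generated body compares rAX against the datum at ES:rDI (no segment
   override is possible here either) and steps rDI according to EFLAGS.DF. */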
13085#define IEM_SCAS_CASE(ValBits, AddrBits) \
13086 IEM_MC_BEGIN(3, 2); \
13087 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
13088 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
13089 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
13090 IEM_MC_LOCAL(RTGCPTR, uAddr); \
13091 \
13092 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
13093 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
13094 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
13095 IEM_MC_REF_EFLAGS(pEFlags); \
13096 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
13097 \
13098 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
13099 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
13100 } IEM_MC_ELSE() { \
13101 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
13102 } IEM_MC_ENDIF(); \
13103 IEM_MC_ADVANCE_RIP(); \
13104 IEM_MC_END();
13105
13106/** Opcode 0xae. */
13107FNIEMOP_DEF(iemOp_scasb_AL_Xb)
13108{
13109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13110
13111 /*
13112 * Use the C implementation if a repeat prefix is encountered.
13113 */
13114 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
13115 {
13116 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
13117 switch (pVCpu->iem.s.enmEffAddrMode)
13118 {
13119 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
13120 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
13121 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
13122 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13123 }
13124 }
13125 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13126 {
13127 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
13128 switch (pVCpu->iem.s.enmEffAddrMode)
13129 {
13130 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
13131 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
13132 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
13133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13134 }
13135 }
13136 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
13137
13138 /*
13139 * Sharing case implementation with scas[wdq] below.
13140 */
13141 switch (pVCpu->iem.s.enmEffAddrMode)
13142 {
13143 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
13144 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
13145 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
13146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13147 }
13148 return VINF_SUCCESS;
13149}
13150
13151
13152/** Opcode 0xaf. */
13153FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
13154{
13155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13156
13157 /*
13158 * Use the C implementation if a repeat prefix is encountered.
13159 */
13160 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
13161 {
13162 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
13163 switch (pVCpu->iem.s.enmEffOpSize)
13164 {
13165 case IEMMODE_16BIT:
13166 switch (pVCpu->iem.s.enmEffAddrMode)
13167 {
13168 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
13169 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
13170 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
13171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13172 }
13173 break;
13174 case IEMMODE_32BIT:
13175 switch (pVCpu->iem.s.enmEffAddrMode)
13176 {
13177 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
13178 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
13179 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
13180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13181 }
13182 case IEMMODE_64BIT:
13183 switch (pVCpu->iem.s.enmEffAddrMode)
13184 {
13185 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? 16-bit addressing cannot be encoded in 64-bit mode, only 32-bit can (via the 0x67 prefix), so this case should be unreachable. */
13186 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
13187 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
13188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13189 }
13190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13191 }
13192 }
13193 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13194 {
13195 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
13196 switch (pVCpu->iem.s.enmEffOpSize)
13197 {
13198 case IEMMODE_16BIT:
13199 switch (pVCpu->iem.s.enmEffAddrMode)
13200 {
13201 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
13202 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
13203 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
13204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13205 }
13206 break;
13207 case IEMMODE_32BIT:
13208 switch (pVCpu->iem.s.enmEffAddrMode)
13209 {
13210 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
13211 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
13212 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
13213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13214 }
13215 case IEMMODE_64BIT:
13216 switch (pVCpu->iem.s.enmEffAddrMode)
13217 {
13218 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
13219 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
13220 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
13221 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13222 }
13223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13224 }
13225 }
13226 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
13227
13228 /*
13229 * Annoying double switch here.
13230 * Using ugly macro for implementing the cases, sharing it with scasb.
13231 */
13232 switch (pVCpu->iem.s.enmEffOpSize)
13233 {
13234 case IEMMODE_16BIT:
13235 switch (pVCpu->iem.s.enmEffAddrMode)
13236 {
13237 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
13238 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
13239 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
13240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13241 }
13242 break;
13243
13244 case IEMMODE_32BIT:
13245 switch (pVCpu->iem.s.enmEffAddrMode)
13246 {
13247 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
13248 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
13249 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
13250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13251 }
13252 break;
13253
13254 case IEMMODE_64BIT:
13255 switch (pVCpu->iem.s.enmEffAddrMode)
13256 {
13257 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13258 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
13259 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
13260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13261 }
13262 break;
13263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13264 }
13265 return VINF_SUCCESS;
13266}
13267
13268#undef IEM_SCAS_CASE
13269
13270/**
13271 * Common 'mov r8, imm8' helper.
13272 */
13273FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
13274{
13275 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13277
13278 IEM_MC_BEGIN(0, 1);
13279 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
13280 IEM_MC_STORE_GREG_U8(iReg, u8Value);
13281 IEM_MC_ADVANCE_RIP();
13282 IEM_MC_END();
13283
13284 return VINF_SUCCESS;
13285}
13286
13287
13288/** Opcode 0xb0. */
13289FNIEMOP_DEF(iemOp_mov_AL_Ib)
13290{
13291 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
13292 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13293}
13294
13295
13296/** Opcode 0xb1. */
13297FNIEMOP_DEF(iemOp_CL_Ib)
13298{
13299 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
13300 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13301}
13302
13303
13304/** Opcode 0xb2. */
13305FNIEMOP_DEF(iemOp_DL_Ib)
13306{
13307 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
13308 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13309}
13310
13311
13312/** Opcode 0xb3. */
13313FNIEMOP_DEF(iemOp_BL_Ib)
13314{
13315 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
13316 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13317}
13318
13319
13320/** Opcode 0xb4. */
13321FNIEMOP_DEF(iemOp_mov_AH_Ib)
13322{
13323 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
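 /* With any REX prefix this opcode encodes SPL rather than AH, which is why
    the xSP register index is passed along below. */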
13324 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13325}
13326
13327
13328/** Opcode 0xb5. */
13329FNIEMOP_DEF(iemOp_CH_Ib)
13330{
13331 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
13332 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13333}
13334
13335
13336/** Opcode 0xb6. */
13337FNIEMOP_DEF(iemOp_DH_Ib)
13338{
13339 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
13340 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13341}
13342
13343
13344/** Opcode 0xb7. */
13345FNIEMOP_DEF(iemOp_BH_Ib)
13346{
13347 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
13348 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13349}
13350
13351
13352/**
13353 * Common 'mov regX,immX' helper.
13354 */
13355FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13356{
13357 switch (pVCpu->iem.s.enmEffOpSize)
13358 {
13359 case IEMMODE_16BIT:
13360 {
13361 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13363
13364 IEM_MC_BEGIN(0, 1);
13365 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13366 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13367 IEM_MC_ADVANCE_RIP();
13368 IEM_MC_END();
13369 break;
13370 }
13371
13372 case IEMMODE_32BIT:
13373 {
13374 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13376
13377 IEM_MC_BEGIN(0, 1);
13378 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13379 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13380 IEM_MC_ADVANCE_RIP();
13381 IEM_MC_END();
13382 break;
13383 }
13384 case IEMMODE_64BIT:
13385 {
13386 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13388
13389 IEM_MC_BEGIN(0, 1);
13390 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13391 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13392 IEM_MC_ADVANCE_RIP();
13393 IEM_MC_END();
13394 break;
13395 }
13396 }
13397
13398 return VINF_SUCCESS;
13399}
13400
13401
13402/** Opcode 0xb8. */
13403FNIEMOP_DEF(iemOp_eAX_Iv)
13404{
13405 IEMOP_MNEMONIC(mov_rAX_Iv, "mov rAX,Iv");
13406 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13407}
13408
13409
13410/** Opcode 0xb9. */
13411FNIEMOP_DEF(iemOp_eCX_Iv)
13412{
13413 IEMOP_MNEMONIC(mov_rCX_Iv, "mov rCX,Iv");
13414 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13415}
13416
13417
13418/** Opcode 0xba. */
13419FNIEMOP_DEF(iemOp_eDX_Iv)
13420{
13421 IEMOP_MNEMONIC(mov_rDX_Iv, "mov rDX,Iv");
13422 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13423}
13424
13425
13426/** Opcode 0xbb. */
13427FNIEMOP_DEF(iemOp_eBX_Iv)
13428{
13429 IEMOP_MNEMONIC(mov_rBX_Iv, "mov rBX,Iv");
13430 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13431}
13432
13433
13434/** Opcode 0xbc. */
13435FNIEMOP_DEF(iemOp_eSP_Iv)
13436{
13437 IEMOP_MNEMONIC(mov_rSP_Iv, "mov rSP,Iv");
13438 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13439}
13440
13441
13442/** Opcode 0xbd. */
13443FNIEMOP_DEF(iemOp_eBP_Iv)
13444{
13445 IEMOP_MNEMONIC(mov_rBP_Iv, "mov rBP,Iv");
13446 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13447}
13448
13449
13450/** Opcode 0xbe. */
13451FNIEMOP_DEF(iemOp_eSI_Iv)
13452{
13453 IEMOP_MNEMONIC(mov_rSI_Iv, "mov rSI,Iv");
13454 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13455}
13456
13457
13458/** Opcode 0xbf. */
13459FNIEMOP_DEF(iemOp_eDI_Iv)
13460{
13461 IEMOP_MNEMONIC(mov_rDI_Iv, "mov rDI,Iv");
13462 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13463}
13464
13465
13466/** Opcode 0xc0. */
13467FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
13468{
13469 IEMOP_HLP_MIN_186();
13470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13471 PCIEMOPSHIFTSIZES pImpl;
13472 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13473 {
13474 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
13475 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
13476 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
13477 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
13478 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
13479 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
13480 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
13481 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13482 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
13483 }
13484 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13485
13486 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13487 {
13488 /* register */
13489 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13491 IEM_MC_BEGIN(3, 0);
13492 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13493 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13494 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13495 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13496 IEM_MC_REF_EFLAGS(pEFlags);
13497 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13498 IEM_MC_ADVANCE_RIP();
13499 IEM_MC_END();
13500 }
13501 else
13502 {
13503 /* memory */
13504 IEM_MC_BEGIN(3, 2);
13505 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13506 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13507 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13509
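 /* Note: the third parameter of the effective address calculation below is,
    as far as I can tell, the number of immediate bytes still to be fetched
    (the Ib), so that RIP-relative addressing is computed past them. */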
13510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13511 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13512 IEM_MC_ASSIGN(cShiftArg, cShift);
13513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13514 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13515 IEM_MC_FETCH_EFLAGS(EFlags);
13516 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13517
13518 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13519 IEM_MC_COMMIT_EFLAGS(EFlags);
13520 IEM_MC_ADVANCE_RIP();
13521 IEM_MC_END();
13522 }
13523 return VINF_SUCCESS;
13524}
13525
13526
13527/** Opcode 0xc1. */
13528FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
13529{
13530 IEMOP_HLP_MIN_186();
13531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13532 PCIEMOPSHIFTSIZES pImpl;
13533 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13534 {
13535 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
13536 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
13537 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
13538 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
13539 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
13540 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
13541 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
13542 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13543 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
13544 }
13545 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13546
13547 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13548 {
13549 /* register */
13550 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13552 switch (pVCpu->iem.s.enmEffOpSize)
13553 {
13554 case IEMMODE_16BIT:
13555 IEM_MC_BEGIN(3, 0);
13556 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13557 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13558 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13559 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13560 IEM_MC_REF_EFLAGS(pEFlags);
13561 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13562 IEM_MC_ADVANCE_RIP();
13563 IEM_MC_END();
13564 return VINF_SUCCESS;
13565
13566 case IEMMODE_32BIT:
13567 IEM_MC_BEGIN(3, 0);
13568 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13569 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13570 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13571 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13572 IEM_MC_REF_EFLAGS(pEFlags);
13573 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13574 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13575 IEM_MC_ADVANCE_RIP();
13576 IEM_MC_END();
13577 return VINF_SUCCESS;
13578
13579 case IEMMODE_64BIT:
13580 IEM_MC_BEGIN(3, 0);
13581 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13582 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13583 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13584 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13585 IEM_MC_REF_EFLAGS(pEFlags);
13586 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13587 IEM_MC_ADVANCE_RIP();
13588 IEM_MC_END();
13589 return VINF_SUCCESS;
13590
13591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13592 }
13593 }
13594 else
13595 {
13596 /* memory */
13597 switch (pVCpu->iem.s.enmEffOpSize)
13598 {
13599 case IEMMODE_16BIT:
13600 IEM_MC_BEGIN(3, 2);
13601 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13602 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13603 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13605
13606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13607 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13608 IEM_MC_ASSIGN(cShiftArg, cShift);
13609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13610 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13611 IEM_MC_FETCH_EFLAGS(EFlags);
13612 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13613
13614 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13615 IEM_MC_COMMIT_EFLAGS(EFlags);
13616 IEM_MC_ADVANCE_RIP();
13617 IEM_MC_END();
13618 return VINF_SUCCESS;
13619
13620 case IEMMODE_32BIT:
13621 IEM_MC_BEGIN(3, 2);
13622 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13623 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13624 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13626
13627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13628 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13629 IEM_MC_ASSIGN(cShiftArg, cShift);
13630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13631 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13632 IEM_MC_FETCH_EFLAGS(EFlags);
13633 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13634
13635 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13636 IEM_MC_COMMIT_EFLAGS(EFlags);
13637 IEM_MC_ADVANCE_RIP();
13638 IEM_MC_END();
13639 return VINF_SUCCESS;
13640
13641 case IEMMODE_64BIT:
13642 IEM_MC_BEGIN(3, 2);
13643 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13644 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13645 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13647
13648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13649 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13650 IEM_MC_ASSIGN(cShiftArg, cShift);
13651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13652 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13653 IEM_MC_FETCH_EFLAGS(EFlags);
13654 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13655
13656 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13657 IEM_MC_COMMIT_EFLAGS(EFlags);
13658 IEM_MC_ADVANCE_RIP();
13659 IEM_MC_END();
13660 return VINF_SUCCESS;
13661
13662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13663 }
13664 }
13665}
13666
13667
13668/** Opcode 0xc2. */
13669FNIEMOP_DEF(iemOp_retn_Iw)
13670{
13671 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
13672 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13674 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13675 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
13676}
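
/* Usage note: the immediate is added to the stack pointer after the return
 * address has been popped, e.g. (hypothetical bytes) c2 08 00 = retn 8, the
 * typical stdcall-style cleanup of two dword arguments. */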
13677
13678
13679/** Opcode 0xc3. */
13680FNIEMOP_DEF(iemOp_retn)
13681{
13682 IEMOP_MNEMONIC(retn, "retn");
13683 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13685 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
13686}
13687
13688
13689/** Opcode 0xc4. */
13690FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
13691{
13692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13693 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
13694 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13695 {
13696 IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
13699 The use as a VEX prefix is made possible by assigning the inverted
13700 REX.R to the top MOD bit, and the top bit in the inverted register
13701 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
13702 to accessing registers 0..7 in this VEX form. */
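    /* Illustrative disambiguation (hypothetical 32-bit guest bytes):
           c4 43 10     les eax, [ebx+0x10]    ; MOD=01b -> plain LES
           c4 c3 ...                           ; MOD=11b -> VEX prefix form
       In 64-bit mode the MOD check is moot: every 0xc4 starts a VEX prefix
       there, which is what the mode test above implements. */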
13703 /** @todo VEX: Just use new tables for it. */
13704 return IEMOP_RAISE_INVALID_OPCODE();
13705 }
13706 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
13707 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
13708}
13709
13710
13711/** Opcode 0xc5. */
13712FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
13713{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
13716 The use as a VEX prefix is made possible by assigning the inverted
13717 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
13718 outside of 64-bit mode. VEX is not available in real or v86 mode. */
13719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13720 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
13721 {
13722 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
13723 {
13724 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
13725 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
13726 }
13727 IEMOP_HLP_NO_REAL_OR_V86_MODE();
13728 }
13729
13730 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
13732 * instruction decoding and fetching (using \#PF). */
13733 uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
13734 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
13735 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
13736#if 0 /* will make sense of this next week... */
    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
13738 &&
13739 )
13740 {
13741
13742 }
13743#endif
13744
13745 /** @todo VEX: Just use new tables for it. */
13746 return IEMOP_RAISE_INVALID_OPCODE();
13747}
13748
13749
13750/** Opcode 0xc6. */
13751FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
13752{
13753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13754 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
13755 return IEMOP_RAISE_INVALID_OPCODE();
13756 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
13757
13758 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13759 {
13760 /* register access */
13761 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13763 IEM_MC_BEGIN(0, 0);
13764 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
13765 IEM_MC_ADVANCE_RIP();
13766 IEM_MC_END();
13767 }
13768 else
13769 {
13770 /* memory access. */
13771 IEM_MC_BEGIN(0, 1);
13772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13774 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13776 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
13777 IEM_MC_ADVANCE_RIP();
13778 IEM_MC_END();
13779 }
13780 return VINF_SUCCESS;
13781}
13782
13783
13784/** Opcode 0xc7. */
13785FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
13786{
13787 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
13789 return IEMOP_RAISE_INVALID_OPCODE();
13790 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
13791
13792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13793 {
13794 /* register access */
13795 switch (pVCpu->iem.s.enmEffOpSize)
13796 {
13797 case IEMMODE_16BIT:
13798 IEM_MC_BEGIN(0, 0);
13799 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13801 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
13802 IEM_MC_ADVANCE_RIP();
13803 IEM_MC_END();
13804 return VINF_SUCCESS;
13805
13806 case IEMMODE_32BIT:
13807 IEM_MC_BEGIN(0, 0);
13808 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13810 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
13811 IEM_MC_ADVANCE_RIP();
13812 IEM_MC_END();
13813 return VINF_SUCCESS;
13814
13815 case IEMMODE_64BIT:
13816 IEM_MC_BEGIN(0, 0);
13817 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13819 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
13820 IEM_MC_ADVANCE_RIP();
13821 IEM_MC_END();
13822 return VINF_SUCCESS;
13823
13824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13825 }
13826 }
13827 else
13828 {
13829 /* memory access. */
13830 switch (pVCpu->iem.s.enmEffOpSize)
13831 {
13832 case IEMMODE_16BIT:
13833 IEM_MC_BEGIN(0, 1);
13834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13836 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13838 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
13839 IEM_MC_ADVANCE_RIP();
13840 IEM_MC_END();
13841 return VINF_SUCCESS;
13842
13843 case IEMMODE_32BIT:
13844 IEM_MC_BEGIN(0, 1);
13845 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13846 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13847 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13849 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
13850 IEM_MC_ADVANCE_RIP();
13851 IEM_MC_END();
13852 return VINF_SUCCESS;
13853
13854 case IEMMODE_64BIT:
13855 IEM_MC_BEGIN(0, 1);
13856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13858 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13860 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
13861 IEM_MC_ADVANCE_RIP();
13862 IEM_MC_END();
13863 return VINF_SUCCESS;
13864
13865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13866 }
13867 }
13868}
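
/* Note on the 64-bit forms above: Iz is at most 32 bits wide, so the
 * immediate is sign-extended to 64 bits (IEM_OPCODE_GET_NEXT_S32_SX_U64).
 * Worked example (hypothetical guest bytes):
 *      48 c7 00 ff ff ff ff        mov qword [rax], -1
 * where the imm32 0xffffffff ends up as UINT64_C(0xffffffffffffffff). */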
13869
13870
13871
13872
13873/** Opcode 0xc8. */
13874FNIEMOP_DEF(iemOp_enter_Iw_Ib)
13875{
13876 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
13877 IEMOP_HLP_MIN_186();
13878 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13879 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
13880 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
13881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13882 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
13883}
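
/* Rough sketch of what iemCImpl_enter implements for the common nesting
 * level 0 (simplified; the real helper also deals with the 16/32/64-bit
 * stack widths and nesting levels 1..31 with their frame pointer copies):
 *      push(xBP); xBP = xSP; xSP -= cbFrame;
 */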
13884
13885
13886/** Opcode 0xc9. */
13887FNIEMOP_DEF(iemOp_leave)
13888{
13889 IEMOP_MNEMONIC(leave, "leave");
13890 IEMOP_HLP_MIN_186();
13891 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13893 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
13894}
13895
13896
13897/** Opcode 0xca. */
13898FNIEMOP_DEF(iemOp_retf_Iw)
13899{
13900 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
13901 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13903 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13904 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
13905}
13906
13907
13908/** Opcode 0xcb. */
13909FNIEMOP_DEF(iemOp_retf)
13910{
13911 IEMOP_MNEMONIC(retf, "retf");
13912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13913 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13914 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
13915}
13916
13917
13918/** Opcode 0xcc. */
13919FNIEMOP_DEF(iemOp_int_3)
13920{
13921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13922 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13923}
13924
13925
13926/** Opcode 0xcd. */
13927FNIEMOP_DEF(iemOp_int_Ib)
13928{
13929 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13931 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13932}
13933
13934
13935/** Opcode 0xce. */
13936FNIEMOP_DEF(iemOp_into)
13937{
13938 IEMOP_MNEMONIC(into, "into");
13939 IEMOP_HLP_NO_64BIT();
13940
13941 IEM_MC_BEGIN(2, 0);
13942 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13943 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13944 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13945 IEM_MC_END();
13946 return VINF_SUCCESS;
13947}
13948
13949
13950/** Opcode 0xcf. */
13951FNIEMOP_DEF(iemOp_iret)
13952{
13953 IEMOP_MNEMONIC(iret, "iret");
13954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13955 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13956}
13957
13958
13959/** Opcode 0xd0. */
13960FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13961{
13962 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13963 PCIEMOPSHIFTSIZES pImpl;
13964 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13965 {
13966 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13967 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13968 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13969 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13970 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13971 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13972 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13973 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13974 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13975 }
13976 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13977
13978 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13979 {
13980 /* register */
13981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13982 IEM_MC_BEGIN(3, 0);
13983 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13984 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13985 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13986 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13987 IEM_MC_REF_EFLAGS(pEFlags);
13988 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13989 IEM_MC_ADVANCE_RIP();
13990 IEM_MC_END();
13991 }
13992 else
13993 {
13994 /* memory */
13995 IEM_MC_BEGIN(3, 2);
13996 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13997 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13998 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14000
14001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14003 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14004 IEM_MC_FETCH_EFLAGS(EFlags);
14005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14006
14007 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
14008 IEM_MC_COMMIT_EFLAGS(EFlags);
14009 IEM_MC_ADVANCE_RIP();
14010 IEM_MC_END();
14011 }
14012 return VINF_SUCCESS;
14013}
14014
14015
14016
14017/** Opcode 0xd1. */
14018FNIEMOP_DEF(iemOp_Grp2_Ev_1)
14019{
14020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14021 PCIEMOPSHIFTSIZES pImpl;
14022 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14023 {
14024 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
14025 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
14026 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
14027 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
14028 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
14029 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
14030 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
14031 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14032 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
14033 }
14034 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14035
14036 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14037 {
14038 /* register */
14039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14040 switch (pVCpu->iem.s.enmEffOpSize)
14041 {
14042 case IEMMODE_16BIT:
14043 IEM_MC_BEGIN(3, 0);
14044 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14045 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14046 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14047 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14048 IEM_MC_REF_EFLAGS(pEFlags);
14049 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14050 IEM_MC_ADVANCE_RIP();
14051 IEM_MC_END();
14052 return VINF_SUCCESS;
14053
14054 case IEMMODE_32BIT:
14055 IEM_MC_BEGIN(3, 0);
14056 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14057 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14058 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14059 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14060 IEM_MC_REF_EFLAGS(pEFlags);
14061 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14062 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
14063 IEM_MC_ADVANCE_RIP();
14064 IEM_MC_END();
14065 return VINF_SUCCESS;
14066
14067 case IEMMODE_64BIT:
14068 IEM_MC_BEGIN(3, 0);
14069 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14070 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14071 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14072 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14073 IEM_MC_REF_EFLAGS(pEFlags);
14074 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14075 IEM_MC_ADVANCE_RIP();
14076 IEM_MC_END();
14077 return VINF_SUCCESS;
14078
14079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14080 }
14081 }
14082 else
14083 {
14084 /* memory */
14085 switch (pVCpu->iem.s.enmEffOpSize)
14086 {
14087 case IEMMODE_16BIT:
14088 IEM_MC_BEGIN(3, 2);
14089 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14090 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14091 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14093
14094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14096 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14097 IEM_MC_FETCH_EFLAGS(EFlags);
14098 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14099
14100 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14101 IEM_MC_COMMIT_EFLAGS(EFlags);
14102 IEM_MC_ADVANCE_RIP();
14103 IEM_MC_END();
14104 return VINF_SUCCESS;
14105
14106 case IEMMODE_32BIT:
14107 IEM_MC_BEGIN(3, 2);
14108 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14109 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14110 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14111 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14112
14113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14115 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14116 IEM_MC_FETCH_EFLAGS(EFlags);
14117 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14118
14119 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14120 IEM_MC_COMMIT_EFLAGS(EFlags);
14121 IEM_MC_ADVANCE_RIP();
14122 IEM_MC_END();
14123 return VINF_SUCCESS;
14124
14125 case IEMMODE_64BIT:
14126 IEM_MC_BEGIN(3, 2);
14127 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14128 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14129 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14131
14132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14134 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14135 IEM_MC_FETCH_EFLAGS(EFlags);
14136 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14137
14138 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14139 IEM_MC_COMMIT_EFLAGS(EFlags);
14140 IEM_MC_ADVANCE_RIP();
14141 IEM_MC_END();
14142 return VINF_SUCCESS;
14143
14144 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14145 }
14146 }
14147}
14148
14149
14150/** Opcode 0xd2. */
14151FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
14152{
14153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14154 PCIEMOPSHIFTSIZES pImpl;
14155 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14156 {
14157 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
14158 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
14159 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
14160 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
14161 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
14162 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
14163 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
14164 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14165 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
14166 }
14167 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14168
14169 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14170 {
14171 /* register */
14172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14173 IEM_MC_BEGIN(3, 0);
14174 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14175 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14176 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14177 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14178 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14179 IEM_MC_REF_EFLAGS(pEFlags);
14180 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14181 IEM_MC_ADVANCE_RIP();
14182 IEM_MC_END();
14183 }
14184 else
14185 {
14186 /* memory */
14187 IEM_MC_BEGIN(3, 2);
14188 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14189 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14192
14193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14195 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14196 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14197 IEM_MC_FETCH_EFLAGS(EFlags);
14198 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14199
14200 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
14201 IEM_MC_COMMIT_EFLAGS(EFlags);
14202 IEM_MC_ADVANCE_RIP();
14203 IEM_MC_END();
14204 }
14205 return VINF_SUCCESS;
14206}
14207
14208
14209/** Opcode 0xd3. */
14210FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
14211{
14212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14213 PCIEMOPSHIFTSIZES pImpl;
14214 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14215 {
14216 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
14217 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
14218 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
14219 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
14220 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
14221 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
14222 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
14223 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14224 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
14225 }
14226 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14227
14228 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14229 {
14230 /* register */
14231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14232 switch (pVCpu->iem.s.enmEffOpSize)
14233 {
14234 case IEMMODE_16BIT:
14235 IEM_MC_BEGIN(3, 0);
14236 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14237 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14238 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14239 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14240 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14241 IEM_MC_REF_EFLAGS(pEFlags);
14242 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14243 IEM_MC_ADVANCE_RIP();
14244 IEM_MC_END();
14245 return VINF_SUCCESS;
14246
14247 case IEMMODE_32BIT:
14248 IEM_MC_BEGIN(3, 0);
14249 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14250 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14251 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14252 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14253 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14254 IEM_MC_REF_EFLAGS(pEFlags);
14255 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14256 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
14257 IEM_MC_ADVANCE_RIP();
14258 IEM_MC_END();
14259 return VINF_SUCCESS;
14260
14261 case IEMMODE_64BIT:
14262 IEM_MC_BEGIN(3, 0);
14263 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14264 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14265 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14266 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14267 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14268 IEM_MC_REF_EFLAGS(pEFlags);
14269 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14270 IEM_MC_ADVANCE_RIP();
14271 IEM_MC_END();
14272 return VINF_SUCCESS;
14273
14274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14275 }
14276 }
14277 else
14278 {
14279 /* memory */
14280 switch (pVCpu->iem.s.enmEffOpSize)
14281 {
14282 case IEMMODE_16BIT:
14283 IEM_MC_BEGIN(3, 2);
14284 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14285 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14286 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14288
14289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14291 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14292 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14293 IEM_MC_FETCH_EFLAGS(EFlags);
14294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14295
14296 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14297 IEM_MC_COMMIT_EFLAGS(EFlags);
14298 IEM_MC_ADVANCE_RIP();
14299 IEM_MC_END();
14300 return VINF_SUCCESS;
14301
14302 case IEMMODE_32BIT:
14303 IEM_MC_BEGIN(3, 2);
14304 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14305 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14306 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14308
14309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14311 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14312 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14313 IEM_MC_FETCH_EFLAGS(EFlags);
14314 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14315
14316 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14317 IEM_MC_COMMIT_EFLAGS(EFlags);
14318 IEM_MC_ADVANCE_RIP();
14319 IEM_MC_END();
14320 return VINF_SUCCESS;
14321
14322 case IEMMODE_64BIT:
14323 IEM_MC_BEGIN(3, 2);
14324 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14325 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14326 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14328
14329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14331 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14332 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14333 IEM_MC_FETCH_EFLAGS(EFlags);
14334 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14335
14336 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14337 IEM_MC_COMMIT_EFLAGS(EFlags);
14338 IEM_MC_ADVANCE_RIP();
14339 IEM_MC_END();
14340 return VINF_SUCCESS;
14341
14342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14343 }
14344 }
14345}
14346
14347/** Opcode 0xd4. */
14348FNIEMOP_DEF(iemOp_aam_Ib)
14349{
14350 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
14351 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14353 IEMOP_HLP_NO_64BIT();
14354 if (!bImm)
14355 return IEMOP_RAISE_DIVIDE_ERROR();
14356 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
14357}
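
/* Semantics sketch (implemented by iemCImpl_aam): AH = AL / bImm and
 * AL = AL % bImm.  E.g. with AL=0x2f (47), the default form d4 0a ('aam')
 * yields AH=4, AL=7.  bImm=0 is rejected with #DE above, as on real CPUs. */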
14358
14359
14360/** Opcode 0xd5. */
14361FNIEMOP_DEF(iemOp_aad_Ib)
14362{
14363 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
14364 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14366 IEMOP_HLP_NO_64BIT();
14367 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
14368}
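
/* Semantics sketch (implemented by iemCImpl_aad): AL = (AL + AH * bImm) & 0xff
 * and AH = 0.  E.g. AH=4, AL=7 with the default form d5 0a ('aad') gives
 * AL=0x2f (47), reversing the 'aam' example above. */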
14369
14370
14371/** Opcode 0xd6. */
14372FNIEMOP_DEF(iemOp_salc)
14373{
14374 IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
14377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14378 IEMOP_HLP_NO_64BIT();
14379
14380 IEM_MC_BEGIN(0, 0);
14381 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14382 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14383 } IEM_MC_ELSE() {
14384 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14385 } IEM_MC_ENDIF();
14386 IEM_MC_ADVANCE_RIP();
14387 IEM_MC_END();
14388 return VINF_SUCCESS;
14389}
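
/* SALC computes AL = CF ? 0xff : 0x00, as the IEM_MC block above shows.
 * A classic (hypothetical) use is turning a compare into a byte mask:
 *      cmp al, dl
 *      salc            ; al = (al below dl, unsigned) ? 0xff : 0x00
 */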
14390
14391
14392/** Opcode 0xd7. */
14393FNIEMOP_DEF(iemOp_xlat)
14394{
14395 IEMOP_MNEMONIC(xlat, "xlat");
14396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14397 switch (pVCpu->iem.s.enmEffAddrMode)
14398 {
14399 case IEMMODE_16BIT:
14400 IEM_MC_BEGIN(2, 0);
14401 IEM_MC_LOCAL(uint8_t, u8Tmp);
14402 IEM_MC_LOCAL(uint16_t, u16Addr);
14403 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
14404 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
14405 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
14406 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14407 IEM_MC_ADVANCE_RIP();
14408 IEM_MC_END();
14409 return VINF_SUCCESS;
14410
14411 case IEMMODE_32BIT:
14412 IEM_MC_BEGIN(2, 0);
14413 IEM_MC_LOCAL(uint8_t, u8Tmp);
14414 IEM_MC_LOCAL(uint32_t, u32Addr);
14415 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
14416 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
14417 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
14418 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14419 IEM_MC_ADVANCE_RIP();
14420 IEM_MC_END();
14421 return VINF_SUCCESS;
14422
14423 case IEMMODE_64BIT:
14424 IEM_MC_BEGIN(2, 0);
14425 IEM_MC_LOCAL(uint8_t, u8Tmp);
14426 IEM_MC_LOCAL(uint64_t, u64Addr);
14427 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
14428 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
14429 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
14430 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14431 IEM_MC_ADVANCE_RIP();
14432 IEM_MC_END();
14433 return VINF_SUCCESS;
14434
14435 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14436 }
14437}
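
/* All three variants above implement AL = mem8[xBX + ZeroExtend(AL)] in the
 * effective segment (DS unless overridden).  Hypothetical use: point xBX at
 * a 16-byte table "0123456789abcdef" and 'xlat' turns the nibble in AL into
 * its ASCII hex digit. */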
14438
14439
14440/**
14441 * Common worker for FPU instructions working on ST0 and STn, and storing the
14442 * result in ST0.
14443 *
14444 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14445 */
14446FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14447{
14448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14449
14450 IEM_MC_BEGIN(3, 1);
14451 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14452 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14453 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14454 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14455
14456 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14457 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14458 IEM_MC_PREPARE_FPU_USAGE();
14459 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14460 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14461 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14462 IEM_MC_ELSE()
14463 IEM_MC_FPU_STACK_UNDERFLOW(0);
14464 IEM_MC_ENDIF();
14465 IEM_MC_ADVANCE_RIP();
14466
14467 IEM_MC_END();
14468 return VINF_SUCCESS;
14469}
14470
14471
14472/**
14473 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14474 * flags.
14475 *
14476 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14477 */
14478FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14479{
14480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14481
14482 IEM_MC_BEGIN(3, 1);
14483 IEM_MC_LOCAL(uint16_t, u16Fsw);
14484 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14485 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14486 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14487
14488 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14489 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14490 IEM_MC_PREPARE_FPU_USAGE();
14491 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14492 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14493 IEM_MC_UPDATE_FSW(u16Fsw);
14494 IEM_MC_ELSE()
14495 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14496 IEM_MC_ENDIF();
14497 IEM_MC_ADVANCE_RIP();
14498
14499 IEM_MC_END();
14500 return VINF_SUCCESS;
14501}
14502
14503
14504/**
14505 * Common worker for FPU instructions working on ST0 and STn, only affecting
14506 * flags, and popping when done.
14507 *
14508 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14509 */
14510FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14511{
14512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14513
14514 IEM_MC_BEGIN(3, 1);
14515 IEM_MC_LOCAL(uint16_t, u16Fsw);
14516 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14517 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14518 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14519
14520 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14521 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14522 IEM_MC_PREPARE_FPU_USAGE();
14523 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14524 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14525 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14526 IEM_MC_ELSE()
14527 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14528 IEM_MC_ENDIF();
14529 IEM_MC_ADVANCE_RIP();
14530
14531 IEM_MC_END();
14532 return VINF_SUCCESS;
14533}
14534
14535
14536/** Opcode 0xd8 11/0. */
14537FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
14538{
14539 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
14540 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
14541}
14542
14543
14544/** Opcode 0xd8 11/1. */
14545FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
14546{
14547 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
14548 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
14549}
14550
14551
14552/** Opcode 0xd8 11/2. */
14553FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
14554{
14555 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
14556 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
14557}
14558
14559
14560/** Opcode 0xd8 11/3. */
14561FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
14562{
14563 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
14564 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
14565}
14566
14567
14568/** Opcode 0xd8 11/4. */
14569FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
14570{
14571 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
14572 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
14573}
14574
14575
14576/** Opcode 0xd8 11/5. */
14577FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
14578{
14579 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
14580 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
14581}
14582
14583
14584/** Opcode 0xd8 11/6. */
14585FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
14586{
14587 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
14588 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
14589}
14590
14591
14592/** Opcode 0xd8 11/7. */
14593FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
14594{
14595 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
14596 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
14597}
14598
14599
14600/**
14601 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14602 * the result in ST0.
14603 *
14604 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14605 */
14606FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14607{
14608 IEM_MC_BEGIN(3, 3);
14609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14610 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14611 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14612 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14613 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14614 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14615
14616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14618
14619 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14620 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14621 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14622
14623 IEM_MC_PREPARE_FPU_USAGE();
14624 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14625 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14626 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14627 IEM_MC_ELSE()
14628 IEM_MC_FPU_STACK_UNDERFLOW(0);
14629 IEM_MC_ENDIF();
14630 IEM_MC_ADVANCE_RIP();
14631
14632 IEM_MC_END();
14633 return VINF_SUCCESS;
14634}
14635
14636
14637/** Opcode 0xd8 !11/0. */
14638FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
14639{
14640 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
14641 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
14642}
14643
14644
14645/** Opcode 0xd8 !11/1. */
14646FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
14647{
14648 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
14649 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
14650}
14651
14652
14653/** Opcode 0xd8 !11/2. */
14654FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
14655{
14656 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
14657
14658 IEM_MC_BEGIN(3, 3);
14659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14660 IEM_MC_LOCAL(uint16_t, u16Fsw);
14661 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14662 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14663 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14664 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14665
14666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14668
14669 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14670 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14671 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14672
14673 IEM_MC_PREPARE_FPU_USAGE();
14674 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14675 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14676 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14677 IEM_MC_ELSE()
14678 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14679 IEM_MC_ENDIF();
14680 IEM_MC_ADVANCE_RIP();
14681
14682 IEM_MC_END();
14683 return VINF_SUCCESS;
14684}
14685
14686
14687/** Opcode 0xd8 !11/3. */
14688FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
14689{
14690 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
14691
14692 IEM_MC_BEGIN(3, 3);
14693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14694 IEM_MC_LOCAL(uint16_t, u16Fsw);
14695 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14696 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14697 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14698 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14699
14700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14702
14703 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14704 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14705 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14706
14707 IEM_MC_PREPARE_FPU_USAGE();
14708 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14709 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14710 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14711 IEM_MC_ELSE()
14712 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14713 IEM_MC_ENDIF();
14714 IEM_MC_ADVANCE_RIP();
14715
14716 IEM_MC_END();
14717 return VINF_SUCCESS;
14718}
14719
14720
14721/** Opcode 0xd8 !11/4. */
14722FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
14723{
14724 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
14725 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
14726}
14727
14728
14729/** Opcode 0xd8 !11/5. */
14730FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
14731{
14732 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
14733 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
14734}
14735
14736
14737/** Opcode 0xd8 !11/6. */
14738FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
14739{
14740 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
14741 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
14742}
14743
14744
14745/** Opcode 0xd8 !11/7. */
14746FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
14747{
14748 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
14749 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
14750}
14751
14752
14753/** Opcode 0xd8. */
14754FNIEMOP_DEF(iemOp_EscF0)
14755{
14756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14757 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
14758
14759 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14760 {
14761 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14762 {
14763 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
14764 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
14765 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
14766 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
14767 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
14768 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
14769 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
14770 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
14771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14772 }
14773 }
14774 else
14775 {
14776 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14777 {
14778 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
14779 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
14780 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
14781 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
14782 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
14783 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
14784 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
14785 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
14786 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14787 }
14788 }
14789}
14790
14791
14792/** Opcode 0xd9 /0 mem32real
14793 * @sa iemOp_fld_m64r */
14794FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
14795{
14796 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
14797
14798 IEM_MC_BEGIN(2, 3);
14799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14800 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14801 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
14802 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14803 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
14804
14805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14807
14808 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14809 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14810 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14811
14812 IEM_MC_PREPARE_FPU_USAGE();
14813 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14814 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
14815 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14816 IEM_MC_ELSE()
14817 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14818 IEM_MC_ENDIF();
14819 IEM_MC_ADVANCE_RIP();
14820
14821 IEM_MC_END();
14822 return VINF_SUCCESS;
14823}
14824
14825
14826/** Opcode 0xd9 !11/2 mem32real */
14827FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
14828{
14829 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
14830 IEM_MC_BEGIN(3, 2);
14831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14832 IEM_MC_LOCAL(uint16_t, u16Fsw);
14833 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14834 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14835 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14836
14837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14839 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14840 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14841
14842 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14843 IEM_MC_PREPARE_FPU_USAGE();
14844 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14845 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14846 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14847 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14848 IEM_MC_ELSE()
14849 IEM_MC_IF_FCW_IM()
14850 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14851 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14852 IEM_MC_ENDIF();
14853 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14854 IEM_MC_ENDIF();
14855 IEM_MC_ADVANCE_RIP();
14856
14857 IEM_MC_END();
14858 return VINF_SUCCESS;
14859}
14860
14861
14862/** Opcode 0xd9 !11/3 */
14863FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
14864{
14865 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
14866 IEM_MC_BEGIN(3, 2);
14867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14868 IEM_MC_LOCAL(uint16_t, u16Fsw);
14869 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14870 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14871 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14872
14873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14875 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14876 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14877
14878 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14879 IEM_MC_PREPARE_FPU_USAGE();
14880 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14881 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14882 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14883 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14884 IEM_MC_ELSE()
14885 IEM_MC_IF_FCW_IM()
14886 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14887 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14888 IEM_MC_ENDIF();
14889 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14890 IEM_MC_ENDIF();
14891 IEM_MC_ADVANCE_RIP();
14892
14893 IEM_MC_END();
14894 return VINF_SUCCESS;
14895}
14896
14897
14898/** Opcode 0xd9 !11/4 */
14899FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
14900{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
14902 IEM_MC_BEGIN(3, 0);
14903 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14904 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14905 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
14906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14908 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14909 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14910 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14911 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
14912 IEM_MC_END();
14913 return VINF_SUCCESS;
14914}
14915
14916
14917/** Opcode 0xd9 !11/5 */
14918FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14919{
14920 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14921 IEM_MC_BEGIN(1, 1);
14922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_ARG(uint16_t, u16Fcw, 0);
14924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14926 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14927 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14930 IEM_MC_END();
14931 return VINF_SUCCESS;
14932}
14933
14934
14935/** Opcode 0xd9 !11/6 */
14936FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14937{
    IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14939 IEM_MC_BEGIN(3, 0);
14940 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14941 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14942 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14945 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14946 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14947 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14948 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14949 IEM_MC_END();
14950 return VINF_SUCCESS;
14951}
14952
14953
14954/** Opcode 0xd9 !11/7 */
14955FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14956{
14957 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14958 IEM_MC_BEGIN(2, 0);
14959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14960 IEM_MC_LOCAL(uint16_t, u16Fcw);
14961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14963 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14964 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14965 IEM_MC_FETCH_FCW(u16Fcw);
14966 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14967 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14968 IEM_MC_END();
14969 return VINF_SUCCESS;
14970}
14971
14972
14973/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
14974FNIEMOP_DEF(iemOp_fnop)
14975{
14976 IEMOP_MNEMONIC(fnop, "fnop");
14977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14978
14979 IEM_MC_BEGIN(0, 0);
14980 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14981 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14982 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: It looks like FNOP leaves FOP alone but updates FPUIP.
     *        Could be an Intel optimization. Investigate. */
14985 IEM_MC_UPDATE_FPU_OPCODE_IP();
14986 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14987 IEM_MC_END();
14988 return VINF_SUCCESS;
14989}
14990
14991
14992/** Opcode 0xd9 11/0 stN */
14993FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14994{
14995 IEMOP_MNEMONIC(fld_stN, "fld stN");
14996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14997
    /** @todo Testcase: Check if this raises \#MF? Intel's docs say it doesn't,
     *        while AMD's indicate that it does. */
15000 IEM_MC_BEGIN(0, 2);
15001 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
15002 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15003 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15004 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15005
15006 IEM_MC_PREPARE_FPU_USAGE();
15007 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
15008 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
15009 IEM_MC_PUSH_FPU_RESULT(FpuRes);
15010 IEM_MC_ELSE()
15011 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
15012 IEM_MC_ENDIF();
15013
15014 IEM_MC_ADVANCE_RIP();
15015 IEM_MC_END();
15016
15017 return VINF_SUCCESS;
15018}
15019
15020
15021/** Opcode 0xd9 11/3 stN */
15022FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
15023{
15024 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
15025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15026
    /** @todo Testcase: Check if this raises \#MF? Intel's docs say it doesn't,
     *        while AMD's indicate that it does. */
15029 IEM_MC_BEGIN(1, 3);
15030 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
15031 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
15032 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15033 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
15034 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15035 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15036
15037 IEM_MC_PREPARE_FPU_USAGE();
15038 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
15039 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
15040 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
15041 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15042 IEM_MC_ELSE()
15043 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
15044 IEM_MC_ENDIF();
15045
15046 IEM_MC_ADVANCE_RIP();
15047 IEM_MC_END();
15048
15049 return VINF_SUCCESS;
15050}
15051
15052
15053/** Opcode 0xd9 11/4, 0xdd 11/2. */
15054FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
15055{
15056 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
15057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15058
    /* "fstp st0" (fstp st0,st0) is frequently used as a documented stand-in
       for the undocumented "ffreep st0". */
15060 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
15061 if (!iDstReg)
15062 {
15063 IEM_MC_BEGIN(0, 1);
15064 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
15065 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15066 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15067
15068 IEM_MC_PREPARE_FPU_USAGE();
15069 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
15070 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
15071 IEM_MC_ELSE()
15072 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
15073 IEM_MC_ENDIF();
15074
15075 IEM_MC_ADVANCE_RIP();
15076 IEM_MC_END();
15077 }
15078 else
15079 {
15080 IEM_MC_BEGIN(0, 2);
15081 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
15082 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15083 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15084 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15085
15086 IEM_MC_PREPARE_FPU_USAGE();
15087 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15088 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
15089 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
15090 IEM_MC_ELSE()
15091 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
15092 IEM_MC_ENDIF();
15093
15094 IEM_MC_ADVANCE_RIP();
15095 IEM_MC_END();
15096 }
15097 return VINF_SUCCESS;
15098}
15099
15100
15101/**
15102 * Common worker for FPU instructions working on ST0 and replaces it with the
15103 * result, i.e. unary operators.
15104 *
15105 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15106 */
15107FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
15108{
15109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15110
15111 IEM_MC_BEGIN(2, 1);
15112 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15113 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15114 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15115
15116 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15117 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15118 IEM_MC_PREPARE_FPU_USAGE();
15119 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15120 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
15121 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15122 IEM_MC_ELSE()
15123 IEM_MC_FPU_STACK_UNDERFLOW(0);
15124 IEM_MC_ENDIF();
15125 IEM_MC_ADVANCE_RIP();
15126
15127 IEM_MC_END();
15128 return VINF_SUCCESS;
15129}
15130
15131
15132/** Opcode 0xd9 0xe0. */
15133FNIEMOP_DEF(iemOp_fchs)
15134{
15135 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
15136 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
15137}
15138
15139
15140/** Opcode 0xd9 0xe1. */
15141FNIEMOP_DEF(iemOp_fabs)
15142{
15143 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
15144 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
15145}
15146
15147
15148/**
15149 * Common worker for FPU instructions working on ST0 and only returns FSW.
15150 *
15151 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15152 */
15153FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
15154{
15155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15156
15157 IEM_MC_BEGIN(2, 1);
15158 IEM_MC_LOCAL(uint16_t, u16Fsw);
15159 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15160 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15161
15162 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15163 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15164 IEM_MC_PREPARE_FPU_USAGE();
15165 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15166 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
15167 IEM_MC_UPDATE_FSW(u16Fsw);
15168 IEM_MC_ELSE()
15169 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
15170 IEM_MC_ENDIF();
15171 IEM_MC_ADVANCE_RIP();
15172
15173 IEM_MC_END();
15174 return VINF_SUCCESS;
15175}
15176
15177
15178/** Opcode 0xd9 0xe4. */
15179FNIEMOP_DEF(iemOp_ftst)
15180{
15181 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
15182 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
15183}
15184
15185
15186/** Opcode 0xd9 0xe5. */
15187FNIEMOP_DEF(iemOp_fxam)
15188{
15189 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
15190 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
15191}
15192
15193
15194/**
15195 * Common worker for FPU instructions pushing a constant onto the FPU stack.
15196 *
15197 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15198 */
15199FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
15200{
15201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15202
15203 IEM_MC_BEGIN(1, 1);
15204 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15205 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15206
15207 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15208 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15209 IEM_MC_PREPARE_FPU_USAGE();
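/* ST(7) is the slot that becomes ST(0) once the push decrements TOP, so that
   is the register which must be empty for the load to succeed. */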
15210 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15211 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
15212 IEM_MC_PUSH_FPU_RESULT(FpuRes);
15213 IEM_MC_ELSE()
15214 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
15215 IEM_MC_ENDIF();
15216 IEM_MC_ADVANCE_RIP();
15217
15218 IEM_MC_END();
15219 return VINF_SUCCESS;
15220}
15221
15222
15223/** Opcode 0xd9 0xe8. */
15224FNIEMOP_DEF(iemOp_fld1)
15225{
15226 IEMOP_MNEMONIC(fld1, "fld1");
15227 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
15228}
15229
15230
15231/** Opcode 0xd9 0xe9. */
15232FNIEMOP_DEF(iemOp_fldl2t)
15233{
15234 IEMOP_MNEMONIC(fldl2t, "fldl2t");
15235 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
15236}
15237
15238
15239/** Opcode 0xd9 0xea. */
15240FNIEMOP_DEF(iemOp_fldl2e)
15241{
15242 IEMOP_MNEMONIC(fldl2e, "fldl2e");
15243 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
15244}
15245
15246/** Opcode 0xd9 0xeb. */
15247FNIEMOP_DEF(iemOp_fldpi)
15248{
15249 IEMOP_MNEMONIC(fldpi, "fldpi");
15250 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
15251}
15252
15253
15254/** Opcode 0xd9 0xec. */
15255FNIEMOP_DEF(iemOp_fldlg2)
15256{
15257 IEMOP_MNEMONIC(fldlg2, "fldlg2");
15258 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
15259}
15260
15261/** Opcode 0xd9 0xed. */
15262FNIEMOP_DEF(iemOp_fldln2)
15263{
15264 IEMOP_MNEMONIC(fldln2, "fldln2");
15265 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
15266}
15267
15268
15269/** Opcode 0xd9 0xee. */
15270FNIEMOP_DEF(iemOp_fldz)
15271{
15272 IEMOP_MNEMONIC(fldz, "fldz");
15273 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
15274}
15275
15276
15277/** Opcode 0xd9 0xf0. */
15278FNIEMOP_DEF(iemOp_f2xm1)
15279{
15280 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
15281 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
15282}
15283
15284
15285/**
15286 * Common worker for FPU instructions working on STn and ST0, storing the result
15287 * in STn, and popping the stack unless IE, DE or ZE was raised.
15288 *
15289 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15290 */
15291FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15292{
15293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15294
15295 IEM_MC_BEGIN(3, 1);
15296 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15297 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15298 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15299 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15300
15301 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15302 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15303
15304 IEM_MC_PREPARE_FPU_USAGE();
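/* pr80Value1 refers to the destination ST(i) and pr80Value2 to ST(0); after
   the result has been stored in ST(i), ST(0) is popped off the stack. */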
15305 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15306 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15307 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15308 IEM_MC_ELSE()
15309 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15310 IEM_MC_ENDIF();
15311 IEM_MC_ADVANCE_RIP();
15312
15313 IEM_MC_END();
15314 return VINF_SUCCESS;
15315}
15316
15317
15318/** Opcode 0xd9 0xf1. */
15319FNIEMOP_DEF(iemOp_fyl2x)
15320{
15321 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
15322 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15323}
15324
15325
15326/**
15327 * Common worker for FPU instructions working on ST0 and having two outputs, one
15328 * replacing ST0 and one pushed onto the stack.
15329 *
15330 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15331 */
15332FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15333{
15334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15335
15336 IEM_MC_BEGIN(2, 1);
15337 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15338 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15339 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15340
15341 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15342 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15343 IEM_MC_PREPARE_FPU_USAGE();
15344 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15345 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15346 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15347 IEM_MC_ELSE()
15348 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15349 IEM_MC_ENDIF();
15350 IEM_MC_ADVANCE_RIP();
15351
15352 IEM_MC_END();
15353 return VINF_SUCCESS;
15354}
15355
15356
15357/** Opcode 0xd9 0xf2. */
15358FNIEMOP_DEF(iemOp_fptan)
15359{
15360 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
15361 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
15362}
15363
15364
15365/** Opcode 0xd9 0xf3. */
15366FNIEMOP_DEF(iemOp_fpatan)
15367{
15368 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
15369 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
15370}
15371
15372
15373/** Opcode 0xd9 0xf4. */
15374FNIEMOP_DEF(iemOp_fxtract)
15375{
15376 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
15377 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
15378}
15379
15380
15381/** Opcode 0xd9 0xf5. */
15382FNIEMOP_DEF(iemOp_fprem1)
15383{
15384 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
15385 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
15386}
15387
15388
15389/** Opcode 0xd9 0xf6. */
15390FNIEMOP_DEF(iemOp_fdecstp)
15391{
15392 IEMOP_MNEMONIC(fdecstp, "fdecstp");
15393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15394 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
15395 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15396 * FINCSTP and FDECSTP. */
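/* Only the TOP field of FSW moves; register contents and tag bits are left
   untouched. */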
15397
15398 IEM_MC_BEGIN(0,0);
15399
15400 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15401 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15402
15403 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15404 IEM_MC_FPU_STACK_DEC_TOP();
15405 IEM_MC_UPDATE_FSW_CONST(0);
15406
15407 IEM_MC_ADVANCE_RIP();
15408 IEM_MC_END();
15409 return VINF_SUCCESS;
15410}
15411
15412
15413/** Opcode 0xd9 0xf7. */
15414FNIEMOP_DEF(iemOp_fincstp)
15415{
15416 IEMOP_MNEMONIC(fincstp, "fincstp");
15417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15418 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
15419 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15420 * FINCSTP and FDECSTP. */
15421
15422 IEM_MC_BEGIN(0,0);
15423
15424 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15425 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15426
15427 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15428 IEM_MC_FPU_STACK_INC_TOP();
15429 IEM_MC_UPDATE_FSW_CONST(0);
15430
15431 IEM_MC_ADVANCE_RIP();
15432 IEM_MC_END();
15433 return VINF_SUCCESS;
15434}
15435
15436
15437/** Opcode 0xd9 0xf8. */
15438FNIEMOP_DEF(iemOp_fprem)
15439{
15440 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
15441 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
15442}
15443
15444
15445/** Opcode 0xd9 0xf9. */
15446FNIEMOP_DEF(iemOp_fyl2xp1)
15447{
15448 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
15449 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
15450}
15451
15452
15453/** Opcode 0xd9 0xfa. */
15454FNIEMOP_DEF(iemOp_fsqrt)
15455{
15456 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
15457 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
15458}
15459
15460
15461/** Opcode 0xd9 0xfb. */
15462FNIEMOP_DEF(iemOp_fsincos)
15463{
15464 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
15465 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
15466}
15467
15468
15469/** Opcode 0xd9 0xfc. */
15470FNIEMOP_DEF(iemOp_frndint)
15471{
15472 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
15473 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
15474}
15475
15476
15477/** Opcode 0xd9 0xfd. */
15478FNIEMOP_DEF(iemOp_fscale)
15479{
15480 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
15481 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
15482}
15483
15484
15485/** Opcode 0xd9 0xfe. */
15486FNIEMOP_DEF(iemOp_fsin)
15487{
15488 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
15489 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
15490}
15491
15492
15493/** Opcode 0xd9 0xff. */
15494FNIEMOP_DEF(iemOp_fcos)
15495{
15496 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
15497 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
15498}
15499
15500
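/* The 0xd9 encodings with mod=3 and reg=4..7 (ModRM bytes 0xe0..0xff) are
   dispatched through this flat table, indexed by bRm - 0xe0; the undefined
   slots raise #UD via iemOp_Invalid. */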
15501/** Used by iemOp_EscF1. */
15502IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
15503{
15504 /* 0xe0 */ iemOp_fchs,
15505 /* 0xe1 */ iemOp_fabs,
15506 /* 0xe2 */ iemOp_Invalid,
15507 /* 0xe3 */ iemOp_Invalid,
15508 /* 0xe4 */ iemOp_ftst,
15509 /* 0xe5 */ iemOp_fxam,
15510 /* 0xe6 */ iemOp_Invalid,
15511 /* 0xe7 */ iemOp_Invalid,
15512 /* 0xe8 */ iemOp_fld1,
15513 /* 0xe9 */ iemOp_fldl2t,
15514 /* 0xea */ iemOp_fldl2e,
15515 /* 0xeb */ iemOp_fldpi,
15516 /* 0xec */ iemOp_fldlg2,
15517 /* 0xed */ iemOp_fldln2,
15518 /* 0xee */ iemOp_fldz,
15519 /* 0xef */ iemOp_Invalid,
15520 /* 0xf0 */ iemOp_f2xm1,
15521 /* 0xf1 */ iemOp_fyl2x,
15522 /* 0xf2 */ iemOp_fptan,
15523 /* 0xf3 */ iemOp_fpatan,
15524 /* 0xf4 */ iemOp_fxtract,
15525 /* 0xf5 */ iemOp_fprem1,
15526 /* 0xf6 */ iemOp_fdecstp,
15527 /* 0xf7 */ iemOp_fincstp,
15528 /* 0xf8 */ iemOp_fprem,
15529 /* 0xf9 */ iemOp_fyl2xp1,
15530 /* 0xfa */ iemOp_fsqrt,
15531 /* 0xfb */ iemOp_fsincos,
15532 /* 0xfc */ iemOp_frndint,
15533 /* 0xfd */ iemOp_fscale,
15534 /* 0xfe */ iemOp_fsin,
15535 /* 0xff */ iemOp_fcos
15536};
15537
15538
15539/** Opcode 0xd9. */
15540FNIEMOP_DEF(iemOp_EscF1)
15541{
15542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15543 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
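/* Compose the 11-bit FPU opcode (FOP): the low three bits of the escape byte
   go in the high byte and the whole ModRM byte in the low one; this is the
   value later reported by FNSTENV/FNSAVE. */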
15544
15545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15546 {
15547 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15548 {
15549 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
15550 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
15551 case 2:
15552 if (bRm == 0xd0)
15553 return FNIEMOP_CALL(iemOp_fnop);
15554 return IEMOP_RAISE_INVALID_OPCODE();
15555 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
15556 case 4:
15557 case 5:
15558 case 6:
15559 case 7:
15560 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
15561 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
15562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15563 }
15564 }
15565 else
15566 {
15567 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15568 {
15569 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
15570 case 1: return IEMOP_RAISE_INVALID_OPCODE();
15571 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
15572 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
15573 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
15574 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
15575 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
15576 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
15577 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15578 }
15579 }
15580}
15581
15582
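/* The four FCMOVcc forms below (0xda, mod=3, reg=0..3) copy ST(i) to ST(0)
   when the given integer EFLAGS condition holds: CF (below), ZF (equal),
   CF|ZF (below or equal) and PF (unordered); the negated forms live under
   0xdb. These require a P6-class CPU (CPUID CMOV + FPU feature bits). */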
15583/** Opcode 0xda 11/0. */
15584FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
15585{
15586 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
15587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15588
15589 IEM_MC_BEGIN(0, 1);
15590 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15591
15592 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15593 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15594
15595 IEM_MC_PREPARE_FPU_USAGE();
15596 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15597 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
15598 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15599 IEM_MC_ENDIF();
15600 IEM_MC_UPDATE_FPU_OPCODE_IP();
15601 IEM_MC_ELSE()
15602 IEM_MC_FPU_STACK_UNDERFLOW(0);
15603 IEM_MC_ENDIF();
15604 IEM_MC_ADVANCE_RIP();
15605
15606 IEM_MC_END();
15607 return VINF_SUCCESS;
15608}
15609
15610
15611/** Opcode 0xda 11/1. */
15612FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
15613{
15614 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
15615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15616
15617 IEM_MC_BEGIN(0, 1);
15618 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15619
15620 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15621 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15622
15623 IEM_MC_PREPARE_FPU_USAGE();
15624 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15625 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
15626 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15627 IEM_MC_ENDIF();
15628 IEM_MC_UPDATE_FPU_OPCODE_IP();
15629 IEM_MC_ELSE()
15630 IEM_MC_FPU_STACK_UNDERFLOW(0);
15631 IEM_MC_ENDIF();
15632 IEM_MC_ADVANCE_RIP();
15633
15634 IEM_MC_END();
15635 return VINF_SUCCESS;
15636}
15637
15638
15639/** Opcode 0xda 11/2. */
15640FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
15641{
15642 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
15643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15644
15645 IEM_MC_BEGIN(0, 1);
15646 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15647
15648 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15649 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15650
15651 IEM_MC_PREPARE_FPU_USAGE();
15652 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15653 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15654 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15655 IEM_MC_ENDIF();
15656 IEM_MC_UPDATE_FPU_OPCODE_IP();
15657 IEM_MC_ELSE()
15658 IEM_MC_FPU_STACK_UNDERFLOW(0);
15659 IEM_MC_ENDIF();
15660 IEM_MC_ADVANCE_RIP();
15661
15662 IEM_MC_END();
15663 return VINF_SUCCESS;
15664}
15665
15666
15667/** Opcode 0xda 11/3. */
15668FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
15669{
15670 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
15671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15672
15673 IEM_MC_BEGIN(0, 1);
15674 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15675
15676 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15677 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15678
15679 IEM_MC_PREPARE_FPU_USAGE();
15680 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15681 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
15682 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15683 IEM_MC_ENDIF();
15684 IEM_MC_UPDATE_FPU_OPCODE_IP();
15685 IEM_MC_ELSE()
15686 IEM_MC_FPU_STACK_UNDERFLOW(0);
15687 IEM_MC_ENDIF();
15688 IEM_MC_ADVANCE_RIP();
15689
15690 IEM_MC_END();
15691 return VINF_SUCCESS;
15692}
15693
15694
15695/**
15696 * Common worker for FPU instructions working on ST0 and STn, only affecting
15697 * flags, and popping twice when done.
15698 *
15699 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15700 */
15701FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15702{
15703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15704
15705 IEM_MC_BEGIN(3, 1);
15706 IEM_MC_LOCAL(uint16_t, u16Fsw);
15707 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15708 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15709 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15710
15711 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15712 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15713
15714 IEM_MC_PREPARE_FPU_USAGE();
15715 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15716 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15717 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15718 IEM_MC_ELSE()
15719 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15720 IEM_MC_ENDIF();
15721 IEM_MC_ADVANCE_RIP();
15722
15723 IEM_MC_END();
15724 return VINF_SUCCESS;
15725}
15726
15727
15728/** Opcode 0xda 0xe9. */
15729FNIEMOP_DEF(iemOp_fucompp)
15730{
15731 IEMOP_MNEMONIC(fucompp_st0_st1, "fucompp st0,st1");
15732 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
15733}
15734
15735
15736/**
15737 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15738 * the result in ST0.
15739 *
15740 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15741 */
15742FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15743{
15744 IEM_MC_BEGIN(3, 3);
15745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15746 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15747 IEM_MC_LOCAL(int32_t, i32Val2);
15748 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15749 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15750 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15751
15752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15754
15755 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15756 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15757 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15758
15759 IEM_MC_PREPARE_FPU_USAGE();
15760 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15761 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15762 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15763 IEM_MC_ELSE()
15764 IEM_MC_FPU_STACK_UNDERFLOW(0);
15765 IEM_MC_ENDIF();
15766 IEM_MC_ADVANCE_RIP();
15767
15768 IEM_MC_END();
15769 return VINF_SUCCESS;
15770}
15771
15772
15773/** Opcode 0xda !11/0. */
15774FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
15775{
15776 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
15777 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
15778}
15779
15780
15781/** Opcode 0xda !11/1. */
15782FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
15783{
15784 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
15785 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
15786}
15787
15788
15789/** Opcode 0xda !11/2. */
15790FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
15791{
15792 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
15793
15794 IEM_MC_BEGIN(3, 3);
15795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15796 IEM_MC_LOCAL(uint16_t, u16Fsw);
15797 IEM_MC_LOCAL(int32_t, i32Val2);
15798 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15799 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15800 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15801
15802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15804
15805 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15806 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15807 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15808
15809 IEM_MC_PREPARE_FPU_USAGE();
15810 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15811 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15812 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15813 IEM_MC_ELSE()
15814 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15815 IEM_MC_ENDIF();
15816 IEM_MC_ADVANCE_RIP();
15817
15818 IEM_MC_END();
15819 return VINF_SUCCESS;
15820}
15821
15822
15823/** Opcode 0xda !11/3. */
15824FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
15825{
15826 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
15827
15828 IEM_MC_BEGIN(3, 3);
15829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15830 IEM_MC_LOCAL(uint16_t, u16Fsw);
15831 IEM_MC_LOCAL(int32_t, i32Val2);
15832 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15833 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15834 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15835
15836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15838
15839 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15840 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15841 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15842
15843 IEM_MC_PREPARE_FPU_USAGE();
15844 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15845 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15846 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15847 IEM_MC_ELSE()
15848 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15849 IEM_MC_ENDIF();
15850 IEM_MC_ADVANCE_RIP();
15851
15852 IEM_MC_END();
15853 return VINF_SUCCESS;
15854}
15855
15856
15857/** Opcode 0xda !11/4. */
15858FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
15859{
15860 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
15861 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
15862}
15863
15864
15865/** Opcode 0xda !11/5. */
15866FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
15867{
15868 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
15869 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
15870}
15871
15872
15873/** Opcode 0xda !11/6. */
15874FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
15875{
15876 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
15877 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
15878}
15879
15880
15881/** Opcode 0xda !11/7. */
15882FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
15883{
15884 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
15885 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
15886}
15887
15888
15889/** Opcode 0xda. */
15890FNIEMOP_DEF(iemOp_EscF2)
15891{
15892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15893 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
15894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15895 {
15896 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15897 {
15898 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
15899 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
15900 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
15901 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
15902 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15903 case 5:
15904 if (bRm == 0xe9)
15905 return FNIEMOP_CALL(iemOp_fucompp);
15906 return IEMOP_RAISE_INVALID_OPCODE();
15907 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15908 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15910 }
15911 }
15912 else
15913 {
15914 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15915 {
15916 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
15917 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
15918 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
15919 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
15920 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
15921 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
15922 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
15923 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
15924 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15925 }
15926 }
15927}
15928
15929
15930/** Opcode 0xdb !11/0. */
15931FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
15932{
15933 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
15934
15935 IEM_MC_BEGIN(2, 3);
15936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15937 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15938 IEM_MC_LOCAL(int32_t, i32Val);
15939 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15940 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
15941
15942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15944
15945 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15946 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15947 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15948
15949 IEM_MC_PREPARE_FPU_USAGE();
15950 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15951 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
15952 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15953 IEM_MC_ELSE()
15954 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15955 IEM_MC_ENDIF();
15956 IEM_MC_ADVANCE_RIP();
15957
15958 IEM_MC_END();
15959 return VINF_SUCCESS;
15960}
15961
15962
15963/** Opcode 0xdb !11/1. */
15964FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
15965{
15966 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
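/* FISTTP is an SSE3 addition: it always converts with truncation (round
   toward zero) regardless of FCW.RC, then pops the register stack. */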
15967 IEM_MC_BEGIN(3, 2);
15968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15969 IEM_MC_LOCAL(uint16_t, u16Fsw);
15970 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15971 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15972 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15973
15974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15976 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15977 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15978
15979 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15980 IEM_MC_PREPARE_FPU_USAGE();
15981 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15982 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15983 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15984 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15985 IEM_MC_ELSE()
15986 IEM_MC_IF_FCW_IM()
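/* The invalid-operation exception is masked (FCW.IM=1), so the masked
   response, the integer indefinite value, is stored. */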
15987 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15988 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15989 IEM_MC_ENDIF();
15990 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15991 IEM_MC_ENDIF();
15992 IEM_MC_ADVANCE_RIP();
15993
15994 IEM_MC_END();
15995 return VINF_SUCCESS;
15996}
15997
15998
15999/** Opcode 0xdb !11/2. */
16000FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
16001{
16002 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
16003 IEM_MC_BEGIN(3, 2);
16004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16005 IEM_MC_LOCAL(uint16_t, u16Fsw);
16006 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16007 IEM_MC_ARG(int32_t *, pi32Dst, 1);
16008 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16009
16010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16012 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16013 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16014
16015 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
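/* The destination is mapped before any FPU state is touched; this way a #PF
   or #GP on the store is taken before FSW or the register stack changes. */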
16016 IEM_MC_PREPARE_FPU_USAGE();
16017 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16018 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
16019 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
16020 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16021 IEM_MC_ELSE()
16022 IEM_MC_IF_FCW_IM()
16023 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
16024 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
16025 IEM_MC_ENDIF();
16026 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16027 IEM_MC_ENDIF();
16028 IEM_MC_ADVANCE_RIP();
16029
16030 IEM_MC_END();
16031 return VINF_SUCCESS;
16032}
16033
16034
16035/** Opcode 0xdb !11/3. */
16036FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
16037{
16038 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
16039 IEM_MC_BEGIN(3, 2);
16040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16041 IEM_MC_LOCAL(uint16_t, u16Fsw);
16042 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16043 IEM_MC_ARG(int32_t *, pi32Dst, 1);
16044 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16045
16046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16048 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16049 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16050
16051 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16052 IEM_MC_PREPARE_FPU_USAGE();
16053 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16054 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
16055 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
16056 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16057 IEM_MC_ELSE()
16058 IEM_MC_IF_FCW_IM()
16059 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
16060 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
16061 IEM_MC_ENDIF();
16062 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16063 IEM_MC_ENDIF();
16064 IEM_MC_ADVANCE_RIP();
16065
16066 IEM_MC_END();
16067 return VINF_SUCCESS;
16068}
16069
16070
16071/** Opcode 0xdb !11/5. */
16072FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
16073{
16074 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
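/* No format conversion takes place here; the ten bytes are loaded verbatim,
   unlike the widening m32r/m64r forms. */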
16075
16076 IEM_MC_BEGIN(2, 3);
16077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16078 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16079 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
16080 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16081 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
16082
16083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16085
16086 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16087 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16088 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16089
16090 IEM_MC_PREPARE_FPU_USAGE();
16091 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16092 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
16093 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16094 IEM_MC_ELSE()
16095 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16096 IEM_MC_ENDIF();
16097 IEM_MC_ADVANCE_RIP();
16098
16099 IEM_MC_END();
16100 return VINF_SUCCESS;
16101}
16102
16103
16104/** Opcode 0xdb !11/7. */
16105FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
16106{
16107 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
16108 IEM_MC_BEGIN(3, 2);
16109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16110 IEM_MC_LOCAL(uint16_t, u16Fsw);
16111 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16112 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
16113 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16114
16115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16117 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16118 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16119
16120 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16121 IEM_MC_PREPARE_FPU_USAGE();
16122 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16123 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
16124 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
16125 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16126 IEM_MC_ELSE()
16127 IEM_MC_IF_FCW_IM()
16128 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
16129 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
16130 IEM_MC_ENDIF();
16131 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16132 IEM_MC_ENDIF();
16133 IEM_MC_ADVANCE_RIP();
16134
16135 IEM_MC_END();
16136 return VINF_SUCCESS;
16137}
16138
16139
16140/** Opcode 0xdb 11/0. */
16141FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
16142{
16143 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
16144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16145
16146 IEM_MC_BEGIN(0, 1);
16147 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16148
16149 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16150 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16151
16152 IEM_MC_PREPARE_FPU_USAGE();
16153 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16154 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
16155 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16156 IEM_MC_ENDIF();
16157 IEM_MC_UPDATE_FPU_OPCODE_IP();
16158 IEM_MC_ELSE()
16159 IEM_MC_FPU_STACK_UNDERFLOW(0);
16160 IEM_MC_ENDIF();
16161 IEM_MC_ADVANCE_RIP();
16162
16163 IEM_MC_END();
16164 return VINF_SUCCESS;
16165}
16166
16167
16168/** Opcode 0xdb 11/1. */
16169FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
16170{
16171 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
16172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16173
16174 IEM_MC_BEGIN(0, 1);
16175 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16176
16177 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16178 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16179
16180 IEM_MC_PREPARE_FPU_USAGE();
16181 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16182 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
16183 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16184 IEM_MC_ENDIF();
16185 IEM_MC_UPDATE_FPU_OPCODE_IP();
16186 IEM_MC_ELSE()
16187 IEM_MC_FPU_STACK_UNDERFLOW(0);
16188 IEM_MC_ENDIF();
16189 IEM_MC_ADVANCE_RIP();
16190
16191 IEM_MC_END();
16192 return VINF_SUCCESS;
16193}
16194
16195
16196/** Opcode 0xdb 11/2. */
16197FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
16198{
16199 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
16200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16201
16202 IEM_MC_BEGIN(0, 1);
16203 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16204
16205 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16206 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16207
16208 IEM_MC_PREPARE_FPU_USAGE();
16209 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16210 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
16211 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16212 IEM_MC_ENDIF();
16213 IEM_MC_UPDATE_FPU_OPCODE_IP();
16214 IEM_MC_ELSE()
16215 IEM_MC_FPU_STACK_UNDERFLOW(0);
16216 IEM_MC_ENDIF();
16217 IEM_MC_ADVANCE_RIP();
16218
16219 IEM_MC_END();
16220 return VINF_SUCCESS;
16221}
16222
16223
16224/** Opcode 0xdb 11/3. */
16225FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
16226{
16227 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
16228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16229
16230 IEM_MC_BEGIN(0, 1);
16231 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16232
16233 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16234 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16235
16236 IEM_MC_PREPARE_FPU_USAGE();
16237 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16238 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
16239 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16240 IEM_MC_ENDIF();
16241 IEM_MC_UPDATE_FPU_OPCODE_IP();
16242 IEM_MC_ELSE()
16243 IEM_MC_FPU_STACK_UNDERFLOW(0);
16244 IEM_MC_ENDIF();
16245 IEM_MC_ADVANCE_RIP();
16246
16247 IEM_MC_END();
16248 return VINF_SUCCESS;
16249}
16250
16251
16252/** Opcode 0xdb 0xe0. */
16253FNIEMOP_DEF(iemOp_fneni)
16254{
16255 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
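/* FENI/FDISI controlled the interrupt mask of the 8087; from the 80287
   onwards the encodings are accepted but do nothing, hence the empty body. */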
16256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16257 IEM_MC_BEGIN(0,0);
16258 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16259 IEM_MC_ADVANCE_RIP();
16260 IEM_MC_END();
16261 return VINF_SUCCESS;
16262}
16263
16264
16265/** Opcode 0xdb 0xe1. */
16266FNIEMOP_DEF(iemOp_fndisi)
16267{
16268 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
16269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16270 IEM_MC_BEGIN(0,0);
16271 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16272 IEM_MC_ADVANCE_RIP();
16273 IEM_MC_END();
16274 return VINF_SUCCESS;
16275}
16276
16277
16278/** Opcode 0xdb 0xe2. */
16279FNIEMOP_DEF(iemOp_fnclex)
16280{
16281 IEMOP_MNEMONIC(fnclex, "fnclex");
16282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16283
16284 IEM_MC_BEGIN(0,0);
16285 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16286 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16287 IEM_MC_CLEAR_FSW_EX();
16288 IEM_MC_ADVANCE_RIP();
16289 IEM_MC_END();
16290 return VINF_SUCCESS;
16291}
16292
16293
16294/** Opcode 0xdb 0xe3. */
16295FNIEMOP_DEF(iemOp_fninit)
16296{
16297 IEMOP_MNEMONIC(fninit, "fninit");
16298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16299 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
16300}
16301
16302
16303/** Opcode 0xdb 0xe4. */
16304FNIEMOP_DEF(iemOp_fnsetpm)
16305{
16306 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
16307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16308 IEM_MC_BEGIN(0,0);
16309 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16310 IEM_MC_ADVANCE_RIP();
16311 IEM_MC_END();
16312 return VINF_SUCCESS;
16313}
16314
16315
16316/** Opcode 0xdb 0xe5. */
16317FNIEMOP_DEF(iemOp_frstpm)
16318{
16319 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
16320#if 0 /* #UDs on newer CPUs */
16321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16322 IEM_MC_BEGIN(0,0);
16323 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16324 IEM_MC_ADVANCE_RIP();
16325 IEM_MC_END();
16326 return VINF_SUCCESS;
16327#else
16328 return IEMOP_RAISE_INVALID_OPCODE();
16329#endif
16330}
16331
16332
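/* FCOMI/FUCOMI compare ST(0) with ST(i) and set ZF, PF and CF directly in
   EFLAGS rather than in the x87 condition code bits; the only difference
   between the two is that FUCOMI does not raise #IA for QNaN operands. */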
16333/** Opcode 0xdb 11/5. */
16334FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
16335{
16336 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
16337 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
16338}
16339
16340
16341/** Opcode 0xdb 11/6. */
16342FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
16343{
16344 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
16345 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
16346}
16347
16348
16349/** Opcode 0xdb. */
16350FNIEMOP_DEF(iemOp_EscF3)
16351{
16352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16353 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
16354 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16355 {
16356 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16357 {
16358 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
16359 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
16360 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
16361 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
16362 case 4:
16363 switch (bRm)
16364 {
16365 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
16366 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
16367 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
16368 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
16369 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
16370 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
16371 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
16372 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
16373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16374 }
16375 break;
16376 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
16377 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
16378 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16380 }
16381 }
16382 else
16383 {
16384 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16385 {
16386 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
16387 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
16388 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
16389 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
16390 case 4: return IEMOP_RAISE_INVALID_OPCODE();
16391 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
16392 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16393 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
16394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16395 }
16396 }
16397}
16398
16399
16400/**
16401 * Common worker for FPU instructions working on STn and ST0, and storing the
16402 * result in STn unless IE, DE or ZE was raised.
16403 *
16404 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16405 */
16406FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
16407{
16408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16409
16410 IEM_MC_BEGIN(3, 1);
16411 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16412 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16413 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16414 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
16415
16416 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16417 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16418
16419 IEM_MC_PREPARE_FPU_USAGE();
16420 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
16421 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
16422 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16423 IEM_MC_ELSE()
16424 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16425 IEM_MC_ENDIF();
16426 IEM_MC_ADVANCE_RIP();
16427
16428 IEM_MC_END();
16429 return VINF_SUCCESS;
16430}
16431
16432
16433/** Opcode 0xdc 11/0. */
16434FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
16435{
16436 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
16437 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
16438}
16439
16440
16441/** Opcode 0xdc 11/1. */
16442FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
16443{
16444 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
16445 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
16446}
16447
16448
16449/** Opcode 0xdc 11/4. */
16450FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
16451{
16452 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
16453 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
16454}
16455
16456
16457/** Opcode 0xdc 11/5. */
16458FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
16459{
16460 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
16461 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
16462}
16463
16464
16465/** Opcode 0xdc 11/6. */
16466FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
16467{
16468 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
16469 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
16470}
16471
16472
16473/** Opcode 0xdc 11/7. */
16474FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
16475{
16476 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
16477 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
16478}
16479
16480
16481/**
16482 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16483 * memory operand, and storing the result in ST0.
16484 *
16485 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16486 */
16487FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
16488{
16489 IEM_MC_BEGIN(3, 3);
16490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16491 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16492 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16493 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16494 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16495 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16496
16497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16499 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16500 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16501
16502 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16503 IEM_MC_PREPARE_FPU_USAGE();
16504 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16505 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
16506 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16507 IEM_MC_ELSE()
16508 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16509 IEM_MC_ENDIF();
16510 IEM_MC_ADVANCE_RIP();
16511
16512 IEM_MC_END();
16513 return VINF_SUCCESS;
16514}
16515
16516
16517/** Opcode 0xdc !11/0. */
16518FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
16519{
16520 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
16521 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
16522}
16523
16524
16525/** Opcode 0xdc !11/1. */
16526FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
16527{
16528 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
16529 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
16530}
16531
16532
16533/** Opcode 0xdc !11/2. */
16534FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
16535{
16536 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
16537
16538 IEM_MC_BEGIN(3, 3);
16539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16540 IEM_MC_LOCAL(uint16_t, u16Fsw);
16541 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16542 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16543 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16544 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16545
16546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16548
16549 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16550 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16551 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16552
16553 IEM_MC_PREPARE_FPU_USAGE();
16554 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16555 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16556 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16557 IEM_MC_ELSE()
16558 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16559 IEM_MC_ENDIF();
16560 IEM_MC_ADVANCE_RIP();
16561
16562 IEM_MC_END();
16563 return VINF_SUCCESS;
16564}
16565
16566
16567/** Opcode 0xdc !11/3. */
16568FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
16569{
16570 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
16571
16572 IEM_MC_BEGIN(3, 3);
16573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16574 IEM_MC_LOCAL(uint16_t, u16Fsw);
16575 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16576 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16577 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16578 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16579
16580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16582
16583 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16584 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16585 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16586
16587 IEM_MC_PREPARE_FPU_USAGE();
16588 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16589 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16590 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16591 IEM_MC_ELSE()
16592 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16593 IEM_MC_ENDIF();
16594 IEM_MC_ADVANCE_RIP();
16595
16596 IEM_MC_END();
16597 return VINF_SUCCESS;
16598}
16599
16600
16601/** Opcode 0xdc !11/4. */
16602FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
16603{
16604 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
16605 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
16606}
16607
16608
16609/** Opcode 0xdc !11/5. */
16610FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
16611{
16612 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
16613 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
16614}
16615
16616
16617/** Opcode 0xdc !11/6. */
16618FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
16619{
16620 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
16621 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
16622}
16623
16624
16625/** Opcode 0xdc !11/7. */
16626FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
16627{
16628 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
16629 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
16630}
16631
16632
16633/** Opcode 0xdc. */
16634FNIEMOP_DEF(iemOp_EscF4)
16635{
16636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16637 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
16638 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16639 {
16640 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16641 {
16642 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
16643 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
16644 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, Intel behavior is that of FCOM ST(i). */
16645 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, Intel behavior is that of FCOMP ST(i). */
16646 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
16647 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
16648 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
16649 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
16650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16651 }
16652 }
16653 else
16654 {
16655 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16656 {
16657 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
16658 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
16659 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
16660 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
16661 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
16662 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
16663 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
16664 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
16665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16666 }
16667 }
16668}
16669
16670
16671/** Opcode 0xdd !11/0.
16672 * @sa iemOp_fld_m32r */
16673FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
16674{
16675 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
16676
16677 IEM_MC_BEGIN(2, 3);
16678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16679 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16680 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
16681 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16682 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
16683
16684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16686 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16687 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16688
16689 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16690 IEM_MC_PREPARE_FPU_USAGE();
16691 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16692 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
16693 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16694 IEM_MC_ELSE()
16695 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16696 IEM_MC_ENDIF();
16697 IEM_MC_ADVANCE_RIP();
16698
16699 IEM_MC_END();
16700 return VINF_SUCCESS;
16701}
16702
16703
16704/** Opcode 0xdd !11/1. */
16705FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
16706{
16707 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
16708 IEM_MC_BEGIN(3, 2);
16709 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16710 IEM_MC_LOCAL(uint16_t, u16Fsw);
16711 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16712 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16713 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16714
16715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16717 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16718 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16719
16720 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16721 IEM_MC_PREPARE_FPU_USAGE();
16722 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16723 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16724 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16725 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16726 IEM_MC_ELSE()
16727 IEM_MC_IF_FCW_IM()
16728 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16729 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16730 IEM_MC_ENDIF();
16731 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16732 IEM_MC_ENDIF();
16733 IEM_MC_ADVANCE_RIP();
16734
16735 IEM_MC_END();
16736 return VINF_SUCCESS;
16737}
16738
16739
16740/** Opcode 0xdd !11/2. */
16741FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
16742{
16743 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
16744 IEM_MC_BEGIN(3, 2);
16745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16746 IEM_MC_LOCAL(uint16_t, u16Fsw);
16747 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16748 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16749 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16750
16751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16753 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16754 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16755
16756 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16757 IEM_MC_PREPARE_FPU_USAGE();
16758 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16759 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16760 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16761 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16762 IEM_MC_ELSE()
16763 IEM_MC_IF_FCW_IM()
16764 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16765 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16766 IEM_MC_ENDIF();
16767 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16768 IEM_MC_ENDIF();
16769 IEM_MC_ADVANCE_RIP();
16770
16771 IEM_MC_END();
16772 return VINF_SUCCESS;
16773}
16774
16775
16776
16777
16778/** Opcode 0xdd !11/3. */
16779FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
16780{
16781 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
16782 IEM_MC_BEGIN(3, 2);
16783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16784 IEM_MC_LOCAL(uint16_t, u16Fsw);
16785 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16786 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16787 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16788
16789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16791 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16792 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16793
16794 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16795 IEM_MC_PREPARE_FPU_USAGE();
16796 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16797 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16798 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16799 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16800 IEM_MC_ELSE()
16801 IEM_MC_IF_FCW_IM()
16802 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16803 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16804 IEM_MC_ENDIF();
16805 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16806 IEM_MC_ENDIF();
16807 IEM_MC_ADVANCE_RIP();
16808
16809 IEM_MC_END();
16810 return VINF_SUCCESS;
16811}
16812
16813
16814/** Opcode 0xdd !11/4. */
16815FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
16816{
16817 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
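/* The image is 94 bytes with a 16-bit operand size and 108 bytes with a
   32-bit one (14/28 byte environment plus 8 x 10 byte data registers), which
   is why the effective operand size is passed on to the C implementation. */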
16818 IEM_MC_BEGIN(3, 0);
16819 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16820 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16821 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
16822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16824 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16825 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16826 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16827 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
16828 IEM_MC_END();
16829 return VINF_SUCCESS;
16830}
16831
16832
16833/** Opcode 0xdd !11/6. */
16834FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
16835{
16836 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
16837 IEM_MC_BEGIN(3, 0);
16838 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16839 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16840 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
16841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16843 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16844 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16845 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16846 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
16847 IEM_MC_END();
16848 return VINF_SUCCESS;
16849}
16850
16851
16852/** Opcode 0xdd !11/7. */
16853FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
16854{
16855 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
16856
16857 IEM_MC_BEGIN(0, 2);
16858 IEM_MC_LOCAL(uint16_t, u16Tmp);
16859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16860
16861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16863 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
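    /* Note! There is deliberately no IEM_MC_MAYBE_RAISE_FPU_XCPT here; fnstsw
       is a no-wait control instruction and is precisely what guests use to
       inspect the pending FPU exception state. */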
16864
16865 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16866 IEM_MC_FETCH_FSW(u16Tmp);
16867 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
16868 IEM_MC_ADVANCE_RIP();
16869
16870/** @todo Debug / drop a hint to the verifier that things may differ
16871 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
16872 * NT4SP1. (X86_FSW_PE) */
16873 IEM_MC_END();
16874 return VINF_SUCCESS;
16875}
16876
16877
16878/** Opcode 0xdd 11/0. */
16879FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
16880{
16881 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
16882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16883    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
16884             unmodified. */
16885
16886 IEM_MC_BEGIN(0, 0);
16887
16888 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16889 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16890
16891 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16892 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16893 IEM_MC_UPDATE_FPU_OPCODE_IP();
16894
16895 IEM_MC_ADVANCE_RIP();
16896 IEM_MC_END();
16897 return VINF_SUCCESS;
16898}
16899
16900
16901/** Opcode 0xdd 11/2. */
16902FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
16903{
16904 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
16905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16906
16907 IEM_MC_BEGIN(0, 2);
16908 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
16909 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16910 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16911 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16912
16913 IEM_MC_PREPARE_FPU_USAGE();
16914 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16915 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
16916 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16917 IEM_MC_ELSE()
16918 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16919 IEM_MC_ENDIF();
16920
16921 IEM_MC_ADVANCE_RIP();
16922 IEM_MC_END();
16923 return VINF_SUCCESS;
16924}
16925
16926
16927/** Opcode 0xdd 11/4. */
16928FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
16929{
16930 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
16931 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
16932}
16933
16934
16935/** Opcode 0xdd 11/5. */
16936FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
16937{
16938 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
16939 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
16940}
16941
16942
16943/** Opcode 0xdd. */
16944FNIEMOP_DEF(iemOp_EscF5)
16945{
16946 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16947 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16948 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16949 {
16950 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16951 {
16952 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16953        case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of FXCH ST(i). */
16954 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16955 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16956 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
16957 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16958 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16959 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16961 }
16962 }
16963 else
16964 {
16965 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16966 {
16967 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16968 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16969 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16970 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16971 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16972 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16973 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16974 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16976 }
16977 }
16978}
16979
16980
16981/** Opcode 0xde 11/0. */
16982FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
16983{
16984 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
16985 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
16986}
16987
16988
16989/** Opcode 0xde 11/1. */
16990FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
16991{
16992 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
16993 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
16994}
16995
16996
16997/** Opcode 0xde 0xd9. */
16998FNIEMOP_DEF(iemOp_fcompp)
16999{
17000 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
17001 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
17002}
17003
17004
17005/** Opcode 0xde 11/4. */
17006FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
17007{
17008 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
17009 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
17010}
17011
17012
17013/** Opcode 0xde 11/5. */
17014FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
17015{
17016 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
17017 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
17018}
17019
17020
17021/** Opcode 0xde 11/6. */
17022FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
17023{
17024 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
17025 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
17026}
17027
17028
17029/** Opcode 0xde 11/7. */
17030FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
17031{
17032 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
17033 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
17034}
17035
17036
17037/**
17038 * Common worker for FPU instructions working on ST0 and an m16i, and storing
17039 * the result in ST0.
17040 *
17041 * @param   bRm         The ModR/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
17042 */
17043FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
17044{
17045 IEM_MC_BEGIN(3, 3);
17046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17047 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17048 IEM_MC_LOCAL(int16_t, i16Val2);
17049 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17050 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
17051 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
17052
17053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17055
17056 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17057 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17058 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17059
17060 IEM_MC_PREPARE_FPU_USAGE();
17061 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
17062 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
17063 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
17064 IEM_MC_ELSE()
17065 IEM_MC_FPU_STACK_UNDERFLOW(0);
17066 IEM_MC_ENDIF();
17067 IEM_MC_ADVANCE_RIP();
17068
17069 IEM_MC_END();
17070 return VINF_SUCCESS;
17071}
17072
17073
17074/** Opcode 0xde !11/0. */
17075FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
17076{
17077 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
17078 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
17079}
17080
17081
17082/** Opcode 0xde !11/1. */
17083FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
17084{
17085 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
17086 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
17087}
17088
17089
17090/** Opcode 0xde !11/2. */
17091FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
17092{
17093 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
17094
17095 IEM_MC_BEGIN(3, 3);
17096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17097 IEM_MC_LOCAL(uint16_t, u16Fsw);
17098 IEM_MC_LOCAL(int16_t, i16Val2);
17099 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17100 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
17101 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
17102
17103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17105
17106 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17107 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17108 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17109
17110 IEM_MC_PREPARE_FPU_USAGE();
17111 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
17112 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
17113 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17114 IEM_MC_ELSE()
17115 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17116 IEM_MC_ENDIF();
17117 IEM_MC_ADVANCE_RIP();
17118
17119 IEM_MC_END();
17120 return VINF_SUCCESS;
17121}
17122
17123
17124/** Opcode 0xde !11/3. */
17125FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
17126{
17127 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
17128
17129 IEM_MC_BEGIN(3, 3);
17130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17131 IEM_MC_LOCAL(uint16_t, u16Fsw);
17132 IEM_MC_LOCAL(int16_t, i16Val2);
17133 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17134 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
17135 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
17136
17137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17139
17140 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17141 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17142 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17143
17144 IEM_MC_PREPARE_FPU_USAGE();
17145 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
17146 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
17147 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17148 IEM_MC_ELSE()
17149 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17150 IEM_MC_ENDIF();
17151 IEM_MC_ADVANCE_RIP();
17152
17153 IEM_MC_END();
17154 return VINF_SUCCESS;
17155}
17156
17157
17158/** Opcode 0xde !11/4. */
17159FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
17160{
17161 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
17162 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
17163}
17164
17165
17166/** Opcode 0xde !11/5. */
17167FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
17168{
17169 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
17170 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
17171}
17172
17173
17174/** Opcode 0xde !11/6. */
17175FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
17176{
17177 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
17178 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
17179}
17180
17181
17182/** Opcode 0xde !11/7. */
17183FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
17184{
17185 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
17186 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
17187}
17188
17189
17190/** Opcode 0xde. */
17191FNIEMOP_DEF(iemOp_EscF6)
17192{
17193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17194 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
17195 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17196 {
17197 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17198 {
17199 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
17200 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
17201 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
17202 case 3: if (bRm == 0xd9)
17203 return FNIEMOP_CALL(iemOp_fcompp);
17204 return IEMOP_RAISE_INVALID_OPCODE();
17205 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
17206 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
17207 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
17208 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
17209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17210 }
17211 }
17212 else
17213 {
17214 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17215 {
17216 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
17217 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
17218 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
17219 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
17220 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
17221 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
17222 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
17223 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
17224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17225 }
17226 }
17227}
17228
17229
17230/** Opcode 0xdf 11/0.
17231 * Undocumented instruction, assumed to work like ffree + fincstp. */
17232FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
17233{
17234 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
17235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17236
17237 IEM_MC_BEGIN(0, 0);
17238
17239 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17240 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17241
17242 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
17243 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
17244 IEM_MC_FPU_STACK_INC_TOP();
17245 IEM_MC_UPDATE_FPU_OPCODE_IP();
17246
17247 IEM_MC_ADVANCE_RIP();
17248 IEM_MC_END();
17249 return VINF_SUCCESS;
17250}
17251
17252
17253/** Opcode 0xdf 0xe0. */
17254FNIEMOP_DEF(iemOp_fnstsw_ax)
17255{
17256 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
17257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17258
17259 IEM_MC_BEGIN(0, 1);
17260 IEM_MC_LOCAL(uint16_t, u16Tmp);
17261 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17262 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
17263 IEM_MC_FETCH_FSW(u16Tmp);
17264 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
17265 IEM_MC_ADVANCE_RIP();
17266 IEM_MC_END();
17267 return VINF_SUCCESS;
17268}
17269
17270
17271/** Opcode 0xdf 11/5. */
17272FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
17273{
17274 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
17275 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17276}
17277
17278
17279/** Opcode 0xdf 11/6. */
17280FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
17281{
17282 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
17283 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17284}
17285
17286
17287/** Opcode 0xdf !11/0. */
17288FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
17289{
17290 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
17291
17292 IEM_MC_BEGIN(2, 3);
17293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17294 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17295 IEM_MC_LOCAL(int16_t, i16Val);
17296 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17297 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
17298
17299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17301
17302 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17303 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17304 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17305
17306 IEM_MC_PREPARE_FPU_USAGE();
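    /* A push decrements FTOP first, so the slot that becomes the new ST(0) is
       the current ST(7); it must be empty or the push overflows the stack. */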
17307 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17308 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
17309 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17310 IEM_MC_ELSE()
17311 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17312 IEM_MC_ENDIF();
17313 IEM_MC_ADVANCE_RIP();
17314
17315 IEM_MC_END();
17316 return VINF_SUCCESS;
17317}
17318
17319
17320/** Opcode 0xdf !11/1. */
17321FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
17322{
17323 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
17324 IEM_MC_BEGIN(3, 2);
17325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17326 IEM_MC_LOCAL(uint16_t, u16Fsw);
17327 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17328 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17329 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17330
17331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17333 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17334 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17335
17336 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17337 IEM_MC_PREPARE_FPU_USAGE();
17338 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17339 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17340 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17341 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17342 IEM_MC_ELSE()
17343 IEM_MC_IF_FCW_IM()
17344 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17345 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17346 IEM_MC_ENDIF();
17347 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17348 IEM_MC_ENDIF();
17349 IEM_MC_ADVANCE_RIP();
17350
17351 IEM_MC_END();
17352 return VINF_SUCCESS;
17353}
17354
17355
17356/** Opcode 0xdf !11/2. */
17357FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
17358{
17359 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
17360 IEM_MC_BEGIN(3, 2);
17361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17362 IEM_MC_LOCAL(uint16_t, u16Fsw);
17363 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17364 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17365 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17366
17367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17369 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17370 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17371
17372 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17373 IEM_MC_PREPARE_FPU_USAGE();
17374 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17375 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17376 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17377 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17378 IEM_MC_ELSE()
17379 IEM_MC_IF_FCW_IM()
17380 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17381 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17382 IEM_MC_ENDIF();
17383 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17384 IEM_MC_ENDIF();
17385 IEM_MC_ADVANCE_RIP();
17386
17387 IEM_MC_END();
17388 return VINF_SUCCESS;
17389}
17390
17391
17392/** Opcode 0xdf !11/3. */
17393FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
17394{
17395 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
17396 IEM_MC_BEGIN(3, 2);
17397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17398 IEM_MC_LOCAL(uint16_t, u16Fsw);
17399 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17400 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17401 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17402
17403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17405 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17406 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17407
17408 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17409 IEM_MC_PREPARE_FPU_USAGE();
17410 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17411 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17412 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17413 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17414 IEM_MC_ELSE()
17415 IEM_MC_IF_FCW_IM()
17416 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17417 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17418 IEM_MC_ENDIF();
17419 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17420 IEM_MC_ENDIF();
17421 IEM_MC_ADVANCE_RIP();
17422
17423 IEM_MC_END();
17424 return VINF_SUCCESS;
17425}
17426
17427
17428/** Opcode 0xdf !11/4. */
17429FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17430
17431
17432/** Opcode 0xdf !11/5. */
17433FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
17434{
17435 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
17436
17437 IEM_MC_BEGIN(2, 3);
17438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17439 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17440 IEM_MC_LOCAL(int64_t, i64Val);
17441 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17442 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
17443
17444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17446
17447 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17448 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17449 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17450
17451 IEM_MC_PREPARE_FPU_USAGE();
17452 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17453 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
17454 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17455 IEM_MC_ELSE()
17456 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17457 IEM_MC_ENDIF();
17458 IEM_MC_ADVANCE_RIP();
17459
17460 IEM_MC_END();
17461 return VINF_SUCCESS;
17462}
17463
17464
17465/** Opcode 0xdf !11/6. */
17466FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17467
17468
17469/** Opcode 0xdf !11/7. */
17470FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
17471{
17472 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
17473 IEM_MC_BEGIN(3, 2);
17474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17475 IEM_MC_LOCAL(uint16_t, u16Fsw);
17476 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17477 IEM_MC_ARG(int64_t *, pi64Dst, 1);
17478 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17479
17480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17482 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17483 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17484
17485 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17486 IEM_MC_PREPARE_FPU_USAGE();
17487 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17488 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
17489 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
17490 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17491 IEM_MC_ELSE()
17492 IEM_MC_IF_FCW_IM()
17493 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
17494 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
17495 IEM_MC_ENDIF();
17496 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17497 IEM_MC_ENDIF();
17498 IEM_MC_ADVANCE_RIP();
17499
17500 IEM_MC_END();
17501 return VINF_SUCCESS;
17502}
17503
17504
17505/** Opcode 0xdf. */
17506FNIEMOP_DEF(iemOp_EscF7)
17507{
17508    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
17509 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17510 {
17511 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17512 {
17513 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
17514 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
17515 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17516 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17517 case 4: if (bRm == 0xe0)
17518 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17519 return IEMOP_RAISE_INVALID_OPCODE();
17520 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17521 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17522 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17524 }
17525 }
17526 else
17527 {
17528 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17529 {
17530 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17531 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17532 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17533 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17534 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17535 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17536 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17537 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17539 }
17540 }
17541}
17542
17543
17544/** Opcode 0xe0. */
17545FNIEMOP_DEF(iemOp_loopne_Jb)
17546{
17547 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
17548 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17550 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17551
17552 switch (pVCpu->iem.s.enmEffAddrMode)
17553 {
17554 case IEMMODE_16BIT:
17555 IEM_MC_BEGIN(0,0);
17556 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17557 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17558 IEM_MC_REL_JMP_S8(i8Imm);
17559 } IEM_MC_ELSE() {
17560 IEM_MC_ADVANCE_RIP();
17561 } IEM_MC_ENDIF();
17562 IEM_MC_END();
17563 return VINF_SUCCESS;
17564
17565 case IEMMODE_32BIT:
17566 IEM_MC_BEGIN(0,0);
17567 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17568 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17569 IEM_MC_REL_JMP_S8(i8Imm);
17570 } IEM_MC_ELSE() {
17571 IEM_MC_ADVANCE_RIP();
17572 } IEM_MC_ENDIF();
17573 IEM_MC_END();
17574 return VINF_SUCCESS;
17575
17576 case IEMMODE_64BIT:
17577 IEM_MC_BEGIN(0,0);
17578 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17579 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17580 IEM_MC_REL_JMP_S8(i8Imm);
17581 } IEM_MC_ELSE() {
17582 IEM_MC_ADVANCE_RIP();
17583 } IEM_MC_ENDIF();
17584 IEM_MC_END();
17585 return VINF_SUCCESS;
17586
17587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17588 }
17589}
17590
17591
17592/** Opcode 0xe1. */
17593FNIEMOP_DEF(iemOp_loope_Jb)
17594{
17595 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
17596 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17598 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17599
17600 switch (pVCpu->iem.s.enmEffAddrMode)
17601 {
17602 case IEMMODE_16BIT:
17603 IEM_MC_BEGIN(0,0);
17604 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17605 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17606 IEM_MC_REL_JMP_S8(i8Imm);
17607 } IEM_MC_ELSE() {
17608 IEM_MC_ADVANCE_RIP();
17609 } IEM_MC_ENDIF();
17610 IEM_MC_END();
17611 return VINF_SUCCESS;
17612
17613 case IEMMODE_32BIT:
17614 IEM_MC_BEGIN(0,0);
17615 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17616 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17617 IEM_MC_REL_JMP_S8(i8Imm);
17618 } IEM_MC_ELSE() {
17619 IEM_MC_ADVANCE_RIP();
17620 } IEM_MC_ENDIF();
17621 IEM_MC_END();
17622 return VINF_SUCCESS;
17623
17624 case IEMMODE_64BIT:
17625 IEM_MC_BEGIN(0,0);
17626 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17627 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17628 IEM_MC_REL_JMP_S8(i8Imm);
17629 } IEM_MC_ELSE() {
17630 IEM_MC_ADVANCE_RIP();
17631 } IEM_MC_ENDIF();
17632 IEM_MC_END();
17633 return VINF_SUCCESS;
17634
17635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17636 }
17637}
17638
17639
17640/** Opcode 0xe2. */
17641FNIEMOP_DEF(iemOp_loop_Jb)
17642{
17643 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
17644 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17646 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17647
17648 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
17649 * using the 32-bit operand size override. How can that be restarted? See
17650 * weird pseudo code in intel manual. */
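    /* Note! The 'loop $' case (i8Imm equals minus the instruction length, i.e.
       the branch targets its own first byte) is short-circuited below for all
       three address sizes: the counter is simply cleared and execution falls
       through instead of iterating the loop up to 2^64 - 1 times. */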
17651 switch (pVCpu->iem.s.enmEffAddrMode)
17652 {
17653 case IEMMODE_16BIT:
17654 IEM_MC_BEGIN(0,0);
17655 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17656 {
17657 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17658 IEM_MC_IF_CX_IS_NZ() {
17659 IEM_MC_REL_JMP_S8(i8Imm);
17660 } IEM_MC_ELSE() {
17661 IEM_MC_ADVANCE_RIP();
17662 } IEM_MC_ENDIF();
17663 }
17664 else
17665 {
17666 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
17667 IEM_MC_ADVANCE_RIP();
17668 }
17669 IEM_MC_END();
17670 return VINF_SUCCESS;
17671
17672 case IEMMODE_32BIT:
17673 IEM_MC_BEGIN(0,0);
17674 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17675 {
17676 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17677 IEM_MC_IF_ECX_IS_NZ() {
17678 IEM_MC_REL_JMP_S8(i8Imm);
17679 } IEM_MC_ELSE() {
17680 IEM_MC_ADVANCE_RIP();
17681 } IEM_MC_ENDIF();
17682 }
17683 else
17684 {
17685 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
17686 IEM_MC_ADVANCE_RIP();
17687 }
17688 IEM_MC_END();
17689 return VINF_SUCCESS;
17690
17691 case IEMMODE_64BIT:
17692 IEM_MC_BEGIN(0,0);
17693 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17694 {
17695 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17696 IEM_MC_IF_RCX_IS_NZ() {
17697 IEM_MC_REL_JMP_S8(i8Imm);
17698 } IEM_MC_ELSE() {
17699 IEM_MC_ADVANCE_RIP();
17700 } IEM_MC_ENDIF();
17701 }
17702 else
17703 {
17704 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
17705 IEM_MC_ADVANCE_RIP();
17706 }
17707 IEM_MC_END();
17708 return VINF_SUCCESS;
17709
17710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17711 }
17712}
17713
17714
17715/** Opcode 0xe3. */
17716FNIEMOP_DEF(iemOp_jecxz_Jb)
17717{
17718 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
17719 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17721 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17722
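    /* Note! Unlike the loop instructions above, jecxz does not decrement the
       counter; it branches when the address-size sized counter is zero, hence
       the inverted IF/ELSE bodies below. */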
17723 switch (pVCpu->iem.s.enmEffAddrMode)
17724 {
17725 case IEMMODE_16BIT:
17726 IEM_MC_BEGIN(0,0);
17727 IEM_MC_IF_CX_IS_NZ() {
17728 IEM_MC_ADVANCE_RIP();
17729 } IEM_MC_ELSE() {
17730 IEM_MC_REL_JMP_S8(i8Imm);
17731 } IEM_MC_ENDIF();
17732 IEM_MC_END();
17733 return VINF_SUCCESS;
17734
17735 case IEMMODE_32BIT:
17736 IEM_MC_BEGIN(0,0);
17737 IEM_MC_IF_ECX_IS_NZ() {
17738 IEM_MC_ADVANCE_RIP();
17739 } IEM_MC_ELSE() {
17740 IEM_MC_REL_JMP_S8(i8Imm);
17741 } IEM_MC_ENDIF();
17742 IEM_MC_END();
17743 return VINF_SUCCESS;
17744
17745 case IEMMODE_64BIT:
17746 IEM_MC_BEGIN(0,0);
17747 IEM_MC_IF_RCX_IS_NZ() {
17748 IEM_MC_ADVANCE_RIP();
17749 } IEM_MC_ELSE() {
17750 IEM_MC_REL_JMP_S8(i8Imm);
17751 } IEM_MC_ENDIF();
17752 IEM_MC_END();
17753 return VINF_SUCCESS;
17754
17755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17756 }
17757}
17758
17759
17760/** Opcode 0xe4. */
17761FNIEMOP_DEF(iemOp_in_AL_Ib)
17762{
17763 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
17764 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17766 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
17767}
17768
17769
17770/** Opcode 0xe5. */
17771FNIEMOP_DEF(iemOp_in_eAX_Ib)
17772{
17773 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
17774 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17776 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17777}
17778
17779
17780/** Opcode 0xe6. */
17781FNIEMOP_DEF(iemOp_out_Ib_AL)
17782{
17783 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
17784 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17786 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
17787}
17788
17789
17790/** Opcode 0xe7. */
17791FNIEMOP_DEF(iemOp_out_Ib_eAX)
17792{
17793 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
17794 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17796 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17797}
17798
17799
17800/** Opcode 0xe8. */
17801FNIEMOP_DEF(iemOp_call_Jv)
17802{
17803 IEMOP_MNEMONIC(call_Jv, "call Jv");
17804 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17805 switch (pVCpu->iem.s.enmEffOpSize)
17806 {
17807 case IEMMODE_16BIT:
17808 {
17809 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17810 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
17811 }
17812
17813 case IEMMODE_32BIT:
17814 {
17815 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17816 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
17817 }
17818
17819 case IEMMODE_64BIT:
17820 {
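            /* Even with a 64-bit operand size only a 32-bit displacement is
               encoded; it is sign-extended to 64 bits when fetched. */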
17821 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17822 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
17823 }
17824
17825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17826 }
17827}
17828
17829
17830/** Opcode 0xe9. */
17831FNIEMOP_DEF(iemOp_jmp_Jv)
17832{
17833 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
17834 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17835 switch (pVCpu->iem.s.enmEffOpSize)
17836 {
17837 case IEMMODE_16BIT:
17838 {
17839 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
17840 IEM_MC_BEGIN(0, 0);
17841 IEM_MC_REL_JMP_S16(i16Imm);
17842 IEM_MC_END();
17843 return VINF_SUCCESS;
17844 }
17845
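        /* Note! 64-bit mode shares the 32-bit case: the displacement is always
           fetched as 32 bits and sign-extended by IEM_MC_REL_JMP_S32. */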
17846 case IEMMODE_64BIT:
17847 case IEMMODE_32BIT:
17848 {
17849 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
17850 IEM_MC_BEGIN(0, 0);
17851 IEM_MC_REL_JMP_S32(i32Imm);
17852 IEM_MC_END();
17853 return VINF_SUCCESS;
17854 }
17855
17856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17857 }
17858}
17859
17860
17861/** Opcode 0xea. */
17862FNIEMOP_DEF(iemOp_jmp_Ap)
17863{
17864 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
17865 IEMOP_HLP_NO_64BIT();
17866
17867    /* Decode the far pointer address and pass it on to the far jump C implementation. */
17868 uint32_t offSeg;
17869 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
17870 IEM_OPCODE_GET_NEXT_U32(&offSeg);
17871 else
17872 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
17873 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
17874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17875 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
17876}
17877
17878
17879/** Opcode 0xeb. */
17880FNIEMOP_DEF(iemOp_jmp_Jb)
17881{
17882 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17883 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17885 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17886
17887 IEM_MC_BEGIN(0, 0);
17888 IEM_MC_REL_JMP_S8(i8Imm);
17889 IEM_MC_END();
17890 return VINF_SUCCESS;
17891}
17892
17893
17894/** Opcode 0xec. */
17895FNIEMOP_DEF(iemOp_in_AL_DX)
17896{
17897 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17899 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17900}
17901
17902
17903/** Opcode 0xed. */
17904FNIEMOP_DEF(iemOp_eAX_DX)
17905{
17906 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17908 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17909}
17910
17911
17912/** Opcode 0xee. */
17913FNIEMOP_DEF(iemOp_out_DX_AL)
17914{
17915 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17917 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17918}
17919
17920
17921/** Opcode 0xef. */
17922FNIEMOP_DEF(iemOp_out_DX_eAX)
17923{
17924 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17926 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17927}
17928
17929
17930/** Opcode 0xf0. */
17931FNIEMOP_DEF(iemOp_lock)
17932{
17933 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17934 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17935
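    /* Prefix bytes restart the decoder: fetch the next opcode byte and
       dispatch it through the regular one-byte opcode table. */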
17936 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17937 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17938}
17939
17940
17941/** Opcode 0xf1. */
17942FNIEMOP_DEF(iemOp_int_1)
17943{
17944 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17945 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17946 /** @todo testcase! */
17947 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17948}
17949
17950
17951/** Opcode 0xf2. */
17952FNIEMOP_DEF(iemOp_repne)
17953{
17954 /* This overrides any previous REPE prefix. */
17955 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17956 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17957 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17958
17959 /* For the 4 entry opcode tables, REPNZ overrides any previous
17960 REPZ and operand size prefixes. */
17961 pVCpu->iem.s.idxPrefix = 3;
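    /* (Assumed index order for those tables: 0 = no prefix, 1 = 0x66,
       2 = 0xf3, 3 = 0xf2.) */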
17962
17963 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17964 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17965}
17966
17967
17968/** Opcode 0xf3. */
17969FNIEMOP_DEF(iemOp_repe)
17970{
17971 /* This overrides any previous REPNE prefix. */
17972 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17973 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17974 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17975
17976    /* For the 4 entry opcode tables, REPZ overrides any previous
17977       REPNZ and operand size prefixes. */
17978 pVCpu->iem.s.idxPrefix = 2;
17979
17980 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17981 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17982}
17983
17984
17985/** Opcode 0xf4. */
17986FNIEMOP_DEF(iemOp_hlt)
17987{
17988    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17989 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17990}
17991
17992
17993/** Opcode 0xf5. */
17994FNIEMOP_DEF(iemOp_cmc)
17995{
17996 IEMOP_MNEMONIC(cmc, "cmc");
17997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17998 IEM_MC_BEGIN(0, 0);
17999 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
18000 IEM_MC_ADVANCE_RIP();
18001 IEM_MC_END();
18002 return VINF_SUCCESS;
18003}
18004
18005
18006/**
18007 * Common implementation of 'inc/dec/not/neg Eb'.
18008 *
18009 * @param bRm The RM byte.
18010 * @param pImpl The instruction implementation.
18011 */
18012FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
18013{
18014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18015 {
18016        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18017        IEM_MC_BEGIN(2, 0);
18018 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18019 IEM_MC_ARG(uint32_t *, pEFlags, 1);
18020 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18021 IEM_MC_REF_EFLAGS(pEFlags);
18022 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
18023 IEM_MC_ADVANCE_RIP();
18024 IEM_MC_END();
18025 }
18026 else
18027 {
18028 /* memory access. */
18029 IEM_MC_BEGIN(2, 2);
18030 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18031 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18033
18034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18035 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18036 IEM_MC_FETCH_EFLAGS(EFlags);
18037 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18038 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
18039 else
18040 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
18041
18042 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
18043 IEM_MC_COMMIT_EFLAGS(EFlags);
18044 IEM_MC_ADVANCE_RIP();
18045 IEM_MC_END();
18046 }
18047 return VINF_SUCCESS;
18048}
18049
18050
18051/**
18052 * Common implementation of 'inc/dec/not/neg Ev'.
18053 *
18054 * @param bRm The RM byte.
18055 * @param pImpl The instruction implementation.
18056 */
18057FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
18058{
18059 /* Registers are handled by a common worker. */
18060 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18061 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18062
18063 /* Memory we do here. */
18064 switch (pVCpu->iem.s.enmEffOpSize)
18065 {
18066 case IEMMODE_16BIT:
18067 IEM_MC_BEGIN(2, 2);
18068 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18069 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18071
18072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18073 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18074 IEM_MC_FETCH_EFLAGS(EFlags);
18075 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18076 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
18077 else
18078 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
18079
18080 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
18081 IEM_MC_COMMIT_EFLAGS(EFlags);
18082 IEM_MC_ADVANCE_RIP();
18083 IEM_MC_END();
18084 return VINF_SUCCESS;
18085
18086 case IEMMODE_32BIT:
18087 IEM_MC_BEGIN(2, 2);
18088 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18089 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18091
18092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18093 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18094 IEM_MC_FETCH_EFLAGS(EFlags);
18095 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18096 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
18097 else
18098 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
18099
18100 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
18101 IEM_MC_COMMIT_EFLAGS(EFlags);
18102 IEM_MC_ADVANCE_RIP();
18103 IEM_MC_END();
18104 return VINF_SUCCESS;
18105
18106 case IEMMODE_64BIT:
18107 IEM_MC_BEGIN(2, 2);
18108 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18109 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18111
18112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18113 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18114 IEM_MC_FETCH_EFLAGS(EFlags);
18115 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18116 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
18117 else
18118 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
18119
18120 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
18121 IEM_MC_COMMIT_EFLAGS(EFlags);
18122 IEM_MC_ADVANCE_RIP();
18123 IEM_MC_END();
18124 return VINF_SUCCESS;
18125
18126 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18127 }
18128}
18129
18130
18131/** Opcode 0xf6 /0. */
18132FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
18133{
18134 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
18135 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18136
18137 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18138 {
18139 /* register access */
18140 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
18141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18142
18143 IEM_MC_BEGIN(3, 0);
18144 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18145 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
18146 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18147 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18148 IEM_MC_REF_EFLAGS(pEFlags);
18149 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
18150 IEM_MC_ADVANCE_RIP();
18151 IEM_MC_END();
18152 }
18153 else
18154 {
18155 /* memory access. */
18156 IEM_MC_BEGIN(3, 2);
18157 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18158 IEM_MC_ARG(uint8_t, u8Src, 1);
18159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18161
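        /* Note! The third IEM_MC_CALC_RM_EFF_ADDR parameter is taken to be the
           number of immediate bytes that still follow the ModR/M encoding,
           which matters for RIP-relative addressing in 64-bit mode. */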
18162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
18163 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
18164 IEM_MC_ASSIGN(u8Src, u8Imm);
18165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
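        /* TEST only reads its operands, hence the read-only mapping and the
           rejection of the LOCK prefix above. */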
18166 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18167 IEM_MC_FETCH_EFLAGS(EFlags);
18168 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
18169
18170 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
18171 IEM_MC_COMMIT_EFLAGS(EFlags);
18172 IEM_MC_ADVANCE_RIP();
18173 IEM_MC_END();
18174 }
18175 return VINF_SUCCESS;
18176}
18177
18178
18179/** Opcode 0xf7 /0. */
18180FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
18181{
18182 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
18183 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18184
18185 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18186 {
18187 /* register access */
18188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18189 switch (pVCpu->iem.s.enmEffOpSize)
18190 {
18191 case IEMMODE_16BIT:
18192 {
18193 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18194 IEM_MC_BEGIN(3, 0);
18195 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18196 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
18197 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18198 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18199 IEM_MC_REF_EFLAGS(pEFlags);
18200 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18201 IEM_MC_ADVANCE_RIP();
18202 IEM_MC_END();
18203 return VINF_SUCCESS;
18204 }
18205
18206 case IEMMODE_32BIT:
18207 {
18208 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18209 IEM_MC_BEGIN(3, 0);
18210 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18211 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
18212 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18213 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18214 IEM_MC_REF_EFLAGS(pEFlags);
18215 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18216 /* No clearing the high dword here - test doesn't write back the result. */
18217 IEM_MC_ADVANCE_RIP();
18218 IEM_MC_END();
18219 return VINF_SUCCESS;
18220 }
18221
18222 case IEMMODE_64BIT:
18223 {
18224 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18225 IEM_MC_BEGIN(3, 0);
18226 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18227 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
18228 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18229 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18230 IEM_MC_REF_EFLAGS(pEFlags);
18231 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18232 IEM_MC_ADVANCE_RIP();
18233 IEM_MC_END();
18234 return VINF_SUCCESS;
18235 }
18236
18237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18238 }
18239 }
18240 else
18241 {
18242 /* memory access. */
18243 switch (pVCpu->iem.s.enmEffOpSize)
18244 {
18245 case IEMMODE_16BIT:
18246 {
18247 IEM_MC_BEGIN(3, 2);
18248 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18249 IEM_MC_ARG(uint16_t, u16Src, 1);
18250 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18252
18253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
18254 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18255 IEM_MC_ASSIGN(u16Src, u16Imm);
18256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18257 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18258 IEM_MC_FETCH_EFLAGS(EFlags);
18259 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18260
18261 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
18262 IEM_MC_COMMIT_EFLAGS(EFlags);
18263 IEM_MC_ADVANCE_RIP();
18264 IEM_MC_END();
18265 return VINF_SUCCESS;
18266 }
18267
18268 case IEMMODE_32BIT:
18269 {
18270 IEM_MC_BEGIN(3, 2);
18271 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18272 IEM_MC_ARG(uint32_t, u32Src, 1);
18273 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18275
18276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18277 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18278 IEM_MC_ASSIGN(u32Src, u32Imm);
18279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18280 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18281 IEM_MC_FETCH_EFLAGS(EFlags);
18282 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18283
18284 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
18285 IEM_MC_COMMIT_EFLAGS(EFlags);
18286 IEM_MC_ADVANCE_RIP();
18287 IEM_MC_END();
18288 return VINF_SUCCESS;
18289 }
18290
18291 case IEMMODE_64BIT:
18292 {
18293 IEM_MC_BEGIN(3, 2);
18294 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18295 IEM_MC_ARG(uint64_t, u64Src, 1);
18296 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18298
18299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18300 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18301 IEM_MC_ASSIGN(u64Src, u64Imm);
18302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18303 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18304 IEM_MC_FETCH_EFLAGS(EFlags);
18305 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18306
18307 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18308 IEM_MC_COMMIT_EFLAGS(EFlags);
18309 IEM_MC_ADVANCE_RIP();
18310 IEM_MC_END();
18311 return VINF_SUCCESS;
18312 }
18313
18314 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18315 }
18316 }
18317}
18318
18319
18320/** Opcode 0xf6 /4, /5, /6 and /7. */
18321FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
18322{
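    /* The assembly worker returns zero on success and non-zero when #DE
       should be raised, hence the rc checks below. */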
18323 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18324 {
18325 /* register access */
18326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18327 IEM_MC_BEGIN(3, 1);
18328 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18329 IEM_MC_ARG(uint8_t, u8Value, 1);
18330 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18331 IEM_MC_LOCAL(int32_t, rc);
18332
18333 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18334 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18335 IEM_MC_REF_EFLAGS(pEFlags);
18336 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18337 IEM_MC_IF_LOCAL_IS_Z(rc) {
18338 IEM_MC_ADVANCE_RIP();
18339 } IEM_MC_ELSE() {
18340 IEM_MC_RAISE_DIVIDE_ERROR();
18341 } IEM_MC_ENDIF();
18342
18343 IEM_MC_END();
18344 }
18345 else
18346 {
18347 /* memory access. */
18348 IEM_MC_BEGIN(3, 2);
18349 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18350 IEM_MC_ARG(uint8_t, u8Value, 1);
18351 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18353 IEM_MC_LOCAL(int32_t, rc);
18354
18355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18357 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18358 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18359 IEM_MC_REF_EFLAGS(pEFlags);
18360 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18361 IEM_MC_IF_LOCAL_IS_Z(rc) {
18362 IEM_MC_ADVANCE_RIP();
18363 } IEM_MC_ELSE() {
18364 IEM_MC_RAISE_DIVIDE_ERROR();
18365 } IEM_MC_ENDIF();
18366
18367 IEM_MC_END();
18368 }
18369 return VINF_SUCCESS;
18370}
18371
18372
18373/** Opcode 0xf7 /4, /5, /6 and /7. */
18374FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18375{
18376 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18377
18378 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18379 {
18380 /* register access */
18381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18382 switch (pVCpu->iem.s.enmEffOpSize)
18383 {
18384 case IEMMODE_16BIT:
18385 {
18387 IEM_MC_BEGIN(4, 1);
18388 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18389 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18390 IEM_MC_ARG(uint16_t, u16Value, 2);
18391 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18392 IEM_MC_LOCAL(int32_t, rc);
18393
18394 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18395 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18396 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18397 IEM_MC_REF_EFLAGS(pEFlags);
18398 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18399 IEM_MC_IF_LOCAL_IS_Z(rc) {
18400 IEM_MC_ADVANCE_RIP();
18401 } IEM_MC_ELSE() {
18402 IEM_MC_RAISE_DIVIDE_ERROR();
18403 } IEM_MC_ENDIF();
18404
18405 IEM_MC_END();
18406 return VINF_SUCCESS;
18407 }
18408
18409 case IEMMODE_32BIT:
18410 {
18412 IEM_MC_BEGIN(4, 1);
18413 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18414 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18415 IEM_MC_ARG(uint32_t, u32Value, 2);
18416 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18417 IEM_MC_LOCAL(int32_t, rc);
18418
18419 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18420 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18421 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18422 IEM_MC_REF_EFLAGS(pEFlags);
18423 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18424 IEM_MC_IF_LOCAL_IS_Z(rc) {
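                /* Success: explicitly zero-extend the 32-bit results to 64 bits;
                   the registers are left untouched when #DE is raised. */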
18425 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18426 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18427 IEM_MC_ADVANCE_RIP();
18428 } IEM_MC_ELSE() {
18429 IEM_MC_RAISE_DIVIDE_ERROR();
18430 } IEM_MC_ENDIF();
18431
18432 IEM_MC_END();
18433 return VINF_SUCCESS;
18434 }
18435
18436 case IEMMODE_64BIT:
18437 {
18439 IEM_MC_BEGIN(4, 1);
18440 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18441 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18442 IEM_MC_ARG(uint64_t, u64Value, 2);
18443 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18444 IEM_MC_LOCAL(int32_t, rc);
18445
18446 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18447 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18448 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18449 IEM_MC_REF_EFLAGS(pEFlags);
18450 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18451 IEM_MC_IF_LOCAL_IS_Z(rc) {
18452 IEM_MC_ADVANCE_RIP();
18453 } IEM_MC_ELSE() {
18454 IEM_MC_RAISE_DIVIDE_ERROR();
18455 } IEM_MC_ENDIF();
18456
18457 IEM_MC_END();
18458 return VINF_SUCCESS;
18459 }
18460
18461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18462 }
18463 }
18464 else
18465 {
18466 /* memory access. */
18467 switch (pVCpu->iem.s.enmEffOpSize)
18468 {
18469 case IEMMODE_16BIT:
18470 {
18471 IEM_MC_BEGIN(4, 2);
18472 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18473 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18474 IEM_MC_ARG(uint16_t, u16Value, 2);
18475 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18477 IEM_MC_LOCAL(int32_t, rc);
18478
18479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18481 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18482 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18483 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18484 IEM_MC_REF_EFLAGS(pEFlags);
18485 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18486 IEM_MC_IF_LOCAL_IS_Z(rc) {
18487 IEM_MC_ADVANCE_RIP();
18488 } IEM_MC_ELSE() {
18489 IEM_MC_RAISE_DIVIDE_ERROR();
18490 } IEM_MC_ENDIF();
18491
18492 IEM_MC_END();
18493 return VINF_SUCCESS;
18494 }
18495
18496 case IEMMODE_32BIT:
18497 {
18498 IEM_MC_BEGIN(4, 2);
18499 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18500 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18501 IEM_MC_ARG(uint32_t, u32Value, 2);
18502 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18504 IEM_MC_LOCAL(int32_t, rc);
18505
18506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18508 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18509 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18510 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18511 IEM_MC_REF_EFLAGS(pEFlags);
18512 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18513 IEM_MC_IF_LOCAL_IS_Z(rc) {
18514 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18515 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18516 IEM_MC_ADVANCE_RIP();
18517 } IEM_MC_ELSE() {
18518 IEM_MC_RAISE_DIVIDE_ERROR();
18519 } IEM_MC_ENDIF();
18520
18521 IEM_MC_END();
18522 return VINF_SUCCESS;
18523 }
18524
18525 case IEMMODE_64BIT:
18526 {
18527 IEM_MC_BEGIN(4, 2);
18528 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18529 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18530 IEM_MC_ARG(uint64_t, u64Value, 2);
18531 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18533 IEM_MC_LOCAL(int32_t, rc);
18534
18535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18537 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18538 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18539 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18540 IEM_MC_REF_EFLAGS(pEFlags);
18541 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18542 IEM_MC_IF_LOCAL_IS_Z(rc) {
18543 IEM_MC_ADVANCE_RIP();
18544 } IEM_MC_ELSE() {
18545 IEM_MC_RAISE_DIVIDE_ERROR();
18546 } IEM_MC_ENDIF();
18547
18548 IEM_MC_END();
18549 return VINF_SUCCESS;
18550 }
18551
18552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18553 }
18554 }
18555}
18556

18557/** Opcode 0xf6. */
18558FNIEMOP_DEF(iemOp_Grp3_Eb)
18559{
18560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18561 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18562 {
18563 case 0:
18564 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
18565 case 1:
18566/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18567 return IEMOP_RAISE_INVALID_OPCODE();
18568 case 2:
18569 IEMOP_MNEMONIC(not_Eb, "not Eb");
18570 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
18571 case 3:
18572 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
18573 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
18574 case 4:
18575 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
18576 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18577 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
18578 case 5:
18579 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
18580 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18581 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
18582 case 6:
18583 IEMOP_MNEMONIC(div_Eb, "div Eb");
18584 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18585 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
18586 case 7:
18587 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
18588 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18589 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
18590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18591 }
18592}
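
/*
 * Illustrative sketch, not part of the build: the /0../7 dispatch above keys on
 * the reg field of the ModRM byte (bits 5:3); mod (bits 7:6) and rm (bits 2:0)
 * select the operand. Plain shifts and masks equivalent to the X86_MODRM_*
 * macros; the REX extensions (uRexReg/uRexB) widen reg/rm to 4 bits and are
 * ignored here:
 */
static void sketchSplitModRM(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = (bRm >> 6) & 3;    /* 3 means register operand, anything else memory */
    *pbReg = (bRm >> 3) & 7;    /* opcode extension /0../7 for group 3 */
    *pbRm  =  bRm       & 7;    /* register or addressing-mode selector */
}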
18593
18594
18595/** Opcode 0xf7. */
18596FNIEMOP_DEF(iemOp_Grp3_Ev)
18597{
18598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18599 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18600 {
18601 case 0:
18602 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
18603 case 1:
18604/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18605 return IEMOP_RAISE_INVALID_OPCODE();
18606 case 2:
18607 IEMOP_MNEMONIC(not_Ev, "not Ev");
18608 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
18609 case 3:
18610 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
18611 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
18612 case 4:
18613 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
18614 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18615 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
18616 case 5:
18617 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
18618 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18619 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
18620 case 6:
18621 IEMOP_MNEMONIC(div_Ev, "div Ev");
18622 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18623 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
18624 case 7:
18625 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
18626 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18627 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
18628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18629 }
18630}
18631
18632
18633/** Opcode 0xf8. */
18634FNIEMOP_DEF(iemOp_clc)
18635{
18636 IEMOP_MNEMONIC(clc, "clc");
18637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18638 IEM_MC_BEGIN(0, 0);
18639 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
18640 IEM_MC_ADVANCE_RIP();
18641 IEM_MC_END();
18642 return VINF_SUCCESS;
18643}
18644
18645
18646/** Opcode 0xf9. */
18647FNIEMOP_DEF(iemOp_stc)
18648{
18649 IEMOP_MNEMONIC(stc, "stc");
18650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18651 IEM_MC_BEGIN(0, 0);
18652 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
18653 IEM_MC_ADVANCE_RIP();
18654 IEM_MC_END();
18655 return VINF_SUCCESS;
18656}
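
/*
 * Illustrative sketch, not part of the build: clc/stc (and cmc at 0xf5) are
 * plain bit operations on the carry flag, which is all the
 * IEM_MC_CLEAR_EFL_BIT/IEM_MC_SET_EFL_BIT statements above amount to:
 */
static uint32_t sketchClc(uint32_t fEFlags) { return fEFlags & ~(uint32_t)X86_EFL_CF; }
static uint32_t sketchStc(uint32_t fEFlags) { return fEFlags |  X86_EFL_CF; }
static uint32_t sketchCmc(uint32_t fEFlags) { return fEFlags ^  X86_EFL_CF; }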
18657
18658
18659/** Opcode 0xfa. */
18660FNIEMOP_DEF(iemOp_cli)
18661{
18662 IEMOP_MNEMONIC(cli, "cli");
18663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18664 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
18665}
18666
18667
/** Opcode 0xfb. */
18668 FNIEMOP_DEF(iemOp_sti)
18669{
18670 IEMOP_MNEMONIC(sti, "sti");
18671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18672 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
18673}
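
/*
 * Illustrative sketch, not part of the build: cli/sti defer to C code because
 * toggling IF is privileged; in protected mode it requires CPL <= IOPL or a
 * fault. Simplified check ignoring real mode, VME and PVI:
 */
static int sketchMayToggleIf(uint32_t fEFlags, uint8_t uCpl)
{
    uint8_t const uIopl = (uint8_t)((fEFlags >> 12) & 3);   /* the IOPL field */
    return uCpl <= uIopl;                                   /* 0 => raise #GP(0) */
}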
18674
18675
18676/** Opcode 0xfc. */
18677FNIEMOP_DEF(iemOp_cld)
18678{
18679 IEMOP_MNEMONIC(cld, "cld");
18680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18681 IEM_MC_BEGIN(0, 0);
18682 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
18683 IEM_MC_ADVANCE_RIP();
18684 IEM_MC_END();
18685 return VINF_SUCCESS;
18686}
18687
18688
18689/** Opcode 0xfd. */
18690FNIEMOP_DEF(iemOp_std)
18691{
18692 IEMOP_MNEMONIC(std, "std");
18693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18694 IEM_MC_BEGIN(0, 0);
18695 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
18696 IEM_MC_ADVANCE_RIP();
18697 IEM_MC_END();
18698 return VINF_SUCCESS;
18699}
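
/*
 * Illustrative sketch, not part of the build: cld/std matter to the string
 * instructions, which step rSI/rDI by plus or minus the operand size
 * depending on DF:
 */
static int32_t sketchStringDelta(uint32_t fEFlags, uint32_t cbOperand)
{
    return (fEFlags & X86_EFL_DF) ? -(int32_t)cbOperand : (int32_t)cbOperand;
}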
18700
18701
18702/** Opcode 0xfe. */
18703FNIEMOP_DEF(iemOp_Grp4)
18704{
18705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18706 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18707 {
18708 case 0:
18709 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
18710 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
18711 case 1:
18712 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
18713 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
18714 default:
18715 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
18716 return IEMOP_RAISE_INVALID_OPCODE();
18717 }
18718}
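
/*
 * Illustrative sketch, not part of the build: inc/dec set OF/SF/ZF/AF/PF like
 * an add/sub by one but leave CF untouched, which is why they get their own
 * g_iemAImpl_inc/g_iemAImpl_dec workers instead of reusing add/sub. An 8-bit
 * inc (PF left out for brevity):
 */
static uint8_t sketchIncU8(uint8_t uDst, uint32_t *pfEFlags)
{
    uint8_t const uResult = (uint8_t)(uDst + 1);
    uint32_t fEfl = *pfEFlags & ~(uint32_t)(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF);
    if (!uResult)               fEfl |= X86_EFL_ZF;
    if (uResult & 0x80)         fEfl |= X86_EFL_SF;
    if (uDst == 0x7f)           fEfl |= X86_EFL_OF;   /* 0x7f + 1 wraps to negative */
    if ((uDst & 0xf) == 0xf)    fEfl |= X86_EFL_AF;   /* carry out of bit 3 */
    *pfEFlags = fEfl;                                 /* X86_EFL_CF deliberately untouched */
    return uResult;
}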
18719
18720
18721/**
18722 * Opcode 0xff /2.
18723 * @param bRm The RM byte.
18724 */
18725FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
18726{
18727 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
18728 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18729
18730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18731 {
18732 /* The new RIP is taken from a register. */
18733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18734 switch (pVCpu->iem.s.enmEffOpSize)
18735 {
18736 case IEMMODE_16BIT:
18737 IEM_MC_BEGIN(1, 0);
18738 IEM_MC_ARG(uint16_t, u16Target, 0);
18739 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18740 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18741 IEM_MC_END();
18742 return VINF_SUCCESS;
18743
18744 case IEMMODE_32BIT:
18745 IEM_MC_BEGIN(1, 0);
18746 IEM_MC_ARG(uint32_t, u32Target, 0);
18747 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18748 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18749 IEM_MC_END();
18750 return VINF_SUCCESS;
18751
18752 case IEMMODE_64BIT:
18753 IEM_MC_BEGIN(1, 0);
18754 IEM_MC_ARG(uint64_t, u64Target, 0);
18755 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18756 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18757 IEM_MC_END();
18758 return VINF_SUCCESS;
18759
18760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18761 }
18762 }
18763 else
18764 {
18765 /* The new RIP is taken from a memory location. */
18766 switch (pVCpu->iem.s.enmEffOpSize)
18767 {
18768 case IEMMODE_16BIT:
18769 IEM_MC_BEGIN(1, 1);
18770 IEM_MC_ARG(uint16_t, u16Target, 0);
18771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18774 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18775 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18776 IEM_MC_END();
18777 return VINF_SUCCESS;
18778
18779 case IEMMODE_32BIT:
18780 IEM_MC_BEGIN(1, 1);
18781 IEM_MC_ARG(uint32_t, u32Target, 0);
18782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18785 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18786 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18787 IEM_MC_END();
18788 return VINF_SUCCESS;
18789
18790 case IEMMODE_64BIT:
18791 IEM_MC_BEGIN(1, 1);
18792 IEM_MC_ARG(uint64_t, u64Target, 0);
18793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18796 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18797 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18798 IEM_MC_END();
18799 return VINF_SUCCESS;
18800
18801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18802 }
18803 }
18804}
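
/*
 * Illustrative sketch, not part of the build: the iemCImpl_call_* workers the
 * code above defers to boil down to pushing the return RIP and branching.
 * Canonical, segment and stack-fault checks omitted; names hypothetical, with
 * puStackSlot standing in for guest memory at the new RSP:
 */
static void sketchCall64(uint64_t *puRip, uint64_t *puRsp, uint64_t *puStackSlot, uint64_t uNewRip)
{
    *puRsp      -= 8;           /* make room on the stack, */
    *puStackSlot = *puRip;      /* store the RIP of the next instruction, */
    *puRip       = uNewRip;     /* and branch to the target. */
}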
18805
18806typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18807
18808FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18809{
18810 /* A register operand makes no sense here; the far pointer must come from memory. */
18811 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18812 { /* likely */ }
18813 else
18814 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18815
18816 /* Far pointer loaded from memory. */
18817 switch (pVCpu->iem.s.enmEffOpSize)
18818 {
18819 case IEMMODE_16BIT:
18820 IEM_MC_BEGIN(3, 1);
18821 IEM_MC_ARG(uint16_t, u16Sel, 0);
18822 IEM_MC_ARG(uint16_t, offSeg, 1);
18823 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18827 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18828 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18829 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18830 IEM_MC_END();
18831 return VINF_SUCCESS;
18832
18833 case IEMMODE_64BIT:
18834 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18835 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18836 * and call far qword [rsp] encodings. */
18837 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18838 {
18839 IEM_MC_BEGIN(3, 1);
18840 IEM_MC_ARG(uint16_t, u16Sel, 0);
18841 IEM_MC_ARG(uint64_t, offSeg, 1);
18842 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
18843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18846 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18847 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18848 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18849 IEM_MC_END();
18850 return VINF_SUCCESS;
18851 }
18852 /* AMD falls thru. */
18853 /* fall thru */
18854
18855 case IEMMODE_32BIT:
18856 IEM_MC_BEGIN(3, 1);
18857 IEM_MC_ARG(uint16_t, u16Sel, 0);
18858 IEM_MC_ARG(uint32_t, offSeg, 1);
18859 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18863 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18864 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18865 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18866 IEM_MC_END();
18867 return VINF_SUCCESS;
18868
18869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18870 }
18871}
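
/*
 * Illustrative sketch, not part of the build: the Ep operand is stored
 * offset-first, selector-last, which is why the selector above is fetched at
 * displacement 2, 4 or 8 depending on the operand size. The m16:32 flavour:
 */
struct SketchFarPtr1632 /* m16:32 */
{
    uint32_t offSeg;    /* bytes 0..3: offset within the target segment */
    uint16_t uSel;      /* bytes 4..5: segment selector */
};                      /* (sizeof is 8 due to trailing padding; only 6 bytes are read) */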
18872
18873
18874/**
18875 * Opcode 0xff /3.
18876 * @param bRm The RM byte.
18877 */
18878FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
18879{
18880 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
18881 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
18882}
18883
18884
18885/**
18886 * Opcode 0xff /4.
18887 * @param bRm The RM byte.
18888 */
18889FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
18890{
18891 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
18892 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18893
18894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18895 {
18896 /* The new RIP is taken from a register. */
18897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18898 switch (pVCpu->iem.s.enmEffOpSize)
18899 {
18900 case IEMMODE_16BIT:
18901 IEM_MC_BEGIN(0, 1);
18902 IEM_MC_LOCAL(uint16_t, u16Target);
18903 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18904 IEM_MC_SET_RIP_U16(u16Target);
18905 IEM_MC_END();
18906 return VINF_SUCCESS;
18907
18908 case IEMMODE_32BIT:
18909 IEM_MC_BEGIN(0, 1);
18910 IEM_MC_LOCAL(uint32_t, u32Target);
18911 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18912 IEM_MC_SET_RIP_U32(u32Target);
18913 IEM_MC_END();
18914 return VINF_SUCCESS;
18915
18916 case IEMMODE_64BIT:
18917 IEM_MC_BEGIN(0, 1);
18918 IEM_MC_LOCAL(uint64_t, u64Target);
18919 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18920 IEM_MC_SET_RIP_U64(u64Target);
18921 IEM_MC_END();
18922 return VINF_SUCCESS;
18923
18924 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18925 }
18926 }
18927 else
18928 {
18929 /* The new RIP is taken from a memory location. */
18930 switch (pVCpu->iem.s.enmEffOpSize)
18931 {
18932 case IEMMODE_16BIT:
18933 IEM_MC_BEGIN(0, 2);
18934 IEM_MC_LOCAL(uint16_t, u16Target);
18935 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18938 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18939 IEM_MC_SET_RIP_U16(u16Target);
18940 IEM_MC_END();
18941 return VINF_SUCCESS;
18942
18943 case IEMMODE_32BIT:
18944 IEM_MC_BEGIN(0, 2);
18945 IEM_MC_LOCAL(uint32_t, u32Target);
18946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18949 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18950 IEM_MC_SET_RIP_U32(u32Target);
18951 IEM_MC_END();
18952 return VINF_SUCCESS;
18953
18954 case IEMMODE_64BIT:
18955 IEM_MC_BEGIN(0, 2);
18956 IEM_MC_LOCAL(uint64_t, u64Target);
18957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18960 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18961 IEM_MC_SET_RIP_U64(u64Target);
18962 IEM_MC_END();
18963 return VINF_SUCCESS;
18964
18965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18966 }
18967 }
18968}
18969
18970
18971/**
18972 * Opcode 0xff /5.
18973 * @param bRm The RM byte.
18974 */
18975FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
18976{
18977 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
18978 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
18979}
18980
18981
18982/**
18983 * Opcode 0xff /6.
18984 * @param bRm The RM byte.
18985 */
18986FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
18987{
18988 IEMOP_MNEMONIC(push_Ev, "push Ev");
18989
18990 /* Registers are handled by a common worker. */
18991 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18992 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18993
18994 /* Memory we do here. */
18995 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18996 switch (pVCpu->iem.s.enmEffOpSize)
18997 {
18998 case IEMMODE_16BIT:
18999 IEM_MC_BEGIN(0, 2);
19000 IEM_MC_LOCAL(uint16_t, u16Src);
19001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
19002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
19003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
19004 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
19005 IEM_MC_PUSH_U16(u16Src);
19006 IEM_MC_ADVANCE_RIP();
19007 IEM_MC_END();
19008 return VINF_SUCCESS;
19009
19010 case IEMMODE_32BIT:
19011 IEM_MC_BEGIN(0, 2);
19012 IEM_MC_LOCAL(uint32_t, u32Src);
19013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
19014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
19015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
19016 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
19017 IEM_MC_PUSH_U32(u32Src);
19018 IEM_MC_ADVANCE_RIP();
19019 IEM_MC_END();
19020 return VINF_SUCCESS;
19021
19022 case IEMMODE_64BIT:
19023 IEM_MC_BEGIN(0, 2);
19024 IEM_MC_LOCAL(uint64_t, u64Src);
19025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
19026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
19027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
19028 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
19029 IEM_MC_PUSH_U64(u64Src);
19030 IEM_MC_ADVANCE_RIP();
19031 IEM_MC_END();
19032 return VINF_SUCCESS;
19033
19034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
19035 }
19036}
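
/*
 * Illustrative sketch, not part of the build: IEM_MC_PUSH_U16/32/64 above
 * model the usual grow-down stack store. A 16-bit push against a flat byte
 * buffer (hypothetical names, no segmentation or fault checks):
 */
static void sketchPushU16(uint64_t *puRsp, uint8_t *pbStack, uint16_t uValue)
{
    *puRsp -= sizeof(uValue);                       /* stack grows down */
    pbStack[*puRsp]     = (uint8_t)uValue;          /* little-endian store */
    pbStack[*puRsp + 1] = (uint8_t)(uValue >> 8);
}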
19037
19038
19039/** Opcode 0xff. */
19040FNIEMOP_DEF(iemOp_Grp5)
19041{
19042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
19043 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
19044 {
19045 case 0:
19046 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
19047 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
19048 case 1:
19049 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
19050 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
19051 case 2:
19052 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
19053 case 3:
19054 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
19055 case 4:
19056 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
19057 case 5:
19058 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
19059 case 6:
19060 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
19061 case 7:
19062 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
19063 return IEMOP_RAISE_INVALID_OPCODE();
19064 }
19065 AssertFailedReturn(VERR_IEM_IPE_3);
19066}
19067
19068
19069
19070const PFNIEMOP g_apfnOneByteMap[256] =
19071{
19072 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
19073 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
19074 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
19075 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
19076 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
19077 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
19078 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
19079 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
19080 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
19081 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
19082 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
19083 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
19084 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
19085 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
19086 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
19087 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
19088 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
19089 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
19090 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
19091 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
19092 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
19093 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
19094 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
19095 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
19096 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
19097 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
19098 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
19099 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
19100 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
19101 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
19102 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
19103 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
19104 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
19105 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
19106 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
19107 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
19108 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
19109 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
19110 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
19111 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
19112 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
19113 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
19114 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
19115 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
19116 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
19117 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
19118 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
19119 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
19120 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
19121 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
19122 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
19123 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
19124 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
19125 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
19126 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
19127 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
19128 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
19129 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
19130 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
19131 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
19132 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
19133 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
19134 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
19135 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
19136};
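
/*
 * Illustrative sketch, not part of the build: the decoder fetches the first
 * opcode byte and makes one indirect call through the table above; prefix
 * bytes land on entries like iemOp_lock and iemOp_seg_ES, which record the
 * prefix and re-enter the table for the next byte. Generic form with
 * hypothetical types:
 */
typedef int (*PFNSKETCHOP)(void *pvCpu);
static int sketchDispatchOneByte(PFNSKETCHOP const *papfnMap, uint8_t bOpcode, void *pvCpu)
{
    return papfnMap[bOpcode](pvCpu);    /* e.g. 0xf6 -> iemOp_Grp3_Eb */
}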
19137
19138
19139/** @} */
19140
19141#ifdef _MSC_VER
19142# pragma warning(pop)
19143#endif