VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65752

Last change on this file since 65752 was 65752, checked in by vboxsync, 8 years ago

IEM: 0x0f 0x70 split up.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 667.7 KB
/* $Id: IEMAllInstructions.cpp.h 65752 2017-02-13 08:56:57Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
#endif


/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
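

/*
 * Editor's illustrative sketch (not part of the original source): how the
 * ModR/M tests above take the byte apart.  The field layout is mod(7:6),
 * reg(5:3), rm(2:0), and mod == 3 selects the register form.  The helper
 * name below is hypothetical; it merely restates the masks used above.
 */
#if 0 /* example only */
static void iemExampleDecodeModRm(uint8_t bRm)
{
    uint8_t const iMod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;  /* bits 7:6 */
    uint8_t const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* bits 5:3 */
    uint8_t const iRm  = bRm & X86_MODRM_RM_MASK;                            /* bits 2:0 */
    /* E.g. bRm = 0xc1: iMod = 3 (register form), iReg = 0, iRm = 1. */
    NOREF(iMod); NOREF(iReg); NOREF(iRm);
}
#endif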


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
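

/*
 * Editor's illustrative sketch (not part of the original source): the memory
 * forms above follow a map / modify / commit pattern.  The destination is
 * mapped read-write, the arithmetic helper updates it in place together with
 * a stack copy of EFLAGS, and the flags are only committed once the memory
 * commit has succeeded.  Roughly, assuming the PFNIEMAIMPLBINU16 helper type
 * (the function name below is hypothetical):
 */
#if 0 /* example only */
static int iemExampleMemRmwOrder(PFNIEMAIMPLBINU16 pfnU16, uint16_t *pu16MappedDst,
                                 uint16_t u16Src, uint32_t *pfEFlagsCtx)
{
    uint32_t fEFlags = *pfEFlagsCtx;         /* fetch flags into a stack local */
    pfnU16(pu16MappedDst, u16Src, &fEFlags); /* modify mapped memory + local flags */
    /* ... the memory commit/unmap happens here and must succeed first ... */
    *pfEFlagsCtx = fEFlags;                  /* only then are the flags committed */
    return VINF_SUCCESS;
}
#endif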


/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}

/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *, pu8Dst, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags, 2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
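

/*
 * Editor's illustrative sketch (not part of the original source): one-byte
 * opcodes such as 0x04 (add AL, Ib) are expected to route into the worker
 * above with the matching implementation table, along these lines (the
 * wrapper name below is hypothetical):
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_example_add_Al_Ib)
{
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
#endif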


/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
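

/*
 * Editor's illustrative sketch (not part of the original source): in 64-bit
 * mode the Iz immediate is still only 32 bits on the wire;
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 above fetches it and sign-extends to 64
 * bits, i.e.:
 */
#if 0 /* example only */
static uint64_t iemExampleSignExtendIz(uint32_t u32Imm)
{
    /* 0x80000000 becomes UINT64_C(0xffffffff80000000). */
    return (uint64_t)(int64_t)(int32_t)u32Imm;
}
#endif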


/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid with RM byte. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence and an 8-bit
 *  immediate. */
FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte and a Mod R/M
 *  sequence. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte, a Mod R/M sequence,
 *  and an 8-bit immediate. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
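

/*
 * Editor's illustrative sketch (not part of the original source): group 6
 * dispatches purely on the reg field of the ModR/M byte.  For the byte
 * sequence 0x0f 0x00 0xd8, reg = (0xd8 >> 3) & 7 = 3, so g_apfnGroup6[3]
 * (iemOp_Grp6_ltr) is invoked; mod = 3 and rm = 0 then select the AX
 * register form inside that worker.
 */
#if 0 /* example only */
static void iemExampleGroup6Dispatch(void)
{
    uint8_t const bRm = 0xd8; /* 0x0f 0x00 0xd8 -> ltr ax */
    Assert(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) == 3);
    Assert(g_apfnGroup6[3] == iemOp_Grp6_ltr);
}
#endif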


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
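

/*
 * Editor's illustrative sketch (not part of the original source): the 16-bit
 * SMSW paths above model what the architecturally undefined upper MSW bits
 * read as on older CPUs, i.e. ones above bit 4 on a 386 (OR 0xffe0) and
 * above bit 3 on a 286 (OR 0xfff0), while 486+ targets return CR0[15:0]
 * unmodified.  As plain C (the helper name is hypothetical):
 */
#if 0 /* example only */
static uint16_t iemExampleMswValue(uint16_t u16Cr0, unsigned uTargetCpu)
{
    if (uTargetCpu > IEMTARGETCPU_386)  /* 486 and later: raw low word of CR0 */
        return u16Cr0;
    if (uTargetCpu == IEMTARGETCPU_386) /* 386: bits 5..15 read as one */
        return u16Cr0 | UINT16_C(0xffe0);
    return u16Cr0 | UINT16_C(0xfff0);   /* 286: bits 4..15 read as one */
}
#endif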


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits (PE, MP, EM and TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
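

/*
 * Editor's illustrative sketch (not part of the original source): group 7
 * splits each /r case on mod as well -- mod != 3 gives the memory forms
 * (sgdt, sidt, lgdt, lidt, invlpg), while mod == 3 reuses the rm field to
 * encode register-form instructions.  E.g. 0x0f 0x01 0xd0: mod = 3, reg = 2,
 * rm = 0, so the lgdt memory path is skipped and xgetbv is decoded instead.
 */
#if 0 /* example only */
static void iemExampleGroup7Decode(void)
{
    uint8_t const bRm = 0xd0; /* 0x0f 0x01 0xd0 -> xgetbv */
    Assert((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT));
    Assert(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) == 2);
    Assert((bRm & X86_MODRM_RM_MASK) == 0);
}
#endif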

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);

/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
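

/*
 * Editor's illustrative sketch (not part of the original source): in the
 * register form above only the low quadword of the destination XMM register
 * is written (IEM_MC_STORE_XREG_U64), matching movsd xmm,xmm semantics where
 * the high quadword of the destination is preserved.  Viewing an XMM
 * register as two quadwords (hypothetical helper):
 */
#if 0 /* example only */
static void iemExampleMovsdRegReg(uint64_t au64Dst[2], uint64_t const au64Src[2])
{
    au64Dst[0] = au64Src[0]; /* low qword copied */
    /* au64Dst[1] is left untouched. */
}
#endif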
1728
1729
1730/** Opcode 0x0f 0x12. */
1731FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1732
1733/** Opcode 0x66 0x0f 0x12. */
1734FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1735
1736/** Opcode 0xf3 0x0f 0x12. */
1737FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1738
1739/** Opcode 0xf2 0x0f 0x12. */
1740FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1741
1742/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1743FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1744
1745/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1746FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1747{
1748 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1750 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1751 {
1752#if 0
1753 /*
1754 * Register, register.
1755 */
1756 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1757 IEM_MC_BEGIN(0, 1);
1758 IEM_MC_LOCAL(uint64_t, uSrc);
1759 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1760 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1761 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1762 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1763 IEM_MC_ADVANCE_RIP();
1764 IEM_MC_END();
1765#else
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767#endif
1768 }
1769 else
1770 {
1771 /*
1772 * Memory, register.
1773 */
1774 IEM_MC_BEGIN(0, 2);
1775 IEM_MC_LOCAL(uint64_t, uSrc);
1776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1777
1778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1779 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1780 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1781 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1782
1783 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1784 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1785
1786 IEM_MC_ADVANCE_RIP();
1787 IEM_MC_END();
1788 }
1789 return VINF_SUCCESS;
1790}
1791
1792/* Opcode 0xf3 0x0f 0x13 - invalid */
1793/* Opcode 0xf2 0x0f 0x13 - invalid */
1794
1795/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1796FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1797/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1798FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1799/* Opcode 0xf3 0x0f 0x14 - invalid */
1800/* Opcode 0xf2 0x0f 0x14 - invalid */
1801/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1802FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1803/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1804FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1805/* Opcode 0xf3 0x0f 0x15 - invalid */
1806/* Opcode 0xf2 0x0f 0x15 - invalid */
1807/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1808FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1809/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1810FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1811/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1812FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1813/* Opcode 0xf2 0x0f 0x16 - invalid */
1814/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1815FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1816/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1817FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1818/* Opcode 0xf3 0x0f 0x17 - invalid */
1819/* Opcode 0xf2 0x0f 0x17 - invalid */
1820
1821
1822/** Opcode 0x0f 0x18. */
1823FNIEMOP_DEF(iemOp_prefetch_Grp16)
1824{
1825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1826 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1827 {
1828 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1829 {
1830 case 4: /* Aliased to /0 for the time being according to AMD. */
1831 case 5: /* Aliased to /0 for the time being according to AMD. */
1832 case 6: /* Aliased to /0 for the time being according to AMD. */
1833 case 7: /* Aliased to /0 for the time being according to AMD. */
1834 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1835 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1836 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1837 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1839 }
1840
1841 IEM_MC_BEGIN(0, 1);
1842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1845 /* Currently a NOP. */
1846 NOREF(GCPtrEffSrc);
1847 IEM_MC_ADVANCE_RIP();
1848 IEM_MC_END();
1849 return VINF_SUCCESS;
1850 }
1851
1852 return IEMOP_RAISE_INVALID_OPCODE();
1853}
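
/*
 * Illustrative sketch, not used by the decoder: how the ModRM reg field
 * drives the group dispatch above.  The helper name is made up;
 * X86_MODRM_REG_SHIFT/SMASK are the real constants used throughout this file.
 */
#if 0 /* illustrative only */
/*  0f 18 00 -> reg=0 -> prefetchnta byte ptr [eax]
    0f 18 08 -> reg=1 -> prefetcht0  byte ptr [eax]
    0f 18 10 -> reg=2 -> prefetcht1  byte ptr [eax]
    0f 18 18 -> reg=3 -> prefetcht2  byte ptr [eax]   (reg=4..7 alias /0, see above) */
static unsigned iemExampleGrp16Hint(uint8_t bModRm)
{
    return (bModRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
}
#endif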
1854
1855
1856/** Opcode 0x0f 0x19..0x1f. */
1857FNIEMOP_DEF(iemOp_nop_Ev)
1858{
1859 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1862 {
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_BEGIN(0, 0);
1865 IEM_MC_ADVANCE_RIP();
1866 IEM_MC_END();
1867 }
1868 else
1869 {
1870 IEM_MC_BEGIN(0, 1);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1874 /* Currently a NOP. */
1875 NOREF(GCPtrEffSrc);
1876 IEM_MC_ADVANCE_RIP();
1877 IEM_MC_END();
1878 }
1879 return VINF_SUCCESS;
1880}
1881
1882
1883/** Opcode 0x0f 0x20. */
1884FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1885{
1886 /* mod is ignored, as are operand size overrides. */
1887 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1888 IEMOP_HLP_MIN_386();
1889 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1890 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1891 else
1892 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1893
1894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1895 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1897 {
1898 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1899 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1900 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1901 iCrReg |= 8;
1902 }
1903 switch (iCrReg)
1904 {
1905 case 0: case 2: case 3: case 4: case 8:
1906 break;
1907 default:
1908 return IEMOP_RAISE_INVALID_OPCODE();
1909 }
1910 IEMOP_HLP_DONE_DECODING();
1911
1912 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1913}
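
/*
 * Illustrative encodings for the LOCK/CR8 aliasing handled above; these are
 * hand-assembled examples, only valid on CPUs reporting fMovCr8In32Bit
 * (the AMD alternative CR8 encoding):
 *
 *      0f 20 d8            mov eax, cr3        ; modrm = 11 011 000
 *      f0 0f 20 c1         mov ecx, cr8        ; LOCK sets iCrReg |= 8
 */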
1914
1915
1916/** Opcode 0x0f 0x21. */
1917FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1918{
1919 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1920 IEMOP_HLP_MIN_386();
1921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1923 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1924 return IEMOP_RAISE_INVALID_OPCODE();
1925 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1926 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1927 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1928}
1929
1930
1931/** Opcode 0x0f 0x22. */
1932FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1933{
1934 /* mod is ignored, as are operand size overrides. */
1935 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1936 IEMOP_HLP_MIN_386();
1937 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1938 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1939 else
1940 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1941
1942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1943 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1944 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1945 {
1946 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1947 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1948 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1949 iCrReg |= 8;
1950 }
1951 switch (iCrReg)
1952 {
1953 case 0: case 2: case 3: case 4: case 8:
1954 break;
1955 default:
1956 return IEMOP_RAISE_INVALID_OPCODE();
1957 }
1958 IEMOP_HLP_DONE_DECODING();
1959
1960 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1961}
1962
1963
1964/** Opcode 0x0f 0x23. */
1965FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1966{
1967 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1968 IEMOP_HLP_MIN_386();
1969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1971 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1972 return IEMOP_RAISE_INVALID_OPCODE();
1973 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1974 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1975 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1976}
1977
1978
1979/** Opcode 0x0f 0x24. */
1980FNIEMOP_DEF(iemOp_mov_Rd_Td)
1981{
1982 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1983 /** @todo works on 386 and 486. */
1984 /* The RM byte is not considered, see testcase. */
1985 return IEMOP_RAISE_INVALID_OPCODE();
1986}
1987
1988
1989/** Opcode 0x0f 0x26. */
1990FNIEMOP_DEF(iemOp_mov_Td_Rd)
1991{
1992 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1993 /** @todo works on 386 and 486. */
1994 /* The RM byte is not considered, see testcase. */
1995 return IEMOP_RAISE_INVALID_OPCODE();
1996}
1997
1998
1999/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
2000FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2001{
2002 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
2003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2005 {
2006 /*
2007 * Register, register.
2008 */
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 IEM_MC_BEGIN(0, 0);
2011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2012 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2013 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2014 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2015 IEM_MC_ADVANCE_RIP();
2016 IEM_MC_END();
2017 }
2018 else
2019 {
2020 /*
2021 * Register, memory.
2022 */
2023 IEM_MC_BEGIN(0, 2);
2024 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2026
2027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2031
2032 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2033 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2034
2035 IEM_MC_ADVANCE_RIP();
2036 IEM_MC_END();
2037 }
2038 return VINF_SUCCESS;
2039}
2040
2041/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
2042FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2043{
2044 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
2045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2046 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2047 {
2048 /*
2049 * Register, register.
2050 */
2051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2052 IEM_MC_BEGIN(0, 0);
2053 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2054 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2055 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2056 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 }
2060 else
2061 {
2062 /*
2063 * Register, memory.
2064 */
2065 IEM_MC_BEGIN(0, 2);
2066 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2068
2069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2071 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2072 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2073
2074 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2075 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2076
2077 IEM_MC_ADVANCE_RIP();
2078 IEM_MC_END();
2079 }
2080 return VINF_SUCCESS;
2081}
2082
2083/* Opcode 0xf3 0x0f 0x28 - invalid */
2084/* Opcode 0xf2 0x0f 0x28 - invalid */
2085
2086/** Opcode 0x0f 0x29. */
2087FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
2088{
2089 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2090 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2091 else
2092 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2094 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2095 {
2096 /*
2097 * Register, register.
2098 */
2099 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
2100 IEM_MC_BEGIN(0, 0);
2101 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 else
2104 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2105 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2106 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2107 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 else
2112 {
2113 /*
2114 * Memory, register.
2115 */
2116 IEM_MC_BEGIN(0, 2);
2117 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2119
2120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2121 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2122 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2123 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2124 else
2125 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2126 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2127
2128 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2129 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2130
2131 IEM_MC_ADVANCE_RIP();
2132 IEM_MC_END();
2133 }
2134 return VINF_SUCCESS;
2135}
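
/*
 * Note: the IEM_MC_*_U128_ALIGN_SSE accessors used above enforce the 16-byte
 * alignment that movaps/movapd require (misaligned operands fault), whereas
 * the plain IEM_MC_FETCH_MEM_U128 used by movdqu further down performs no
 * alignment check.
 */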
2136
2137
2138/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2139FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2140/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2141FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2142/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2143FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2144/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2145FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2146
2147
2148/** Opcode 0x0f 0x2b. */
2149FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
2150{
2151 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2152 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2153 else
2154 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
2155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2156 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2157 {
2158 /*
2159 * memory, register.
2160 */
2161 IEM_MC_BEGIN(0, 2);
2162 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2164
2165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2166 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2169 else
2170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2172
2173 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2174 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2175
2176 IEM_MC_ADVANCE_RIP();
2177 IEM_MC_END();
2178 }
2179 /* The register, register encoding is invalid. */
2180 else
2181 return IEMOP_RAISE_INVALID_OPCODE();
2182 return VINF_SUCCESS;
2183}
2184
2185
2186/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2187FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2188/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2189FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2190/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2191FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2192/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2193FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2194
2195/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2196FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2197/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2198FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2199/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2200FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2201/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2202FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2203
2204/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2205FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2206/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2207FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2208/* Opcode 0xf3 0x0f 0x2e - invalid */
2209/* Opcode 0xf2 0x0f 0x2e - invalid */
2210
2211/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2212FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2213/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2214FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2215/* Opcode 0xf3 0x0f 0x2f - invalid */
2216/* Opcode 0xf2 0x0f 0x2f - invalid */
2217
2218/** Opcode 0x0f 0x30. */
2219FNIEMOP_DEF(iemOp_wrmsr)
2220{
2221 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2223 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2224}
2225
2226
2227/** Opcode 0x0f 0x31. */
2228FNIEMOP_DEF(iemOp_rdtsc)
2229{
2230 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2232 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2233}
2234
2235
2236/** Opcode 0x0f 0x32. */
2237FNIEMOP_DEF(iemOp_rdmsr)
2238{
2239 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2242}
2243
2244
2245/** Opcode 0x0f 0x33. */
2246FNIEMOP_STUB(iemOp_rdpmc);
2247/** Opcode 0x0f 0x34. */
2248FNIEMOP_STUB(iemOp_sysenter);
2249/** Opcode 0x0f 0x35. */
2250FNIEMOP_STUB(iemOp_sysexit);
2251/** Opcode 0x0f 0x37. */
2252FNIEMOP_STUB(iemOp_getsec);
2253/** Opcode 0x0f 0x38. */
2254FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2255/** Opcode 0x0f 0x3a. */
2256FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2257
2258
2259/**
2260 * Implements a conditional move.
2261 *
2262 * Wish there were an obvious way to do this where we could share and reduce
2263 * code bloat.
2264 *
2265 * @param a_Cnd The conditional "microcode" operation.
2266 */
2267#define CMOV_X(a_Cnd) \
2268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2269 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2270 { \
2271 switch (pVCpu->iem.s.enmEffOpSize) \
2272 { \
2273 case IEMMODE_16BIT: \
2274 IEM_MC_BEGIN(0, 1); \
2275 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2276 a_Cnd { \
2277 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2278 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2279 } IEM_MC_ENDIF(); \
2280 IEM_MC_ADVANCE_RIP(); \
2281 IEM_MC_END(); \
2282 return VINF_SUCCESS; \
2283 \
2284 case IEMMODE_32BIT: \
2285 IEM_MC_BEGIN(0, 1); \
2286 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2287 a_Cnd { \
2288 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2289 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2290 } IEM_MC_ELSE() { \
2291 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2292 } IEM_MC_ENDIF(); \
2293 IEM_MC_ADVANCE_RIP(); \
2294 IEM_MC_END(); \
2295 return VINF_SUCCESS; \
2296 \
2297 case IEMMODE_64BIT: \
2298 IEM_MC_BEGIN(0, 1); \
2299 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2300 a_Cnd { \
2301 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2302 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2303 } IEM_MC_ENDIF(); \
2304 IEM_MC_ADVANCE_RIP(); \
2305 IEM_MC_END(); \
2306 return VINF_SUCCESS; \
2307 \
2308 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2309 } \
2310 } \
2311 else \
2312 { \
2313 switch (pVCpu->iem.s.enmEffOpSize) \
2314 { \
2315 case IEMMODE_16BIT: \
2316 IEM_MC_BEGIN(0, 2); \
2317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2318 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2320 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2321 a_Cnd { \
2322 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2323 } IEM_MC_ENDIF(); \
2324 IEM_MC_ADVANCE_RIP(); \
2325 IEM_MC_END(); \
2326 return VINF_SUCCESS; \
2327 \
2328 case IEMMODE_32BIT: \
2329 IEM_MC_BEGIN(0, 2); \
2330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2331 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2333 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2334 a_Cnd { \
2335 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2336 } IEM_MC_ELSE() { \
2337 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2338 } IEM_MC_ENDIF(); \
2339 IEM_MC_ADVANCE_RIP(); \
2340 IEM_MC_END(); \
2341 return VINF_SUCCESS; \
2342 \
2343 case IEMMODE_64BIT: \
2344 IEM_MC_BEGIN(0, 2); \
2345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2346 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2348 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2349 a_Cnd { \
2350 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2351 } IEM_MC_ENDIF(); \
2352 IEM_MC_ADVANCE_RIP(); \
2353 IEM_MC_END(); \
2354 return VINF_SUCCESS; \
2355 \
2356 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2357 } \
2358 } do {} while (0)
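
/*
 * Illustrative sketch, not used by the decoder: a plain-C model of the
 * 32-bit register case CMOV_X expands to (the helper name is made up).
 * The move is conditional, but in 64-bit mode a 32-bit destination has its
 * high dword cleared either way, which is why the 32-bit paths above need
 * the IEM_MC_ELSE() / IEM_MC_CLEAR_HIGH_GREG_U64() branch.
 */
#if 0 /* illustrative only */
static uint64_t iemExampleCmov32(uint64_t uDst64, uint32_t uSrc32, bool fCondition)
{
    return fCondition
         ? (uint64_t)uSrc32              /* condition met: move, zero extending */
         : (uint64_t)(uint32_t)uDst64;   /* not met: no move, but still zero extends */
}
#endif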
2359
2360
2361
2362/** Opcode 0x0f 0x40. */
2363FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2364{
2365 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2366 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2367}
2368
2369
2370/** Opcode 0x0f 0x41. */
2371FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2372{
2373 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2374 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2375}
2376
2377
2378/** Opcode 0x0f 0x42. */
2379FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2380{
2381 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2382 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2383}
2384
2385
2386/** Opcode 0x0f 0x43. */
2387FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2388{
2389 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2390 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2391}
2392
2393
2394/** Opcode 0x0f 0x44. */
2395FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2396{
2397 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2398 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2399}
2400
2401
2402/** Opcode 0x0f 0x45. */
2403FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2404{
2405 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2406 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2407}
2408
2409
2410/** Opcode 0x0f 0x46. */
2411FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2412{
2413 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2414 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2415}
2416
2417
2418/** Opcode 0x0f 0x47. */
2419FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2420{
2421 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2422 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2423}
2424
2425
2426/** Opcode 0x0f 0x48. */
2427FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2428{
2429 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2430 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2431}
2432
2433
2434/** Opcode 0x0f 0x49. */
2435FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2436{
2437 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2438 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2439}
2440
2441
2442/** Opcode 0x0f 0x4a. */
2443FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2444{
2445 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2446 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2447}
2448
2449
2450/** Opcode 0x0f 0x4b. */
2451FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2452{
2453 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2454 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2455}
2456
2457
2458/** Opcode 0x0f 0x4c. */
2459FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2460{
2461 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2462 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2463}
2464
2465
2466/** Opcode 0x0f 0x4d. */
2467FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2468{
2469 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2470 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2471}
2472
2473
2474/** Opcode 0x0f 0x4e. */
2475FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2476{
2477 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2478 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2479}
2480
2481
2482/** Opcode 0x0f 0x4f. */
2483FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2484{
2485 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2486 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2487}
2488
2489#undef CMOV_X
2490
2491/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2492FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2493/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2494FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2495/* Opcode 0xf3 0x0f 0x50 - invalid */
2496/* Opcode 0xf2 0x0f 0x50 - invalid */
2497
2498/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2499FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2500/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2501FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2502/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2503FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2504/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2505FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2506
2507/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2508FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2509/* Opcode 0x66 0x0f 0x52 - invalid */
2510/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2511FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2512/* Opcode 0xf2 0x0f 0x52 - invalid */
2513
2514/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2515FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2516/* Opcode 0x66 0x0f 0x53 - invalid */
2517/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2518FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2519/* Opcode 0xf2 0x0f 0x53 - invalid */
2520
2521/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2522FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2523/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2524FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2525/* Opcode 0xf3 0x0f 0x54 - invalid */
2526/* Opcode 0xf2 0x0f 0x54 - invalid */
2527
2528/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2529FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2530/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2531FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2532/* Opcode 0xf3 0x0f 0x55 - invalid */
2533/* Opcode 0xf2 0x0f 0x55 - invalid */
2534
2535/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2536FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2537/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2538FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2539/* Opcode 0xf3 0x0f 0x56 - invalid */
2540/* Opcode 0xf2 0x0f 0x56 - invalid */
2541
2542/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2543FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2544/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2545FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2546/* Opcode 0xf3 0x0f 0x57 - invalid */
2547/* Opcode 0xf2 0x0f 0x57 - invalid */
2548
2549/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2550FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2551/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2552FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2553/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2554FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2555/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2556FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2557
2558/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2559FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2560/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2561FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2562/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2563FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2564/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2565FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2566
2567/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2568FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2569/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2570FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2571/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2572FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2573/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2574FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2575
2576/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2577FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2578/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2579FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2580/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2581FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2582/* Opcode 0xf2 0x0f 0x5b - invalid */
2583
2584/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2585FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2586/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2587FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2588/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2589FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2590/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2591FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2592
2593/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2594FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2595/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2596FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2597/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2598FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2599/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2600FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2601
2602/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2603FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2604/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2605FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2606/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2607FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2608/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2609FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2610
2611/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2612FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2613/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2614FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2615/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2616FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2617/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2618FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2619
2620/**
2621 * Common worker for SSE2 instructions on the forms:
2622 *     pxxxx xmm1, xmm2/mem128
2623 *
2624 * The 2nd operand is the first half of a register, which in the memory case
2625 * means a 128-bit aligned, 64-bit or 128-bit wide access of which only the
2626 * lower 64 bits are used.
2627 *
2628 * Exceptions type 4.
2629 */
2630FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2634 {
2635 /*
2636 * Register, register.
2637 */
2638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2639 IEM_MC_BEGIN(2, 0);
2640 IEM_MC_ARG(uint128_t *, pDst, 0);
2641 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2643 IEM_MC_PREPARE_SSE_USAGE();
2644 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2645 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2646 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2647 IEM_MC_ADVANCE_RIP();
2648 IEM_MC_END();
2649 }
2650 else
2651 {
2652 /*
2653 * Register, memory.
2654 */
2655 IEM_MC_BEGIN(2, 2);
2656 IEM_MC_ARG(uint128_t *, pDst, 0);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2660
2661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2664 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2665
2666 IEM_MC_PREPARE_SSE_USAGE();
2667 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2668 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2669
2670 IEM_MC_ADVANCE_RIP();
2671 IEM_MC_END();
2672 }
2673 return VINF_SUCCESS;
2674}
2675
2676
2677/**
2678 * Common worker for MMX instructions on the forms:
2679 *     pxxxx mm1, mm2/mem32
2680 *
2681 * The 2nd operand is the first half of a register, which in the memory
2682 * case means a 32-bit memory access.  Raises #UD if the implementation
2683 * has no pfnU64 worker (i.e. no MMX form of the instruction).
2684 *
2685 * Exceptions type 4.
2686 */
2687FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2688{
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (!pImpl->pfnU64)
2691 return IEMOP_RAISE_INVALID_OPCODE();
2692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2693 {
2694 /*
2695 * Register, register.
2696 */
2697 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2698 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(2, 0);
2701 IEM_MC_ARG(uint64_t *, pDst, 0);
2702 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2703 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2704 IEM_MC_PREPARE_FPU_USAGE();
2705 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2706 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2707 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /*
2714 * Register, memory.
2715 */
2716 IEM_MC_BEGIN(2, 2);
2717 IEM_MC_ARG(uint64_t *, pDst, 0);
2718 IEM_MC_LOCAL(uint32_t, uSrc);
2719 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2721
2722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2725 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2726
2727 IEM_MC_PREPARE_FPU_USAGE();
2728 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2729 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2730
2731 IEM_MC_ADVANCE_RIP();
2732 IEM_MC_END();
2733 }
2734 return VINF_SUCCESS;
2735}
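
/*
 * Illustrative sketch, not used by the decoder: what a pfnU64 worker for
 * punpcklbw computes, written as plain C (the helper name is made up).
 * E.g. uDst=0x0000000044332211 and uSrc=0x00000000ddccbbaa give
 * 0xdd44cc33bb22aa11.
 */
#if 0 /* illustrative only */
static uint64_t iemExamplePunpcklbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 4; iByte++)
    {
        uResult |= ((uDst >> (iByte * 8)) & UINT64_C(0xff)) << (iByte * 16);     /* even bytes <- low dst half */
        uResult |= ((uSrc >> (iByte * 8)) & UINT64_C(0xff)) << (iByte * 16 + 8); /* odd bytes  <- low src half */
    }
    return uResult;
}
#endif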
2736
2737
2738/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2739FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2740{
2741 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2742 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2743}
2744
2745/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, W */
2746FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2747{
2748 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2749 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2750}
2751
2752/* Opcode 0xf3 0x0f 0x60 - invalid */
2753
2754
2755/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2756FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2757{
2758 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says the MMX CPUID bit is required. */
2759 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2760}
2761
2762/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2763FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2764{
2765 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2766 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2767}
2768
2769/* Opcode 0xf3 0x0f 0x61 - invalid */
2770
2771
2772/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2773FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2774{
2775 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2776 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2777}
2778
2779/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2780FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2781{
2782 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2783 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2784}
2785
2786/* Opcode 0xf3 0x0f 0x62 - invalid */
2787
2788
2789
2790/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2791FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2792/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2793FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2794/* Opcode 0xf3 0x0f 0x63 - invalid */
2795
2796/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2797FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2798/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2799FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2800/* Opcode 0xf3 0x0f 0x64 - invalid */
2801
2802/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2803FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2804/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2805FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2806/* Opcode 0xf3 0x0f 0x65 - invalid */
2807
2808/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2809FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2810/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2811FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2812/* Opcode 0xf3 0x0f 0x66 - invalid */
2813
2814/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2815FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2816/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2817FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2818/* Opcode 0xf3 0x0f 0x67 - invalid */
2819
2820
2821/**
2822 * Common worker for MMX instructions on the form:
2823 * pxxxx mm1, mm2/mem64
2824 *
2825 * The 2nd operand is the second half of a register, which in the memory case
2826 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2827 * where it may read the full 128 bits or only the upper 64 bits.
2828 *
2829 * Exceptions type 4.
2830 */
2831FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2832{
2833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2834 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2836 {
2837 /*
2838 * Register, register.
2839 */
2840 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2841 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2843 IEM_MC_BEGIN(2, 0);
2844 IEM_MC_ARG(uint64_t *, pDst, 0);
2845 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2846 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2847 IEM_MC_PREPARE_FPU_USAGE();
2848 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2849 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2850 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2851 IEM_MC_ADVANCE_RIP();
2852 IEM_MC_END();
2853 }
2854 else
2855 {
2856 /*
2857 * Register, memory.
2858 */
2859 IEM_MC_BEGIN(2, 2);
2860 IEM_MC_ARG(uint64_t *, pDst, 0);
2861 IEM_MC_LOCAL(uint64_t, uSrc);
2862 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2864
2865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2867 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2868 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2869
2870 IEM_MC_PREPARE_FPU_USAGE();
2871 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2872 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2873
2874 IEM_MC_ADVANCE_RIP();
2875 IEM_MC_END();
2876 }
2877 return VINF_SUCCESS;
2878}
2879
2880
2881/**
2882 * Common worker for SSE2 instructions on the form:
2883 * pxxxx xmm1, xmm2/mem128
2884 *
2885 * The 2nd operand is the second half of a register, which in the memory case
2886 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2887 * where it may read the full 128 bits or only the upper 64 bits.
2888 *
2889 * Exceptions type 4.
2890 */
2891FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2892{
2893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2895 {
2896 /*
2897 * Register, register.
2898 */
2899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2900 IEM_MC_BEGIN(2, 0);
2901 IEM_MC_ARG(uint128_t *, pDst, 0);
2902 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2903 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2904 IEM_MC_PREPARE_SSE_USAGE();
2905 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2906 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2907 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2908 IEM_MC_ADVANCE_RIP();
2909 IEM_MC_END();
2910 }
2911 else
2912 {
2913 /*
2914 * Register, memory.
2915 */
2916 IEM_MC_BEGIN(2, 2);
2917 IEM_MC_ARG(uint128_t *, pDst, 0);
2918 IEM_MC_LOCAL(uint128_t, uSrc);
2919 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2921
2922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2924 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2925 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2926
2927 IEM_MC_PREPARE_SSE_USAGE();
2928 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2929 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2930
2931 IEM_MC_ADVANCE_RIP();
2932 IEM_MC_END();
2933 }
2934 return VINF_SUCCESS;
2935}
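
/*
 * For the high-high workers the inputs come from the upper halves instead,
 * e.g. punpckhbw with uDst=0x8877665544332211 and uSrc=0xffeeddccbbaa9988
 * yields 0xff88ee77dd66cc55 (mirror the iemExamplePunpcklbwU64 sketch above
 * with iByte + 4 as the source byte index).
 */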
2936
2937
2938/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2939FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2940{
2941 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2942 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2943}
2944
2945/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2946FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2947{
2948 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2949 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2950}
2951/* Opcode 0xf3 0x0f 0x68 - invalid */
2952
2953
2954/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2955FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2956{
2957 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2958 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2959}
2960
2961/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2962FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2963{
2964 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2965 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2966}
2968/* Opcode 0xf3 0x0f 0x69 - invalid */
2969
2970
2971/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2972FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2973{
2974 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2975 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2976}
2977
2978/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2979FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2980{
2981 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2982 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2983}
2984/* Opcode 0xf3 0x0f 0x6a - invalid */
2985
2986
2987/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2988FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2989/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2990FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2991/* Opcode 0xf3 0x0f 0x6b - invalid */
2992
2993
2994/* Opcode 0x0f 0x6c - invalid */
2995
2996/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2997FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2998{
2999 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
3000 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3001}
3002
3003/* Opcode 0xf3 0x0f 0x6c - invalid */
3004/* Opcode 0xf2 0x0f 0x6c - invalid */
3005
3006
3007/* Opcode 0x0f 0x6d - invalid */
3008
3009/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
3010FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3011{
3012 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
3013 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3014}
3015
3016/* Opcode 0xf3 0x0f 0x6d - invalid */
3017
3018
3019/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3020FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3021{
3022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3023 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3024 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3025 else
3026 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3028 {
3029 /* MMX, greg */
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 IEM_MC_BEGIN(0, 1);
3032 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3034 IEM_MC_LOCAL(uint64_t, u64Tmp);
3035 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3036 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3037 else
3038 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3039 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3040 IEM_MC_ADVANCE_RIP();
3041 IEM_MC_END();
3042 }
3043 else
3044 {
3045 /* MMX, [mem] */
3046 IEM_MC_BEGIN(0, 2);
3047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3048 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3051 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3052 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3053 {
3054 IEM_MC_LOCAL(uint64_t, u64Tmp);
3055 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3056 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3057 }
3058 else
3059 {
3060 IEM_MC_LOCAL(uint32_t, u32Tmp);
3061 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3062 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3063 }
3064 IEM_MC_ADVANCE_RIP();
3065 IEM_MC_END();
3066 }
3067 return VINF_SUCCESS;
3068}
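
/*
 * Illustrative 64-bit mode encodings for the REX.W split above:
 *
 *      0f 6e c0            movd mm0, eax       ; 32-bit source, zero extended
 *      48 0f 6e c0         movq mm0, rax       ; REX.W selects the 64-bit form
 */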
3069
3070/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
3071FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3072{
3073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3074 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3075 IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
3076 else
3077 IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
3078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3079 {
3080 /* XMM, greg*/
3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3082 IEM_MC_BEGIN(0, 1);
3083 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3084 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3085 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3086 {
3087 IEM_MC_LOCAL(uint64_t, u64Tmp);
3088 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3089 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3090 }
3091 else
3092 {
3093 IEM_MC_LOCAL(uint32_t, u32Tmp);
3094 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3095 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3096 }
3097 IEM_MC_ADVANCE_RIP();
3098 IEM_MC_END();
3099 }
3100 else
3101 {
3102 /* XMM, [mem] */
3103 IEM_MC_BEGIN(0, 2);
3104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3105 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3108 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3110 {
3111 IEM_MC_LOCAL(uint64_t, u64Tmp);
3112 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3113 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3114 }
3115 else
3116 {
3117 IEM_MC_LOCAL(uint32_t, u32Tmp);
3118 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3119 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3120 }
3121 IEM_MC_ADVANCE_RIP();
3122 IEM_MC_END();
3123 }
3124 return VINF_SUCCESS;
3125}
3126
3127/* Opcode 0xf3 0x0f 0x6e - invalid */
3128
3129
3130/** Opcode 0x0f 0x6f - movq Pq, Qq */
3131FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3132{
3133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3134 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3135 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3136 {
3137 /*
3138 * Register, register.
3139 */
3140 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3141 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3143 IEM_MC_BEGIN(0, 1);
3144 IEM_MC_LOCAL(uint64_t, u64Tmp);
3145 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3146 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3147 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3148 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3149 IEM_MC_ADVANCE_RIP();
3150 IEM_MC_END();
3151 }
3152 else
3153 {
3154 /*
3155 * Register, memory.
3156 */
3157 IEM_MC_BEGIN(0, 2);
3158 IEM_MC_LOCAL(uint64_t, u64Tmp);
3159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3160
3161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3163 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3164 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3165 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3166 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3167
3168 IEM_MC_ADVANCE_RIP();
3169 IEM_MC_END();
3170 }
3171 return VINF_SUCCESS;
3172}
3173
3174/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
3175FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3176{
3177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3178 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3179 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3180 {
3181 /*
3182 * Register, register.
3183 */
3184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3185 IEM_MC_BEGIN(0, 0);
3186 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3187 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3188 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3189 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3190 IEM_MC_ADVANCE_RIP();
3191 IEM_MC_END();
3192 }
3193 else
3194 {
3195 /*
3196 * Register, memory.
3197 */
3198 IEM_MC_BEGIN(0, 2);
3199 IEM_MC_LOCAL(uint128_t, u128Tmp);
3200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3201
3202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3204 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3205 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3206 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3207 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3208
3209 IEM_MC_ADVANCE_RIP();
3210 IEM_MC_END();
3211 }
3212 return VINF_SUCCESS;
3213}
3214
3215/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
3216FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3217{
3218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3219 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3220 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3221 {
3222 /*
3223 * Register, register.
3224 */
3225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3226 IEM_MC_BEGIN(0, 0);
3227 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3228 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3229 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3230 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3231 IEM_MC_ADVANCE_RIP();
3232 IEM_MC_END();
3233 }
3234 else
3235 {
3236 /*
3237 * Register, memory.
3238 */
3239 IEM_MC_BEGIN(0, 2);
3240 IEM_MC_LOCAL(uint128_t, u128Tmp);
3241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3242
3243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3245 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3246 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3247 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3248 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3249
3250 IEM_MC_ADVANCE_RIP();
3251 IEM_MC_END();
3252 }
3253 return VINF_SUCCESS;
3254}
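
/*
 * Illustrative encodings for the three 0x6f forms decoded above (the
 * mandatory prefix selects the worker):
 *
 *      0f 6f c1            movq   mm0, mm1     ; MMX
 *      66 0f 6f c1         movdqa xmm0, xmm1   ; memory form must be 16-byte aligned
 *      f3 0f 6f c1         movdqu xmm0, xmm1   ; memory form may be unaligned
 */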
3255
3256
3257/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3258FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3259{
3260 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3262 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3263 {
3264 /*
3265 * Register, register.
3266 */
3267 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3269
3270 IEM_MC_BEGIN(3, 0);
3271 IEM_MC_ARG(uint64_t *, pDst, 0);
3272 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3273 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3274 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3275 IEM_MC_PREPARE_FPU_USAGE();
3276 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3277 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3278 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3279 IEM_MC_ADVANCE_RIP();
3280 IEM_MC_END();
3281 }
3282 else
3283 {
3284 /*
3285 * Register, memory.
3286 */
3287 IEM_MC_BEGIN(3, 2);
3288 IEM_MC_ARG(uint64_t *, pDst, 0);
3289 IEM_MC_LOCAL(uint64_t, uSrc);
3290 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3292
3293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3294 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3295 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3297 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3298
3299 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3300 IEM_MC_PREPARE_FPU_USAGE();
3301 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3302 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3303
3304 IEM_MC_ADVANCE_RIP();
3305 IEM_MC_END();
3306 }
3307 return VINF_SUCCESS;
3308}
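
/*
 * Illustrative sketch, not used by the decoder: how the pshufw immediate
 * ("bEvil") selects source words; each 2-bit field picks one of the four
 * input words (the helper name is made up).  bEvil=0x1b reverses the words.
 */
#if 0 /* illustrative only */
static uint64_t iemExamplePshufwU64(uint64_t uSrc, uint8_t bEvil)
{
    uint64_t uResult = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bEvil >> (iWord * 2)) & 3;                      /* 2-bit selector */
        uResult |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16); /* copy the selected word */
    }
    return uResult;
}
#endif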
3309
3310/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
3311FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3312{
3313 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
3314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3316 {
3317 /*
3318 * Register, register.
3319 */
3320 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3322
3323 IEM_MC_BEGIN(3, 0);
3324 IEM_MC_ARG(uint128_t *, pDst, 0);
3325 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3326 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3327 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3328 IEM_MC_PREPARE_SSE_USAGE();
3329 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3330 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3331 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3332 IEM_MC_ADVANCE_RIP();
3333 IEM_MC_END();
3334 }
3335 else
3336 {
3337 /*
3338 * Register, memory.
3339 */
3340 IEM_MC_BEGIN(3, 2);
3341 IEM_MC_ARG(uint128_t *, pDst, 0);
3342 IEM_MC_LOCAL(uint128_t, uSrc);
3343 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3345
3346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3347 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3348 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3350 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3351
3352 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3353 IEM_MC_PREPARE_SSE_USAGE();
3354 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3355 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3356
3357 IEM_MC_ADVANCE_RIP();
3358 IEM_MC_END();
3359 }
3360 return VINF_SUCCESS;
3361}
3362
3363/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
3364FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3365{
3366 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
3367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3368 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3369 {
3370 /*
3371 * Register, register.
3372 */
3373 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3375
3376 IEM_MC_BEGIN(3, 0);
3377 IEM_MC_ARG(uint128_t *, pDst, 0);
3378 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3379 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3380 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3381 IEM_MC_PREPARE_SSE_USAGE();
3382 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3383 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3384 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3385 IEM_MC_ADVANCE_RIP();
3386 IEM_MC_END();
3387 }
3388 else
3389 {
3390 /*
3391 * Register, memory.
3392 */
3393 IEM_MC_BEGIN(3, 2);
3394 IEM_MC_ARG(uint128_t *, pDst, 0);
3395 IEM_MC_LOCAL(uint128_t, uSrc);
3396 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3398
3399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3400 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3401 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3403 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3404
3405 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3406 IEM_MC_PREPARE_SSE_USAGE();
3407 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3408 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3409
3410 IEM_MC_ADVANCE_RIP();
3411 IEM_MC_END();
3412 }
3413 return VINF_SUCCESS;
3414}
3415
3416/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
3417FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3418{
3419 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
3420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3422 {
3423 /*
3424 * Register, register.
3425 */
3426 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3428
3429 IEM_MC_BEGIN(3, 0);
3430 IEM_MC_ARG(uint128_t *, pDst, 0);
3431 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3432 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3434 IEM_MC_PREPARE_SSE_USAGE();
3435 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3436 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3437 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3438 IEM_MC_ADVANCE_RIP();
3439 IEM_MC_END();
3440 }
3441 else
3442 {
3443 /*
3444 * Register, memory.
3445 */
3446 IEM_MC_BEGIN(3, 2);
3447 IEM_MC_ARG(uint128_t *, pDst, 0);
3448 IEM_MC_LOCAL(uint128_t, uSrc);
3449 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3451
3452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3453 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3454 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3456 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3457
3458 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3459 IEM_MC_PREPARE_SSE_USAGE();
3460 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3461 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3462
3463 IEM_MC_ADVANCE_RIP();
3464 IEM_MC_END();
3465 }
3466 return VINF_SUCCESS;
3467}
3468
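/*
 * For reference: pshufhw and pshuflw above are the high/low-quadword
 * counterparts of pshufd.  pshufhw shuffles the four words of the high
 * quadword under imm8 control and passes the low quadword through
 * unchanged, while pshuflw does the mirror image.  E.g. pshuflw
 * xmm1, xmm2, 0x1b reverses xmm2's four low words into xmm1 and copies
 * the high quadword as-is.
 */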
3469
3470/** Opcode 0x0f 0x71 11/2. */
3471FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3472
3473/** Opcode 0x66 0x0f 0x71 11/2. */
3474FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
3475
3476/** Opcode 0x0f 0x71 11/4. */
3477FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3478
3479/** Opcode 0x66 0x0f 0x71 11/4. */
3480FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
3481
3482/** Opcode 0x0f 0x71 11/6. */
3483FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3484
3485/** Opcode 0x66 0x0f 0x71 11/6. */
3486FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3487
3488
3489/** Opcode 0x0f 0x71. */
3490FNIEMOP_DEF(iemOp_Grp12)
3491{
3492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3493 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3494 return IEMOP_RAISE_INVALID_OPCODE();
3495 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3496 {
3497 case 0: case 1: case 3: case 5: case 7:
3498 return IEMOP_RAISE_INVALID_OPCODE();
3499 case 2:
3500 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3501 {
3502 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3503 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3504 default: return IEMOP_RAISE_INVALID_OPCODE();
3505 }
3506 case 4:
3507 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3508 {
3509 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3510 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3511 default: return IEMOP_RAISE_INVALID_OPCODE();
3512 }
3513 case 6:
3514 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3515 {
3516 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3517 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3518 default: return IEMOP_RAISE_INVALID_OPCODE();
3519 }
3520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3521 }
3522}
3523
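/*
 * Decoding example for group 12 above: the ModR/M reg field selects the
 * operation and the mandatory prefix selects the register file, so
 * 0F 71 /6 ib decodes to psllw mm, imm8 (MMX) while 66 0F 71 /2 ib
 * decodes to psrlw xmm, imm8 (SSE2); every other reg/prefix combination
 * raises #UD.  Groups 13 and 14 below follow the same scheme.
 */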
3524
3525/** Opcode 0x0f 0x72 11/2. */
3526FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3527
3528/** Opcode 0x66 0x0f 0x72 11/2. */
3529FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3530
3531/** Opcode 0x0f 0x72 11/4. */
3532FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3533
3534/** Opcode 0x66 0x0f 0x72 11/4. */
3535FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3536
3537/** Opcode 0x0f 0x72 11/6. */
3538FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3539
3540/** Opcode 0x66 0x0f 0x72 11/6. */
3541FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3542
3543
3544/** Opcode 0x0f 0x72. */
3545FNIEMOP_DEF(iemOp_Grp13)
3546{
3547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3548 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3549 return IEMOP_RAISE_INVALID_OPCODE();
3550 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3551 {
3552 case 0: case 1: case 3: case 5: case 7:
3553 return IEMOP_RAISE_INVALID_OPCODE();
3554 case 2:
3555 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3556 {
3557 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3558 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3559 default: return IEMOP_RAISE_INVALID_OPCODE();
3560 }
3561 case 4:
3562 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3563 {
3564 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3565 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3566 default: return IEMOP_RAISE_INVALID_OPCODE();
3567 }
3568 case 6:
3569 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3570 {
3571 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3572 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3573 default: return IEMOP_RAISE_INVALID_OPCODE();
3574 }
3575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3576 }
3577}
3578
3579
3580/** Opcode 0x0f 0x73 11/2. */
3581FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3582
3583/** Opcode 0x66 0x0f 0x73 11/2. */
3584FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3585
3586/** Opcode 0x66 0x0f 0x73 11/3. */
3587FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3588
3589/** Opcode 0x0f 0x73 11/6. */
3590FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3591
3592/** Opcode 0x66 0x0f 0x73 11/6. */
3593FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3594
3595/** Opcode 0x66 0x0f 0x73 11/7. */
3596FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3597
3598
3599/** Opcode 0x0f 0x73. */
3600FNIEMOP_DEF(iemOp_Grp14)
3601{
3602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3603 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3604 return IEMOP_RAISE_INVALID_OPCODE();
3605 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3606 {
3607 case 0: case 1: case 4: case 5:
3608 return IEMOP_RAISE_INVALID_OPCODE();
3609 case 2:
3610 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3611 {
3612 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3613 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3614 default: return IEMOP_RAISE_INVALID_OPCODE();
3615 }
3616 case 3:
3617 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3618 {
3619 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3620 default: return IEMOP_RAISE_INVALID_OPCODE();
3621 }
3622 case 6:
3623 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3624 {
3625 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3626 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3627 default: return IEMOP_RAISE_INVALID_OPCODE();
3628 }
3629 case 7:
3630 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3631 {
3632 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3633 default: return IEMOP_RAISE_INVALID_OPCODE();
3634 }
3635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3636 }
3637}
3638
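/*
 * Illustrative sketch (not part of the build) of the byte shifts unique
 * to group 14: psrldq/pslldq (66 0F 73 /3 and /7) shift the full 128-bit
 * value by imm8 *bytes*, zero-filling the vacated lanes, and any count
 * above 15 clears the register.  The sketch shows the right shift; its
 * names are expository only.
 */
#if 0
static void psrldqExample(uint8_t abDst[16], uint8_t const abSrc[16], uint8_t cbShift)
{
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = i + cbShift < 16 ? abSrc[i + cbShift] : 0; /* shift toward byte 0, zero fill */
}
#endif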
3639
3640/**
3641 * Common worker for SSE2 and MMX instructions on the forms:
3642 * pxxx mm1, mm2/mem64
3643 * pxxx xmm1, xmm2/mem128
3644 *
3645 * Proper alignment of the 128-bit operand is enforced.
3646 * Exceptions type 4. SSE2 and MMX cpuid checks.
3647 */
3648FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3649{
3650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3651 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3652 {
3653 case IEM_OP_PRF_SIZE_OP: /* SSE */
3654 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3655 {
3656 /*
3657 * Register, register.
3658 */
3659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3660 IEM_MC_BEGIN(2, 0);
3661 IEM_MC_ARG(uint128_t *, pDst, 0);
3662 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3664 IEM_MC_PREPARE_SSE_USAGE();
3665 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3666 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3667 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3668 IEM_MC_ADVANCE_RIP();
3669 IEM_MC_END();
3670 }
3671 else
3672 {
3673 /*
3674 * Register, memory.
3675 */
3676 IEM_MC_BEGIN(2, 2);
3677 IEM_MC_ARG(uint128_t *, pDst, 0);
3678 IEM_MC_LOCAL(uint128_t, uSrc);
3679 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3681
3682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3684 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3685 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3686
3687 IEM_MC_PREPARE_SSE_USAGE();
3688 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3689 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3690
3691 IEM_MC_ADVANCE_RIP();
3692 IEM_MC_END();
3693 }
3694 return VINF_SUCCESS;
3695
3696 case 0: /* MMX */
3697 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3698 {
3699 /*
3700 * Register, register.
3701 */
3702 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3703 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3705 IEM_MC_BEGIN(2, 0);
3706 IEM_MC_ARG(uint64_t *, pDst, 0);
3707 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3708 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3709 IEM_MC_PREPARE_FPU_USAGE();
3710 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3711 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3712 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3713 IEM_MC_ADVANCE_RIP();
3714 IEM_MC_END();
3715 }
3716 else
3717 {
3718 /*
3719 * Register, memory.
3720 */
3721 IEM_MC_BEGIN(2, 2);
3722 IEM_MC_ARG(uint64_t *, pDst, 0);
3723 IEM_MC_LOCAL(uint64_t, uSrc);
3724 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3726
3727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3729 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3730 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3731
3732 IEM_MC_PREPARE_FPU_USAGE();
3733 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3734 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3735
3736 IEM_MC_ADVANCE_RIP();
3737 IEM_MC_END();
3738 }
3739 return VINF_SUCCESS;
3740
3741 default:
3742 return IEMOP_RAISE_INVALID_OPCODE();
3743 }
3744}
3745
3746
3747/** Opcode 0x0f 0x74. */
3748FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3749{
3750 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3751 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3752}
3753
3754
3755/** Opcode 0x0f 0x75. */
3756FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3757{
3758 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3759 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3760}
3761
3762
3763/** Opcode 0x0f 0x76. */
3764FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
3765{
3766 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3767 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3768}
3769
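/*
 * Illustrative sketch (not part of the build) of the lane compare behind
 * the three pcmpeq* opcodes above: each equal byte/word/dword lane is set
 * to all ones, each unequal lane to all zeros.  The byte variant:
 */
#if 0
static void pcmpeqbExample(uint8_t abDst[16], uint8_t const abSrc[16])
{
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = abDst[i] == abSrc[i] ? 0xff : 0x00; /* the destination is also the first operand */
}
#endif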
3770
3771/** Opcode 0x0f 0x77 - emms / vzeroupper / vzeroall */
3772FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3773/* Opcode 0x66 0x0f 0x77 - invalid */
3774/* Opcode 0xf3 0x0f 0x77 - invalid */
3775/* Opcode 0xf2 0x0f 0x77 - invalid */
3776
3777/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3778FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3779/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3780FNIEMOP_STUB(iemOp_AmdGrp17);
3781/* Opcode 0xf3 0x0f 0x78 - invalid */
3782/* Opcode 0xf2 0x0f 0x78 - invalid */
3783
3784/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3785FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3786/* Opcode 0x66 0x0f 0x79 - invalid */
3787/* Opcode 0xf3 0x0f 0x79 - invalid */
3788/* Opcode 0xf2 0x0f 0x79 - invalid */
3789
3790/* Opcode 0x0f 0x7a - invalid */
3791/* Opcode 0x66 0x0f 0x7a - invalid */
3792/* Opcode 0xf3 0x0f 0x7a - invalid */
3793/* Opcode 0xf2 0x0f 0x7a - invalid */
3794
3795/* Opcode 0x0f 0x7b - invalid */
3796/* Opcode 0x66 0x0f 0x7b - invalid */
3797/* Opcode 0xf3 0x0f 0x7b - invalid */
3798/* Opcode 0xf2 0x0f 0x7b - invalid */
3799
3800/* Opcode 0x0f 0x7c - invalid */
3801/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3802FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3803/* Opcode 0xf3 0x0f 0x7c - invalid */
3804/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3805FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3806
3807/* Opcode 0x0f 0x7d - invalid */
3808/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3809FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3810/* Opcode 0xf3 0x0f 0x7d - invalid */
3811/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3812FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3813
3814
3815/** Opcode 0x0f 0x7e. */
3816FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3817{
3818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3819 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3820 {
3821 case IEM_OP_PRF_SIZE_OP: /* SSE */
3822 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3823 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3824 else
3825 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3826 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3827 {
3828 /* greg, XMM */
3829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3830 IEM_MC_BEGIN(0, 1);
3831 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3832 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3833 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3834 {
3835 IEM_MC_LOCAL(uint64_t, u64Tmp);
3836 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3837 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3838 }
3839 else
3840 {
3841 IEM_MC_LOCAL(uint32_t, u32Tmp);
3842 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3843 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3844 }
3845 IEM_MC_ADVANCE_RIP();
3846 IEM_MC_END();
3847 }
3848 else
3849 {
3850 /* [mem], XMM */
3851 IEM_MC_BEGIN(0, 2);
3852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3853 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate byte */
3855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3856 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3857 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3858 {
3859 IEM_MC_LOCAL(uint64_t, u64Tmp);
3860 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3861 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3862 }
3863 else
3864 {
3865 IEM_MC_LOCAL(uint32_t, u32Tmp);
3866 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3867 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3868 }
3869 IEM_MC_ADVANCE_RIP();
3870 IEM_MC_END();
3871 }
3872 return VINF_SUCCESS;
3873
3874 case 0: /* MMX */
3875 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3876 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3877 else
3878 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3879 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3880 {
3881 /* greg, MMX */
3882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3883 IEM_MC_BEGIN(0, 1);
3884 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3885 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3886 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3887 {
3888 IEM_MC_LOCAL(uint64_t, u64Tmp);
3889 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3890 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3891 }
3892 else
3893 {
3894 IEM_MC_LOCAL(uint32_t, u32Tmp);
3895 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3896 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3897 }
3898 IEM_MC_ADVANCE_RIP();
3899 IEM_MC_END();
3900 }
3901 else
3902 {
3903 /* [mem], MMX */
3904 IEM_MC_BEGIN(0, 2);
3905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3906 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate byte */
3908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3909 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3910 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3911 {
3912 IEM_MC_LOCAL(uint64_t, u64Tmp);
3913 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3914 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3915 }
3916 else
3917 {
3918 IEM_MC_LOCAL(uint32_t, u32Tmp);
3919 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3920 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3921 }
3922 IEM_MC_ADVANCE_RIP();
3923 IEM_MC_END();
3924 }
3925 return VINF_SUCCESS;
3926
3927 default:
3928 return IEMOP_RAISE_INVALID_OPCODE();
3929 }
3930}
3931
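/*
 * Prefix/REX.W decoding examples for the opcode above: 66 0F 7E /r is
 * movd Ed,Vd (low dword of an XMM register to a 32-bit GPR or memory),
 * 66 REX.W 0F 7E /r widens that to movq Eq,Vq, and the prefix-less forms
 * are the MMX counterparts movd Ed,Pd and movq Eq,Pq.
 */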
3932
3933/** Opcode 0x0f 0x7f. */
3934FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3935{
3936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3937 bool fAligned = false;
3938 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3939 {
3940 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3941 fAligned = true;
3942 /* fall thru */
3943 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3944 if (fAligned)
3945 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3946 else
3947 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3948 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3949 {
3950 /*
3951 * Register, register.
3952 */
3953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3954 IEM_MC_BEGIN(0, 0);
3955 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3956 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3957 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3958 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3959 IEM_MC_ADVANCE_RIP();
3960 IEM_MC_END();
3961 }
3962 else
3963 {
3964 /*
3965 * Register, memory.
3966 */
3967 IEM_MC_BEGIN(0, 2);
3968 IEM_MC_LOCAL(uint128_t, u128Tmp);
3969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3970
3971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3973 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3974 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3975
3976 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3977 if (fAligned)
3978 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3979 else
3980 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3981
3982 IEM_MC_ADVANCE_RIP();
3983 IEM_MC_END();
3984 }
3985 return VINF_SUCCESS;
3986
3987 case 0: /* MMX */
3988 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3989
3990 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3991 {
3992 /*
3993 * Register, register.
3994 */
3995 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3996 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3998 IEM_MC_BEGIN(0, 1);
3999 IEM_MC_LOCAL(uint64_t, u64Tmp);
4000 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4001 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4002 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4003 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4004 IEM_MC_ADVANCE_RIP();
4005 IEM_MC_END();
4006 }
4007 else
4008 {
4009 /*
4010 * Register, memory.
4011 */
4012 IEM_MC_BEGIN(0, 2);
4013 IEM_MC_LOCAL(uint64_t, u64Tmp);
4014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4015
4016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4018 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4019 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4020
4021 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4022 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4023
4024 IEM_MC_ADVANCE_RIP();
4025 IEM_MC_END();
4026 }
4027 return VINF_SUCCESS;
4028
4029 default:
4030 return IEMOP_RAISE_INVALID_OPCODE();
4031 }
4032}
4033
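/*
 * Alignment note for the opcode above: the 66 form (movdqa) goes through
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE, which raises #GP(0) for a store that
 * is not 16-byte aligned, whereas the F3 form (movdqu) stores unaligned
 * without complaint; the prefix-less form is the 64-bit MMX movq store.
 */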
4034
4035
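/*
 * The sixteen long-form conditional jumps below (0F 80..0F 8F) share one
 * pattern and differ only in the EFLAGS condition tested: OF (jo/jno),
 * CF (jc/jnc), ZF (je/jne), CF|ZF (jbe/jnbe), SF (js/jns), PF (jp/jnp),
 * SF!=OF (jl/jnl) and ZF||SF!=OF (jle/jnle).  The branch target is the
 * address of the next instruction plus the sign-extended immediate.
 */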
4036/** Opcode 0x0f 0x80. */
4037FNIEMOP_DEF(iemOp_jo_Jv)
4038{
4039 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4040 IEMOP_HLP_MIN_386();
4041 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4042 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4043 {
4044 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4046
4047 IEM_MC_BEGIN(0, 0);
4048 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4049 IEM_MC_REL_JMP_S16(i16Imm);
4050 } IEM_MC_ELSE() {
4051 IEM_MC_ADVANCE_RIP();
4052 } IEM_MC_ENDIF();
4053 IEM_MC_END();
4054 }
4055 else
4056 {
4057 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4059
4060 IEM_MC_BEGIN(0, 0);
4061 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4062 IEM_MC_REL_JMP_S32(i32Imm);
4063 } IEM_MC_ELSE() {
4064 IEM_MC_ADVANCE_RIP();
4065 } IEM_MC_ENDIF();
4066 IEM_MC_END();
4067 }
4068 return VINF_SUCCESS;
4069}
4070
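/*
 * Illustrative sketch (not part of the build) of the arithmetic behind
 * IEM_MC_REL_JMP_S32 above, assuming the 64-bit case: the displacement
 * is relative to the first byte after the jump instruction.
 */
#if 0
static uint64_t relJmpExample(uint64_t uRipInstr, uint8_t cbInstr, int32_t i32Disp)
{
    return uRipInstr + cbInstr + (int64_t)i32Disp; /* RIP of the next instruction + displacement */
}
#endif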
4071
4072/** Opcode 0x0f 0x81. */
4073FNIEMOP_DEF(iemOp_jno_Jv)
4074{
4075 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4076 IEMOP_HLP_MIN_386();
4077 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4078 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4079 {
4080 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4082
4083 IEM_MC_BEGIN(0, 0);
4084 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4085 IEM_MC_ADVANCE_RIP();
4086 } IEM_MC_ELSE() {
4087 IEM_MC_REL_JMP_S16(i16Imm);
4088 } IEM_MC_ENDIF();
4089 IEM_MC_END();
4090 }
4091 else
4092 {
4093 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4095
4096 IEM_MC_BEGIN(0, 0);
4097 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4098 IEM_MC_ADVANCE_RIP();
4099 } IEM_MC_ELSE() {
4100 IEM_MC_REL_JMP_S32(i32Imm);
4101 } IEM_MC_ENDIF();
4102 IEM_MC_END();
4103 }
4104 return VINF_SUCCESS;
4105}
4106
4107
4108/** Opcode 0x0f 0x82. */
4109FNIEMOP_DEF(iemOp_jc_Jv)
4110{
4111 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4112 IEMOP_HLP_MIN_386();
4113 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4114 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4115 {
4116 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4118
4119 IEM_MC_BEGIN(0, 0);
4120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4121 IEM_MC_REL_JMP_S16(i16Imm);
4122 } IEM_MC_ELSE() {
4123 IEM_MC_ADVANCE_RIP();
4124 } IEM_MC_ENDIF();
4125 IEM_MC_END();
4126 }
4127 else
4128 {
4129 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4131
4132 IEM_MC_BEGIN(0, 0);
4133 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4134 IEM_MC_REL_JMP_S32(i32Imm);
4135 } IEM_MC_ELSE() {
4136 IEM_MC_ADVANCE_RIP();
4137 } IEM_MC_ENDIF();
4138 IEM_MC_END();
4139 }
4140 return VINF_SUCCESS;
4141}
4142
4143
4144/** Opcode 0x0f 0x83. */
4145FNIEMOP_DEF(iemOp_jnc_Jv)
4146{
4147 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4148 IEMOP_HLP_MIN_386();
4149 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4150 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4151 {
4152 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4154
4155 IEM_MC_BEGIN(0, 0);
4156 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4157 IEM_MC_ADVANCE_RIP();
4158 } IEM_MC_ELSE() {
4159 IEM_MC_REL_JMP_S16(i16Imm);
4160 } IEM_MC_ENDIF();
4161 IEM_MC_END();
4162 }
4163 else
4164 {
4165 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4167
4168 IEM_MC_BEGIN(0, 0);
4169 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4170 IEM_MC_ADVANCE_RIP();
4171 } IEM_MC_ELSE() {
4172 IEM_MC_REL_JMP_S32(i32Imm);
4173 } IEM_MC_ENDIF();
4174 IEM_MC_END();
4175 }
4176 return VINF_SUCCESS;
4177}
4178
4179
4180/** Opcode 0x0f 0x84. */
4181FNIEMOP_DEF(iemOp_je_Jv)
4182{
4183 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4184 IEMOP_HLP_MIN_386();
4185 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4186 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4187 {
4188 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4190
4191 IEM_MC_BEGIN(0, 0);
4192 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4193 IEM_MC_REL_JMP_S16(i16Imm);
4194 } IEM_MC_ELSE() {
4195 IEM_MC_ADVANCE_RIP();
4196 } IEM_MC_ENDIF();
4197 IEM_MC_END();
4198 }
4199 else
4200 {
4201 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4203
4204 IEM_MC_BEGIN(0, 0);
4205 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4206 IEM_MC_REL_JMP_S32(i32Imm);
4207 } IEM_MC_ELSE() {
4208 IEM_MC_ADVANCE_RIP();
4209 } IEM_MC_ENDIF();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
4215
4216/** Opcode 0x0f 0x85. */
4217FNIEMOP_DEF(iemOp_jne_Jv)
4218{
4219 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4220 IEMOP_HLP_MIN_386();
4221 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4222 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4223 {
4224 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4226
4227 IEM_MC_BEGIN(0, 0);
4228 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4229 IEM_MC_ADVANCE_RIP();
4230 } IEM_MC_ELSE() {
4231 IEM_MC_REL_JMP_S16(i16Imm);
4232 } IEM_MC_ENDIF();
4233 IEM_MC_END();
4234 }
4235 else
4236 {
4237 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4239
4240 IEM_MC_BEGIN(0, 0);
4241 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4242 IEM_MC_ADVANCE_RIP();
4243 } IEM_MC_ELSE() {
4244 IEM_MC_REL_JMP_S32(i32Imm);
4245 } IEM_MC_ENDIF();
4246 IEM_MC_END();
4247 }
4248 return VINF_SUCCESS;
4249}
4250
4251
4252/** Opcode 0x0f 0x86. */
4253FNIEMOP_DEF(iemOp_jbe_Jv)
4254{
4255 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4256 IEMOP_HLP_MIN_386();
4257 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4258 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4259 {
4260 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4262
4263 IEM_MC_BEGIN(0, 0);
4264 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4265 IEM_MC_REL_JMP_S16(i16Imm);
4266 } IEM_MC_ELSE() {
4267 IEM_MC_ADVANCE_RIP();
4268 } IEM_MC_ENDIF();
4269 IEM_MC_END();
4270 }
4271 else
4272 {
4273 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4275
4276 IEM_MC_BEGIN(0, 0);
4277 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4278 IEM_MC_REL_JMP_S32(i32Imm);
4279 } IEM_MC_ELSE() {
4280 IEM_MC_ADVANCE_RIP();
4281 } IEM_MC_ENDIF();
4282 IEM_MC_END();
4283 }
4284 return VINF_SUCCESS;
4285}
4286
4287
4288/** Opcode 0x0f 0x87. */
4289FNIEMOP_DEF(iemOp_jnbe_Jv)
4290{
4291 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4292 IEMOP_HLP_MIN_386();
4293 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4294 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4295 {
4296 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4298
4299 IEM_MC_BEGIN(0, 0);
4300 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4301 IEM_MC_ADVANCE_RIP();
4302 } IEM_MC_ELSE() {
4303 IEM_MC_REL_JMP_S16(i16Imm);
4304 } IEM_MC_ENDIF();
4305 IEM_MC_END();
4306 }
4307 else
4308 {
4309 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4311
4312 IEM_MC_BEGIN(0, 0);
4313 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4314 IEM_MC_ADVANCE_RIP();
4315 } IEM_MC_ELSE() {
4316 IEM_MC_REL_JMP_S32(i32Imm);
4317 } IEM_MC_ENDIF();
4318 IEM_MC_END();
4319 }
4320 return VINF_SUCCESS;
4321}
4322
4323
4324/** Opcode 0x0f 0x88. */
4325FNIEMOP_DEF(iemOp_js_Jv)
4326{
4327 IEMOP_MNEMONIC(js_Jv, "js Jv");
4328 IEMOP_HLP_MIN_386();
4329 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4330 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4331 {
4332 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4334
4335 IEM_MC_BEGIN(0, 0);
4336 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4337 IEM_MC_REL_JMP_S16(i16Imm);
4338 } IEM_MC_ELSE() {
4339 IEM_MC_ADVANCE_RIP();
4340 } IEM_MC_ENDIF();
4341 IEM_MC_END();
4342 }
4343 else
4344 {
4345 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4347
4348 IEM_MC_BEGIN(0, 0);
4349 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4350 IEM_MC_REL_JMP_S32(i32Imm);
4351 } IEM_MC_ELSE() {
4352 IEM_MC_ADVANCE_RIP();
4353 } IEM_MC_ENDIF();
4354 IEM_MC_END();
4355 }
4356 return VINF_SUCCESS;
4357}
4358
4359
4360/** Opcode 0x0f 0x89. */
4361FNIEMOP_DEF(iemOp_jns_Jv)
4362{
4363 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4364 IEMOP_HLP_MIN_386();
4365 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4366 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4367 {
4368 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4370
4371 IEM_MC_BEGIN(0, 0);
4372 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4373 IEM_MC_ADVANCE_RIP();
4374 } IEM_MC_ELSE() {
4375 IEM_MC_REL_JMP_S16(i16Imm);
4376 } IEM_MC_ENDIF();
4377 IEM_MC_END();
4378 }
4379 else
4380 {
4381 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4383
4384 IEM_MC_BEGIN(0, 0);
4385 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4386 IEM_MC_ADVANCE_RIP();
4387 } IEM_MC_ELSE() {
4388 IEM_MC_REL_JMP_S32(i32Imm);
4389 } IEM_MC_ENDIF();
4390 IEM_MC_END();
4391 }
4392 return VINF_SUCCESS;
4393}
4394
4395
4396/** Opcode 0x0f 0x8a. */
4397FNIEMOP_DEF(iemOp_jp_Jv)
4398{
4399 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4400 IEMOP_HLP_MIN_386();
4401 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4402 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4403 {
4404 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4406
4407 IEM_MC_BEGIN(0, 0);
4408 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4409 IEM_MC_REL_JMP_S16(i16Imm);
4410 } IEM_MC_ELSE() {
4411 IEM_MC_ADVANCE_RIP();
4412 } IEM_MC_ENDIF();
4413 IEM_MC_END();
4414 }
4415 else
4416 {
4417 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4419
4420 IEM_MC_BEGIN(0, 0);
4421 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4422 IEM_MC_REL_JMP_S32(i32Imm);
4423 } IEM_MC_ELSE() {
4424 IEM_MC_ADVANCE_RIP();
4425 } IEM_MC_ENDIF();
4426 IEM_MC_END();
4427 }
4428 return VINF_SUCCESS;
4429}
4430
4431
4432/** Opcode 0x0f 0x8b. */
4433FNIEMOP_DEF(iemOp_jnp_Jv)
4434{
4435 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4436 IEMOP_HLP_MIN_386();
4437 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4438 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4439 {
4440 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4442
4443 IEM_MC_BEGIN(0, 0);
4444 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4445 IEM_MC_ADVANCE_RIP();
4446 } IEM_MC_ELSE() {
4447 IEM_MC_REL_JMP_S16(i16Imm);
4448 } IEM_MC_ENDIF();
4449 IEM_MC_END();
4450 }
4451 else
4452 {
4453 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4455
4456 IEM_MC_BEGIN(0, 0);
4457 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4458 IEM_MC_ADVANCE_RIP();
4459 } IEM_MC_ELSE() {
4460 IEM_MC_REL_JMP_S32(i32Imm);
4461 } IEM_MC_ENDIF();
4462 IEM_MC_END();
4463 }
4464 return VINF_SUCCESS;
4465}
4466
4467
4468/** Opcode 0x0f 0x8c. */
4469FNIEMOP_DEF(iemOp_jl_Jv)
4470{
4471 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4472 IEMOP_HLP_MIN_386();
4473 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4474 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4475 {
4476 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4478
4479 IEM_MC_BEGIN(0, 0);
4480 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4481 IEM_MC_REL_JMP_S16(i16Imm);
4482 } IEM_MC_ELSE() {
4483 IEM_MC_ADVANCE_RIP();
4484 } IEM_MC_ENDIF();
4485 IEM_MC_END();
4486 }
4487 else
4488 {
4489 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4491
4492 IEM_MC_BEGIN(0, 0);
4493 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4494 IEM_MC_REL_JMP_S32(i32Imm);
4495 } IEM_MC_ELSE() {
4496 IEM_MC_ADVANCE_RIP();
4497 } IEM_MC_ENDIF();
4498 IEM_MC_END();
4499 }
4500 return VINF_SUCCESS;
4501}
4502
4503
4504/** Opcode 0x0f 0x8d. */
4505FNIEMOP_DEF(iemOp_jnl_Jv)
4506{
4507 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4508 IEMOP_HLP_MIN_386();
4509 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4510 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4511 {
4512 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4514
4515 IEM_MC_BEGIN(0, 0);
4516 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4517 IEM_MC_ADVANCE_RIP();
4518 } IEM_MC_ELSE() {
4519 IEM_MC_REL_JMP_S16(i16Imm);
4520 } IEM_MC_ENDIF();
4521 IEM_MC_END();
4522 }
4523 else
4524 {
4525 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4527
4528 IEM_MC_BEGIN(0, 0);
4529 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4530 IEM_MC_ADVANCE_RIP();
4531 } IEM_MC_ELSE() {
4532 IEM_MC_REL_JMP_S32(i32Imm);
4533 } IEM_MC_ENDIF();
4534 IEM_MC_END();
4535 }
4536 return VINF_SUCCESS;
4537}
4538
4539
4540/** Opcode 0x0f 0x8e. */
4541FNIEMOP_DEF(iemOp_jle_Jv)
4542{
4543 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4544 IEMOP_HLP_MIN_386();
4545 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4546 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4547 {
4548 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4550
4551 IEM_MC_BEGIN(0, 0);
4552 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4553 IEM_MC_REL_JMP_S16(i16Imm);
4554 } IEM_MC_ELSE() {
4555 IEM_MC_ADVANCE_RIP();
4556 } IEM_MC_ENDIF();
4557 IEM_MC_END();
4558 }
4559 else
4560 {
4561 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4563
4564 IEM_MC_BEGIN(0, 0);
4565 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4566 IEM_MC_REL_JMP_S32(i32Imm);
4567 } IEM_MC_ELSE() {
4568 IEM_MC_ADVANCE_RIP();
4569 } IEM_MC_ENDIF();
4570 IEM_MC_END();
4571 }
4572 return VINF_SUCCESS;
4573}
4574
4575
4576/** Opcode 0x0f 0x8f. */
4577FNIEMOP_DEF(iemOp_jnle_Jv)
4578{
4579 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4580 IEMOP_HLP_MIN_386();
4581 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4582 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4583 {
4584 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4586
4587 IEM_MC_BEGIN(0, 0);
4588 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4589 IEM_MC_ADVANCE_RIP();
4590 } IEM_MC_ELSE() {
4591 IEM_MC_REL_JMP_S16(i16Imm);
4592 } IEM_MC_ENDIF();
4593 IEM_MC_END();
4594 }
4595 else
4596 {
4597 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4599
4600 IEM_MC_BEGIN(0, 0);
4601 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4602 IEM_MC_ADVANCE_RIP();
4603 } IEM_MC_ELSE() {
4604 IEM_MC_REL_JMP_S32(i32Imm);
4605 } IEM_MC_ENDIF();
4606 IEM_MC_END();
4607 }
4608 return VINF_SUCCESS;
4609}
4610
4611
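/*
 * The SETcc opcodes below (0F 90..0F 9F) test the same sixteen conditions
 * as the Jcc forms above, but instead of branching they store 1 or 0 to
 * the byte-sized register or memory operand.
 */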
4612/** Opcode 0x0f 0x90. */
4613FNIEMOP_DEF(iemOp_seto_Eb)
4614{
4615 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4616 IEMOP_HLP_MIN_386();
4617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4618
4619 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4620 * any way. AMD says it's "unused", whatever that means. We're
4621 * ignoring for now. */
4622 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4623 {
4624 /* register target */
4625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4626 IEM_MC_BEGIN(0, 0);
4627 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4628 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4629 } IEM_MC_ELSE() {
4630 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4631 } IEM_MC_ENDIF();
4632 IEM_MC_ADVANCE_RIP();
4633 IEM_MC_END();
4634 }
4635 else
4636 {
4637 /* memory target */
4638 IEM_MC_BEGIN(0, 1);
4639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4643 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4644 } IEM_MC_ELSE() {
4645 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4646 } IEM_MC_ENDIF();
4647 IEM_MC_ADVANCE_RIP();
4648 IEM_MC_END();
4649 }
4650 return VINF_SUCCESS;
4651}
4652
4653
4654/** Opcode 0x0f 0x91. */
4655FNIEMOP_DEF(iemOp_setno_Eb)
4656{
4657 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4658 IEMOP_HLP_MIN_386();
4659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4660
4661 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4662 * any way. AMD says it's "unused", whatever that means. We're
4663 * ignoring for now. */
4664 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4665 {
4666 /* register target */
4667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4668 IEM_MC_BEGIN(0, 0);
4669 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4670 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4671 } IEM_MC_ELSE() {
4672 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4673 } IEM_MC_ENDIF();
4674 IEM_MC_ADVANCE_RIP();
4675 IEM_MC_END();
4676 }
4677 else
4678 {
4679 /* memory target */
4680 IEM_MC_BEGIN(0, 1);
4681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4684 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4685 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4686 } IEM_MC_ELSE() {
4687 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4688 } IEM_MC_ENDIF();
4689 IEM_MC_ADVANCE_RIP();
4690 IEM_MC_END();
4691 }
4692 return VINF_SUCCESS;
4693}
4694
4695
4696/** Opcode 0x0f 0x92. */
4697FNIEMOP_DEF(iemOp_setc_Eb)
4698{
4699 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4700 IEMOP_HLP_MIN_386();
4701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4702
4703 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4704 * any way. AMD says it's "unused", whatever that means. We're
4705 * ignoring for now. */
4706 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4707 {
4708 /* register target */
4709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4710 IEM_MC_BEGIN(0, 0);
4711 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4712 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4713 } IEM_MC_ELSE() {
4714 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4715 } IEM_MC_ENDIF();
4716 IEM_MC_ADVANCE_RIP();
4717 IEM_MC_END();
4718 }
4719 else
4720 {
4721 /* memory target */
4722 IEM_MC_BEGIN(0, 1);
4723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4726 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4727 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4728 } IEM_MC_ELSE() {
4729 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4730 } IEM_MC_ENDIF();
4731 IEM_MC_ADVANCE_RIP();
4732 IEM_MC_END();
4733 }
4734 return VINF_SUCCESS;
4735}
4736
4737
4738/** Opcode 0x0f 0x93. */
4739FNIEMOP_DEF(iemOp_setnc_Eb)
4740{
4741 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4742 IEMOP_HLP_MIN_386();
4743 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4744
4745 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4746 * any way. AMD says it's "unused", whatever that means. We're
4747 * ignoring for now. */
4748 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4749 {
4750 /* register target */
4751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4752 IEM_MC_BEGIN(0, 0);
4753 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4754 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4755 } IEM_MC_ELSE() {
4756 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4757 } IEM_MC_ENDIF();
4758 IEM_MC_ADVANCE_RIP();
4759 IEM_MC_END();
4760 }
4761 else
4762 {
4763 /* memory target */
4764 IEM_MC_BEGIN(0, 1);
4765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4769 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4770 } IEM_MC_ELSE() {
4771 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4772 } IEM_MC_ENDIF();
4773 IEM_MC_ADVANCE_RIP();
4774 IEM_MC_END();
4775 }
4776 return VINF_SUCCESS;
4777}
4778
4779
4780/** Opcode 0x0f 0x94. */
4781FNIEMOP_DEF(iemOp_sete_Eb)
4782{
4783 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4784 IEMOP_HLP_MIN_386();
4785 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4786
4787 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4788 * any way. AMD says it's "unused", whatever that means. We're
4789 * ignoring for now. */
4790 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4791 {
4792 /* register target */
4793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4794 IEM_MC_BEGIN(0, 0);
4795 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4796 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4797 } IEM_MC_ELSE() {
4798 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4799 } IEM_MC_ENDIF();
4800 IEM_MC_ADVANCE_RIP();
4801 IEM_MC_END();
4802 }
4803 else
4804 {
4805 /* memory target */
4806 IEM_MC_BEGIN(0, 1);
4807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4810 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4811 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4812 } IEM_MC_ELSE() {
4813 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4814 } IEM_MC_ENDIF();
4815 IEM_MC_ADVANCE_RIP();
4816 IEM_MC_END();
4817 }
4818 return VINF_SUCCESS;
4819}
4820
4821
4822/** Opcode 0x0f 0x95. */
4823FNIEMOP_DEF(iemOp_setne_Eb)
4824{
4825 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4826 IEMOP_HLP_MIN_386();
4827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4828
4829 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4830 * any way. AMD says it's "unused", whatever that means. We're
4831 * ignoring for now. */
4832 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4833 {
4834 /* register target */
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4836 IEM_MC_BEGIN(0, 0);
4837 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4838 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4839 } IEM_MC_ELSE() {
4840 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4841 } IEM_MC_ENDIF();
4842 IEM_MC_ADVANCE_RIP();
4843 IEM_MC_END();
4844 }
4845 else
4846 {
4847 /* memory target */
4848 IEM_MC_BEGIN(0, 1);
4849 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4852 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4853 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4854 } IEM_MC_ELSE() {
4855 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4856 } IEM_MC_ENDIF();
4857 IEM_MC_ADVANCE_RIP();
4858 IEM_MC_END();
4859 }
4860 return VINF_SUCCESS;
4861}
4862
4863
4864/** Opcode 0x0f 0x96. */
4865FNIEMOP_DEF(iemOp_setbe_Eb)
4866{
4867 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4868 IEMOP_HLP_MIN_386();
4869 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4870
4871 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4872 * any way. AMD says it's "unused", whatever that means. We're
4873 * ignoring for now. */
4874 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4875 {
4876 /* register target */
4877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4878 IEM_MC_BEGIN(0, 0);
4879 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4880 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4881 } IEM_MC_ELSE() {
4882 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4883 } IEM_MC_ENDIF();
4884 IEM_MC_ADVANCE_RIP();
4885 IEM_MC_END();
4886 }
4887 else
4888 {
4889 /* memory target */
4890 IEM_MC_BEGIN(0, 1);
4891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4894 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4895 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4896 } IEM_MC_ELSE() {
4897 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4898 } IEM_MC_ENDIF();
4899 IEM_MC_ADVANCE_RIP();
4900 IEM_MC_END();
4901 }
4902 return VINF_SUCCESS;
4903}
4904
4905
4906/** Opcode 0x0f 0x97. */
4907FNIEMOP_DEF(iemOp_setnbe_Eb)
4908{
4909 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4910 IEMOP_HLP_MIN_386();
4911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4912
4913 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4914 * any way. AMD says it's "unused", whatever that means. We're
4915 * ignoring for now. */
4916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4917 {
4918 /* register target */
4919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4920 IEM_MC_BEGIN(0, 0);
4921 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4922 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4923 } IEM_MC_ELSE() {
4924 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4925 } IEM_MC_ENDIF();
4926 IEM_MC_ADVANCE_RIP();
4927 IEM_MC_END();
4928 }
4929 else
4930 {
4931 /* memory target */
4932 IEM_MC_BEGIN(0, 1);
4933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4936 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4937 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4938 } IEM_MC_ELSE() {
4939 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4940 } IEM_MC_ENDIF();
4941 IEM_MC_ADVANCE_RIP();
4942 IEM_MC_END();
4943 }
4944 return VINF_SUCCESS;
4945}
4946
4947
4948/** Opcode 0x0f 0x98. */
4949FNIEMOP_DEF(iemOp_sets_Eb)
4950{
4951 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4952 IEMOP_HLP_MIN_386();
4953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4954
4955 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4956 * any way. AMD says it's "unused", whatever that means. We're
4957 * ignoring for now. */
4958 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4959 {
4960 /* register target */
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962 IEM_MC_BEGIN(0, 0);
4963 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4964 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4965 } IEM_MC_ELSE() {
4966 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4967 } IEM_MC_ENDIF();
4968 IEM_MC_ADVANCE_RIP();
4969 IEM_MC_END();
4970 }
4971 else
4972 {
4973 /* memory target */
4974 IEM_MC_BEGIN(0, 1);
4975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4978 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4979 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4980 } IEM_MC_ELSE() {
4981 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4982 } IEM_MC_ENDIF();
4983 IEM_MC_ADVANCE_RIP();
4984 IEM_MC_END();
4985 }
4986 return VINF_SUCCESS;
4987}
4988
4989
4990/** Opcode 0x0f 0x99. */
4991FNIEMOP_DEF(iemOp_setns_Eb)
4992{
4993 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4994 IEMOP_HLP_MIN_386();
4995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4996
4997 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4998 * any way. AMD says it's "unused", whatever that means. We're
4999 * ignoring for now. */
5000 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5001 {
5002 /* register target */
5003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5004 IEM_MC_BEGIN(0, 0);
5005 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5006 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5007 } IEM_MC_ELSE() {
5008 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5009 } IEM_MC_ENDIF();
5010 IEM_MC_ADVANCE_RIP();
5011 IEM_MC_END();
5012 }
5013 else
5014 {
5015 /* memory target */
5016 IEM_MC_BEGIN(0, 1);
5017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5020 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5021 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5022 } IEM_MC_ELSE() {
5023 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5024 } IEM_MC_ENDIF();
5025 IEM_MC_ADVANCE_RIP();
5026 IEM_MC_END();
5027 }
5028 return VINF_SUCCESS;
5029}
5030
5031
5032/** Opcode 0x0f 0x9a. */
5033FNIEMOP_DEF(iemOp_setp_Eb)
5034{
5035 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5036 IEMOP_HLP_MIN_386();
5037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5038
5039 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5040 * any way. AMD says it's "unused", whatever that means. We're
5041 * ignoring for now. */
5042 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5043 {
5044 /* register target */
5045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5046 IEM_MC_BEGIN(0, 0);
5047 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5048 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5049 } IEM_MC_ELSE() {
5050 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5051 } IEM_MC_ENDIF();
5052 IEM_MC_ADVANCE_RIP();
5053 IEM_MC_END();
5054 }
5055 else
5056 {
5057 /* memory target */
5058 IEM_MC_BEGIN(0, 1);
5059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5062 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5063 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5064 } IEM_MC_ELSE() {
5065 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5066 } IEM_MC_ENDIF();
5067 IEM_MC_ADVANCE_RIP();
5068 IEM_MC_END();
5069 }
5070 return VINF_SUCCESS;
5071}
5072
5073
5074/** Opcode 0x0f 0x9b. */
5075FNIEMOP_DEF(iemOp_setnp_Eb)
5076{
5077 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5078 IEMOP_HLP_MIN_386();
5079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5080
5081 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5082 * any way. AMD says it's "unused", whatever that means. We're
5083 * ignoring for now. */
5084 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5085 {
5086 /* register target */
5087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5088 IEM_MC_BEGIN(0, 0);
5089 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5090 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5091 } IEM_MC_ELSE() {
5092 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5093 } IEM_MC_ENDIF();
5094 IEM_MC_ADVANCE_RIP();
5095 IEM_MC_END();
5096 }
5097 else
5098 {
5099 /* memory target */
5100 IEM_MC_BEGIN(0, 1);
5101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5104 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5105 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5106 } IEM_MC_ELSE() {
5107 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5108 } IEM_MC_ENDIF();
5109 IEM_MC_ADVANCE_RIP();
5110 IEM_MC_END();
5111 }
5112 return VINF_SUCCESS;
5113}
5114
5115
5116/** Opcode 0x0f 0x9c. */
5117FNIEMOP_DEF(iemOp_setl_Eb)
5118{
5119 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5120 IEMOP_HLP_MIN_386();
5121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5122
5123 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5124 * any way. AMD says it's "unused", whatever that means. We're
5125 * ignoring for now. */
5126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5127 {
5128 /* register target */
5129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5130 IEM_MC_BEGIN(0, 0);
5131 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5132 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5133 } IEM_MC_ELSE() {
5134 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5135 } IEM_MC_ENDIF();
5136 IEM_MC_ADVANCE_RIP();
5137 IEM_MC_END();
5138 }
5139 else
5140 {
5141 /* memory target */
5142 IEM_MC_BEGIN(0, 1);
5143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5146 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5147 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5148 } IEM_MC_ELSE() {
5149 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5150 } IEM_MC_ENDIF();
5151 IEM_MC_ADVANCE_RIP();
5152 IEM_MC_END();
5153 }
5154 return VINF_SUCCESS;
5155}
5156
5157
5158/** Opcode 0x0f 0x9d. */
5159FNIEMOP_DEF(iemOp_setnl_Eb)
5160{
5161 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5162 IEMOP_HLP_MIN_386();
5163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5164
5165 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5166 * any way. AMD says it's "unused", whatever that means. We're
5167 * ignoring for now. */
5168 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5169 {
5170 /* register target */
5171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5172 IEM_MC_BEGIN(0, 0);
5173 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5174 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5175 } IEM_MC_ELSE() {
5176 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5177 } IEM_MC_ENDIF();
5178 IEM_MC_ADVANCE_RIP();
5179 IEM_MC_END();
5180 }
5181 else
5182 {
5183 /* memory target */
5184 IEM_MC_BEGIN(0, 1);
5185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5188 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5189 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5190 } IEM_MC_ELSE() {
5191 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5192 } IEM_MC_ENDIF();
5193 IEM_MC_ADVANCE_RIP();
5194 IEM_MC_END();
5195 }
5196 return VINF_SUCCESS;
5197}
5198
5199
5200/** Opcode 0x0f 0x9e. */
5201FNIEMOP_DEF(iemOp_setle_Eb)
5202{
5203 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5204 IEMOP_HLP_MIN_386();
5205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5206
5207 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5208 * any way. AMD says it's "unused", whatever that means. We're
5209 * ignoring for now. */
5210 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5211 {
5212 /* register target */
5213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5214 IEM_MC_BEGIN(0, 0);
5215 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5216 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5217 } IEM_MC_ELSE() {
5218 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5219 } IEM_MC_ENDIF();
5220 IEM_MC_ADVANCE_RIP();
5221 IEM_MC_END();
5222 }
5223 else
5224 {
5225 /* memory target */
5226 IEM_MC_BEGIN(0, 1);
5227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5230 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5231 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5232 } IEM_MC_ELSE() {
5233 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5234 } IEM_MC_ENDIF();
5235 IEM_MC_ADVANCE_RIP();
5236 IEM_MC_END();
5237 }
5238 return VINF_SUCCESS;
5239}
5240
5241
5242/** Opcode 0x0f 0x9f. */
5243FNIEMOP_DEF(iemOp_setnle_Eb)
5244{
5245 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5246 IEMOP_HLP_MIN_386();
5247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5248
5249 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5250 * any way. AMD says it's "unused", whatever that means. We're
5251 * ignoring for now. */
5252 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5253 {
5254 /* register target */
5255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5256 IEM_MC_BEGIN(0, 0);
5257 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5258 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5259 } IEM_MC_ELSE() {
5260 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5261 } IEM_MC_ENDIF();
5262 IEM_MC_ADVANCE_RIP();
5263 IEM_MC_END();
5264 }
5265 else
5266 {
5267 /* memory target */
5268 IEM_MC_BEGIN(0, 1);
5269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5272 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5273 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5274 } IEM_MC_ELSE() {
5275 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5276 } IEM_MC_ENDIF();
5277 IEM_MC_ADVANCE_RIP();
5278 IEM_MC_END();
5279 }
5280 return VINF_SUCCESS;
5281}
5282
5283
5284/**
5285 * Common 'push segment-register' helper.
5286 */
5287FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5288{
5289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
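 /* Note! In 64-bit mode only the FS and GS pushes (0x0f prefixed)
 remain valid; pushing ES/CS/SS/DS raises #UD, hence the
 check below. */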
5290 if (iReg < X86_SREG_FS)
5291 IEMOP_HLP_NO_64BIT();
5292 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5293
5294 switch (pVCpu->iem.s.enmEffOpSize)
5295 {
5296 case IEMMODE_16BIT:
5297 IEM_MC_BEGIN(0, 1);
5298 IEM_MC_LOCAL(uint16_t, u16Value);
5299 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5300 IEM_MC_PUSH_U16(u16Value);
5301 IEM_MC_ADVANCE_RIP();
5302 IEM_MC_END();
5303 break;
5304
5305 case IEMMODE_32BIT:
5306 IEM_MC_BEGIN(0, 1);
5307 IEM_MC_LOCAL(uint32_t, u32Value);
5308 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
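 /* Note! A dedicated SREG push micro-op is used because real CPUs
 may write only the low word of the 32-bit stack slot,
 leaving the upper half untouched instead of pushing a
 zero-extended value. */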
5309 IEM_MC_PUSH_U32_SREG(u32Value);
5310 IEM_MC_ADVANCE_RIP();
5311 IEM_MC_END();
5312 break;
5313
5314 case IEMMODE_64BIT:
5315 IEM_MC_BEGIN(0, 1);
5316 IEM_MC_LOCAL(uint64_t, u64Value);
5317 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5318 IEM_MC_PUSH_U64(u64Value);
5319 IEM_MC_ADVANCE_RIP();
5320 IEM_MC_END();
5321 break;
5322 }
5323
5324 return VINF_SUCCESS;
5325}
5326
5327
5328/** Opcode 0x0f 0xa0. */
5329FNIEMOP_DEF(iemOp_push_fs)
5330{
5331 IEMOP_MNEMONIC(push_fs, "push fs");
5332 IEMOP_HLP_MIN_386();
5333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5334 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5335}
5336
5337
5338/** Opcode 0x0f 0xa1. */
5339FNIEMOP_DEF(iemOp_pop_fs)
5340{
5341 IEMOP_MNEMONIC(pop_fs, "pop fs");
5342 IEMOP_HLP_MIN_386();
5343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5344 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5345}
5346
5347
5348/** Opcode 0x0f 0xa2. */
5349FNIEMOP_DEF(iemOp_cpuid)
5350{
5351 IEMOP_MNEMONIC(cpuid, "cpuid");
5352 IEMOP_HLP_MIN_486(); /* CPUID is not present on all 486es. */
5353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5354 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5355}
5356
5357
5358/**
5359 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5360 * iemOp_bts_Ev_Gv.
5361 */
5362FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5363{
5364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5365 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5366
5367 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5368 {
5369 /* register destination. */
5370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5371 switch (pVCpu->iem.s.enmEffOpSize)
5372 {
5373 case IEMMODE_16BIT:
5374 IEM_MC_BEGIN(3, 0);
5375 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5376 IEM_MC_ARG(uint16_t, u16Src, 1);
5377 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5378
5379 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5380 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5381 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5382 IEM_MC_REF_EFLAGS(pEFlags);
5383 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5384
5385 IEM_MC_ADVANCE_RIP();
5386 IEM_MC_END();
5387 return VINF_SUCCESS;
5388
5389 case IEMMODE_32BIT:
5390 IEM_MC_BEGIN(3, 0);
5391 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5392 IEM_MC_ARG(uint32_t, u32Src, 1);
5393 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5394
5395 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5396 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5397 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5398 IEM_MC_REF_EFLAGS(pEFlags);
5399 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5400
5401 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5402 IEM_MC_ADVANCE_RIP();
5403 IEM_MC_END();
5404 return VINF_SUCCESS;
5405
5406 case IEMMODE_64BIT:
5407 IEM_MC_BEGIN(3, 0);
5408 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5409 IEM_MC_ARG(uint64_t, u64Src, 1);
5410 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5411
5412 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5413 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5414 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5415 IEM_MC_REF_EFLAGS(pEFlags);
5416 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5417
5418 IEM_MC_ADVANCE_RIP();
5419 IEM_MC_END();
5420 return VINF_SUCCESS;
5421
5422 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5423 }
5424 }
5425 else
5426 {
5427 /* memory destination. */
5428
5429 uint32_t fAccess;
5430 if (pImpl->pfnLockedU16)
5431 fAccess = IEM_ACCESS_DATA_RW;
5432 else /* BT */
5433 fAccess = IEM_ACCESS_DATA_R;
5434
5435 /** @todo test negative bit offsets! */
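 /*
 * Note! With a register bit offset the BT family can address bits
 * outside the operand itself; the code below splits the signed
 * offset into a byte displacement plus an in-operand bit number.
 * E.g. 16-bit operand, offset 37: EffAddr += (37 >> 4) * 2 = +4,
 * bit = 37 & 15 = 5; offset -1: EffAddr += -2, bit = 15.
 */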
5436 switch (pVCpu->iem.s.enmEffOpSize)
5437 {
5438 case IEMMODE_16BIT:
5439 IEM_MC_BEGIN(3, 2);
5440 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5441 IEM_MC_ARG(uint16_t, u16Src, 1);
5442 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5444 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5445
5446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5447 if (pImpl->pfnLockedU16)
5448 IEMOP_HLP_DONE_DECODING();
5449 else
5450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5451 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5452 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5453 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5454 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5455 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5456 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5457 IEM_MC_FETCH_EFLAGS(EFlags);
5458
5459 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5460 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5461 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5462 else
5463 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5464 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5465
5466 IEM_MC_COMMIT_EFLAGS(EFlags);
5467 IEM_MC_ADVANCE_RIP();
5468 IEM_MC_END();
5469 return VINF_SUCCESS;
5470
5471 case IEMMODE_32BIT:
5472 IEM_MC_BEGIN(3, 2);
5473 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5474 IEM_MC_ARG(uint32_t, u32Src, 1);
5475 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5477 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5478
5479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5480 if (pImpl->pfnLockedU16)
5481 IEMOP_HLP_DONE_DECODING();
5482 else
5483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5484 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5485 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5486 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5487 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5488 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5489 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5490 IEM_MC_FETCH_EFLAGS(EFlags);
5491
5492 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5493 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5494 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5495 else
5496 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5497 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5498
5499 IEM_MC_COMMIT_EFLAGS(EFlags);
5500 IEM_MC_ADVANCE_RIP();
5501 IEM_MC_END();
5502 return VINF_SUCCESS;
5503
5504 case IEMMODE_64BIT:
5505 IEM_MC_BEGIN(3, 2);
5506 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5507 IEM_MC_ARG(uint64_t, u64Src, 1);
5508 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5510 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5511
5512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5513 if (pImpl->pfnLockedU16)
5514 IEMOP_HLP_DONE_DECODING();
5515 else
5516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5517 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5518 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5519 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5520 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5521 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5522 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5523 IEM_MC_FETCH_EFLAGS(EFlags);
5524
5525 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5526 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5527 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5528 else
5529 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5530 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5531
5532 IEM_MC_COMMIT_EFLAGS(EFlags);
5533 IEM_MC_ADVANCE_RIP();
5534 IEM_MC_END();
5535 return VINF_SUCCESS;
5536
5537 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5538 }
5539 }
5540}
5541
5542
5543/** Opcode 0x0f 0xa3. */
5544FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5545{
5546 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5547 IEMOP_HLP_MIN_386();
5548 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5549}
5550
5551
5552/**
5553 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5554 */
5555FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5556{
5557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5558 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
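 /* Note! SHLD/SHRD shift the destination, filling the vacated bits
 from the source register; the count is masked mod 32 (mod 64
 for 64-bit operands) and results are undefined when it
 exceeds a 16-bit operand's width. */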
5559
5560 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5561 {
5562 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5564
5565 switch (pVCpu->iem.s.enmEffOpSize)
5566 {
5567 case IEMMODE_16BIT:
5568 IEM_MC_BEGIN(4, 0);
5569 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5570 IEM_MC_ARG(uint16_t, u16Src, 1);
5571 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5572 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5573
5574 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5575 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5576 IEM_MC_REF_EFLAGS(pEFlags);
5577 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5578
5579 IEM_MC_ADVANCE_RIP();
5580 IEM_MC_END();
5581 return VINF_SUCCESS;
5582
5583 case IEMMODE_32BIT:
5584 IEM_MC_BEGIN(4, 0);
5585 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5586 IEM_MC_ARG(uint32_t, u32Src, 1);
5587 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5588 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5589
5590 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5591 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5592 IEM_MC_REF_EFLAGS(pEFlags);
5593 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5594
5595 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5596 IEM_MC_ADVANCE_RIP();
5597 IEM_MC_END();
5598 return VINF_SUCCESS;
5599
5600 case IEMMODE_64BIT:
5601 IEM_MC_BEGIN(4, 0);
5602 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5603 IEM_MC_ARG(uint64_t, u64Src, 1);
5604 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5605 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5606
5607 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5608 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5609 IEM_MC_REF_EFLAGS(pEFlags);
5610 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5611
5612 IEM_MC_ADVANCE_RIP();
5613 IEM_MC_END();
5614 return VINF_SUCCESS;
5615
5616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5617 }
5618 }
5619 else
5620 {
5621 switch (pVCpu->iem.s.enmEffOpSize)
5622 {
5623 case IEMMODE_16BIT:
5624 IEM_MC_BEGIN(4, 2);
5625 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5626 IEM_MC_ARG(uint16_t, u16Src, 1);
5627 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5628 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5630
5631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5632 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5633 IEM_MC_ASSIGN(cShiftArg, cShift);
5634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5635 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5636 IEM_MC_FETCH_EFLAGS(EFlags);
5637 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5638 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5639
5640 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5641 IEM_MC_COMMIT_EFLAGS(EFlags);
5642 IEM_MC_ADVANCE_RIP();
5643 IEM_MC_END();
5644 return VINF_SUCCESS;
5645
5646 case IEMMODE_32BIT:
5647 IEM_MC_BEGIN(4, 2);
5648 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5649 IEM_MC_ARG(uint32_t, u32Src, 1);
5650 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5651 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5653
5654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5655 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5656 IEM_MC_ASSIGN(cShiftArg, cShift);
5657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5658 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5659 IEM_MC_FETCH_EFLAGS(EFlags);
5660 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5661 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5662
5663 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5664 IEM_MC_COMMIT_EFLAGS(EFlags);
5665 IEM_MC_ADVANCE_RIP();
5666 IEM_MC_END();
5667 return VINF_SUCCESS;
5668
5669 case IEMMODE_64BIT:
5670 IEM_MC_BEGIN(4, 2);
5671 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5672 IEM_MC_ARG(uint64_t, u64Src, 1);
5673 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5674 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5676
5677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5678 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5679 IEM_MC_ASSIGN(cShiftArg, cShift);
5680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5681 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5682 IEM_MC_FETCH_EFLAGS(EFlags);
5683 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5684 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5685
5686 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5687 IEM_MC_COMMIT_EFLAGS(EFlags);
5688 IEM_MC_ADVANCE_RIP();
5689 IEM_MC_END();
5690 return VINF_SUCCESS;
5691
5692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5693 }
5694 }
5695}
5696
5697
5698/**
5699 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5700 */
5701FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5702{
5703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5704 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5705
5706 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5707 {
5708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5709
5710 switch (pVCpu->iem.s.enmEffOpSize)
5711 {
5712 case IEMMODE_16BIT:
5713 IEM_MC_BEGIN(4, 0);
5714 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5715 IEM_MC_ARG(uint16_t, u16Src, 1);
5716 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5717 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5718
5719 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5720 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5721 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5722 IEM_MC_REF_EFLAGS(pEFlags);
5723 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5724
5725 IEM_MC_ADVANCE_RIP();
5726 IEM_MC_END();
5727 return VINF_SUCCESS;
5728
5729 case IEMMODE_32BIT:
5730 IEM_MC_BEGIN(4, 0);
5731 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5732 IEM_MC_ARG(uint32_t, u32Src, 1);
5733 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5734 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5735
5736 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5737 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5738 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5739 IEM_MC_REF_EFLAGS(pEFlags);
5740 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5741
5742 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5743 IEM_MC_ADVANCE_RIP();
5744 IEM_MC_END();
5745 return VINF_SUCCESS;
5746
5747 case IEMMODE_64BIT:
5748 IEM_MC_BEGIN(4, 0);
5749 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5750 IEM_MC_ARG(uint64_t, u64Src, 1);
5751 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5752 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5753
5754 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5755 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5756 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5757 IEM_MC_REF_EFLAGS(pEFlags);
5758 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5759
5760 IEM_MC_ADVANCE_RIP();
5761 IEM_MC_END();
5762 return VINF_SUCCESS;
5763
5764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5765 }
5766 }
5767 else
5768 {
5769 switch (pVCpu->iem.s.enmEffOpSize)
5770 {
5771 case IEMMODE_16BIT:
5772 IEM_MC_BEGIN(4, 2);
5773 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5774 IEM_MC_ARG(uint16_t, u16Src, 1);
5775 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5776 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5778
5779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5781 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5782 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5783 IEM_MC_FETCH_EFLAGS(EFlags);
5784 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5785 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5786
5787 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5788 IEM_MC_COMMIT_EFLAGS(EFlags);
5789 IEM_MC_ADVANCE_RIP();
5790 IEM_MC_END();
5791 return VINF_SUCCESS;
5792
5793 case IEMMODE_32BIT:
5794 IEM_MC_BEGIN(4, 2);
5795 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5796 IEM_MC_ARG(uint32_t, u32Src, 1);
5797 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5798 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5800
5801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5803 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5804 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5805 IEM_MC_FETCH_EFLAGS(EFlags);
5806 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5807 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5808
5809 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5810 IEM_MC_COMMIT_EFLAGS(EFlags);
5811 IEM_MC_ADVANCE_RIP();
5812 IEM_MC_END();
5813 return VINF_SUCCESS;
5814
5815 case IEMMODE_64BIT:
5816 IEM_MC_BEGIN(4, 2);
5817 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5818 IEM_MC_ARG(uint64_t, u64Src, 1);
5819 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5820 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5822
5823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5825 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5826 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5827 IEM_MC_FETCH_EFLAGS(EFlags);
5828 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5829 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5830
5831 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5832 IEM_MC_COMMIT_EFLAGS(EFlags);
5833 IEM_MC_ADVANCE_RIP();
5834 IEM_MC_END();
5835 return VINF_SUCCESS;
5836
5837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5838 }
5839 }
5840}
5841
5842
5843
5844/** Opcode 0x0f 0xa4. */
5845FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5846{
5847 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5848 IEMOP_HLP_MIN_386();
5849 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5850}
5851
5852
5853/** Opcode 0x0f 0xa5. */
5854FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5855{
5856 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5857 IEMOP_HLP_MIN_386();
5858 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5859}
5860
5861
5862/** Opcode 0x0f 0xa8. */
5863FNIEMOP_DEF(iemOp_push_gs)
5864{
5865 IEMOP_MNEMONIC(push_gs, "push gs");
5866 IEMOP_HLP_MIN_386();
5867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5868 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5869}
5870
5871
5872/** Opcode 0x0f 0xa9. */
5873FNIEMOP_DEF(iemOp_pop_gs)
5874{
5875 IEMOP_MNEMONIC(pop_gs, "pop gs");
5876 IEMOP_HLP_MIN_386();
5877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5878 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5879}
5880
5881
5882/** Opcode 0x0f 0xaa. */
5883FNIEMOP_STUB(iemOp_rsm);
5884//IEMOP_HLP_MIN_386();
5885
5886
5887/** Opcode 0x0f 0xab. */
5888FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5889{
5890 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5891 IEMOP_HLP_MIN_386();
5892 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5893}
5894
5895
5896/** Opcode 0x0f 0xac. */
5897FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5898{
5899 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5900 IEMOP_HLP_MIN_386();
5901 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5902}
5903
5904
5905/** Opcode 0x0f 0xad. */
5906FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5907{
5908 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5909 IEMOP_HLP_MIN_386();
5910 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5911}
5912
5913
5914/** Opcode 0x0f 0xae mem/0. */
5915FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5916{
5917 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5918 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5919 return IEMOP_RAISE_INVALID_OPCODE();
5920
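 /* Note! FXSAVE stores a 512 byte state image that must be 16 byte
 aligned; the alignment check and the layout details live in
 iemCImpl_fxsave. */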
5921 IEM_MC_BEGIN(3, 1);
5922 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5923 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5924 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5927 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5928 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5929 IEM_MC_END();
5930 return VINF_SUCCESS;
5931}
5932
5933
5934/** Opcode 0x0f 0xae mem/1. */
5935FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5936{
5937 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5938 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5939 return IEMOP_RAISE_INVALID_OPCODE();
5940
5941 IEM_MC_BEGIN(3, 1);
5942 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5943 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5944 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5947 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5948 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5949 IEM_MC_END();
5950 return VINF_SUCCESS;
5951}
5952
5953
5954/** Opcode 0x0f 0xae mem/2. */
5955FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5956
5957/** Opcode 0x0f 0xae mem/3. */
5958FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5959
5960/** Opcode 0x0f 0xae mem/4. */
5961FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5962
5963/** Opcode 0x0f 0xae mem/5. */
5964FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5965
5966/** Opcode 0x0f 0xae mem/6. */
5967FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5968
5969/** Opcode 0x0f 0xae mem/7. */
5970FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5971
5972
5973/** Opcode 0x0f 0xae 11b/5. */
5974FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5975{
5976 RT_NOREF_PV(bRm);
5977 IEMOP_MNEMONIC(lfence, "lfence");
5978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5979 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5980 return IEMOP_RAISE_INVALID_OPCODE();
5981
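 /* Note! The guest feature check above decides #UD; the host check
 below is needed because the fence is executed natively and
 the host CPU may lack SSE2 even when the guest profile
 includes it. */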
5982 IEM_MC_BEGIN(0, 0);
5983 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5984 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5985 else
5986 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5987 IEM_MC_ADVANCE_RIP();
5988 IEM_MC_END();
5989 return VINF_SUCCESS;
5990}
5991
5992
5993/** Opcode 0x0f 0xae 11b/6. */
5994FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5995{
5996 RT_NOREF_PV(bRm);
5997 IEMOP_MNEMONIC(mfence, "mfence");
5998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5999 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6000 return IEMOP_RAISE_INVALID_OPCODE();
6001
6002 IEM_MC_BEGIN(0, 0);
6003 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6004 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6005 else
6006 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6007 IEM_MC_ADVANCE_RIP();
6008 IEM_MC_END();
6009 return VINF_SUCCESS;
6010}
6011
6012
6013/** Opcode 0x0f 0xae 11b/7. */
6014FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6015{
6016 RT_NOREF_PV(bRm);
6017 IEMOP_MNEMONIC(sfence, "sfence");
6018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6019 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6020 return IEMOP_RAISE_INVALID_OPCODE();
6021
6022 IEM_MC_BEGIN(0, 0);
6023 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6024 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6025 else
6026 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6027 IEM_MC_ADVANCE_RIP();
6028 IEM_MC_END();
6029 return VINF_SUCCESS;
6030}
6031
6032
6033/** Opcode 0xf3 0x0f 0xae 11b/0. */
6034FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6035
6036/** Opcode 0xf3 0x0f 0xae 11b/1. */
6037FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6038
6039/** Opcode 0xf3 0x0f 0xae 11b/2. */
6040FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6041
6042/** Opcode 0xf3 0x0f 0xae 11b/3. */
6043FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6044
6045
6046/** Opcode 0x0f 0xae. */
6047FNIEMOP_DEF(iemOp_Grp15)
6048{
6049 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
6050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
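 /*
 * Group 15 splits on mod: memory forms decode on the reg field to
 * FXSAVE/FXRSTOR/LDMXCSR/STMXCSR/XSAVE/XRSTOR/XSAVEOPT/CLFLUSH,
 * while register forms select the fences (no prefix) or the FS/GS
 * base accessors (F3 prefix).
 */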
6051 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6052 {
6053 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6054 {
6055 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
6056 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
6057 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
6058 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
6059 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
6060 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
6061 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt, bRm);
6062 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
6063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6064 }
6065 }
6066 else
6067 {
6068 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
6069 {
6070 case 0:
6071 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6072 {
6073 case 0: return IEMOP_RAISE_INVALID_OPCODE();
6074 case 1: return IEMOP_RAISE_INVALID_OPCODE();
6075 case 2: return IEMOP_RAISE_INVALID_OPCODE();
6076 case 3: return IEMOP_RAISE_INVALID_OPCODE();
6077 case 4: return IEMOP_RAISE_INVALID_OPCODE();
6078 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
6079 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
6080 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
6081 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6082 }
6083 break;
6084
6085 case IEM_OP_PRF_REPZ:
6086 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6087 {
6088 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
6089 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
6090 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
6091 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
6092 case 4: return IEMOP_RAISE_INVALID_OPCODE();
6093 case 5: return IEMOP_RAISE_INVALID_OPCODE();
6094 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6095 case 7: return IEMOP_RAISE_INVALID_OPCODE();
6096 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6097 }
6098 break;
6099
6100 default:
6101 return IEMOP_RAISE_INVALID_OPCODE();
6102 }
6103 }
6104}
6105
6106
6107/** Opcode 0x0f 0xaf. */
6108FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6109{
6110 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6111 IEMOP_HLP_MIN_386();
6112 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6113 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6114}
6115
6116
6117/** Opcode 0x0f 0xb0. */
6118FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6119{
6120 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6121 IEMOP_HLP_MIN_486();
6122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
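 /* Note! CMPXCHG: if AL == dst then ZF is set and dst := src, else
 ZF is cleared and AL := dst. Both outcomes are handled by
 the assembly worker, which is why AL is passed by
 reference. */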
6123
6124 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6125 {
6126 IEMOP_HLP_DONE_DECODING();
6127 IEM_MC_BEGIN(4, 0);
6128 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6129 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6130 IEM_MC_ARG(uint8_t, u8Src, 2);
6131 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6132
6133 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6134 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6135 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6136 IEM_MC_REF_EFLAGS(pEFlags);
6137 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6138 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6139 else
6140 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6141
6142 IEM_MC_ADVANCE_RIP();
6143 IEM_MC_END();
6144 }
6145 else
6146 {
6147 IEM_MC_BEGIN(4, 3);
6148 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6149 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6150 IEM_MC_ARG(uint8_t, u8Src, 2);
6151 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6153 IEM_MC_LOCAL(uint8_t, u8Al);
6154
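 /* Note! AL is buffered in a local and only written back after the
 memory commit, so a fault on the commit path leaves the
 architectural register state unchanged. */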
6155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6156 IEMOP_HLP_DONE_DECODING();
6157 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6158 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6159 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6160 IEM_MC_FETCH_EFLAGS(EFlags);
6161 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6162 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6163 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6164 else
6165 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6166
6167 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6168 IEM_MC_COMMIT_EFLAGS(EFlags);
6169 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 }
6173 return VINF_SUCCESS;
6174}
6175
6176/** Opcode 0x0f 0xb1. */
6177FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6178{
6179 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6180 IEMOP_HLP_MIN_486();
6181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6182
6183 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6184 {
6185 IEMOP_HLP_DONE_DECODING();
6186 switch (pVCpu->iem.s.enmEffOpSize)
6187 {
6188 case IEMMODE_16BIT:
6189 IEM_MC_BEGIN(4, 0);
6190 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6191 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6192 IEM_MC_ARG(uint16_t, u16Src, 2);
6193 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6194
6195 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6196 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6197 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6198 IEM_MC_REF_EFLAGS(pEFlags);
6199 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6200 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6201 else
6202 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6203
6204 IEM_MC_ADVANCE_RIP();
6205 IEM_MC_END();
6206 return VINF_SUCCESS;
6207
6208 case IEMMODE_32BIT:
6209 IEM_MC_BEGIN(4, 0);
6210 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6211 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6212 IEM_MC_ARG(uint32_t, u32Src, 2);
6213 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6214
6215 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6216 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6217 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6218 IEM_MC_REF_EFLAGS(pEFlags);
6219 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6220 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6221 else
6222 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6223
6224 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6225 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6226 IEM_MC_ADVANCE_RIP();
6227 IEM_MC_END();
6228 return VINF_SUCCESS;
6229
6230 case IEMMODE_64BIT:
6231 IEM_MC_BEGIN(4, 0);
6232 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6233 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6234#ifdef RT_ARCH_X86
6235 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6236#else
6237 IEM_MC_ARG(uint64_t, u64Src, 2);
6238#endif
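 /* Note! On 32-bit hosts the 64-bit source is passed by reference
 instead of by value, presumably to spare the assembly
 worker a 64-bit value argument in the 32-bit calling
 convention. */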
6239 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6240
6241 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6242 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6243 IEM_MC_REF_EFLAGS(pEFlags);
6244#ifdef RT_ARCH_X86
6245 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6246 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6247 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6248 else
6249 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6250#else
6251 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6252 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6253 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6254 else
6255 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6256#endif
6257
6258 IEM_MC_ADVANCE_RIP();
6259 IEM_MC_END();
6260 return VINF_SUCCESS;
6261
6262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6263 }
6264 }
6265 else
6266 {
6267 switch (pVCpu->iem.s.enmEffOpSize)
6268 {
6269 case IEMMODE_16BIT:
6270 IEM_MC_BEGIN(4, 3);
6271 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6272 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6273 IEM_MC_ARG(uint16_t, u16Src, 2);
6274 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6276 IEM_MC_LOCAL(uint16_t, u16Ax);
6277
6278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6279 IEMOP_HLP_DONE_DECODING();
6280 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6281 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6282 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6283 IEM_MC_FETCH_EFLAGS(EFlags);
6284 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6285 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6286 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6287 else
6288 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6289
6290 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6291 IEM_MC_COMMIT_EFLAGS(EFlags);
6292 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6293 IEM_MC_ADVANCE_RIP();
6294 IEM_MC_END();
6295 return VINF_SUCCESS;
6296
6297 case IEMMODE_32BIT:
6298 IEM_MC_BEGIN(4, 3);
6299 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6300 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6301 IEM_MC_ARG(uint32_t, u32Src, 2);
6302 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6304 IEM_MC_LOCAL(uint32_t, u32Eax);
6305
6306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6307 IEMOP_HLP_DONE_DECODING();
6308 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6309 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6310 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6311 IEM_MC_FETCH_EFLAGS(EFlags);
6312 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6313 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6314 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6315 else
6316 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6317
6318 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6319 IEM_MC_COMMIT_EFLAGS(EFlags);
6320 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6321 IEM_MC_ADVANCE_RIP();
6322 IEM_MC_END();
6323 return VINF_SUCCESS;
6324
6325 case IEMMODE_64BIT:
6326 IEM_MC_BEGIN(4, 3);
6327 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6328 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6329#ifdef RT_ARCH_X86
6330 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6331#else
6332 IEM_MC_ARG(uint64_t, u64Src, 2);
6333#endif
6334 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6336 IEM_MC_LOCAL(uint64_t, u64Rax);
6337
6338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6339 IEMOP_HLP_DONE_DECODING();
6340 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6341 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6342 IEM_MC_FETCH_EFLAGS(EFlags);
6343 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6344#ifdef RT_ARCH_X86
6345 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6346 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6347 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6348 else
6349 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6350#else
6351 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6352 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6353 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6354 else
6355 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6356#endif
6357
6358 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6359 IEM_MC_COMMIT_EFLAGS(EFlags);
6360 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6361 IEM_MC_ADVANCE_RIP();
6362 IEM_MC_END();
6363 return VINF_SUCCESS;
6364
6365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6366 }
6367 }
6368}
6369
6370
6371FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6372{
6373 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6374 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
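 /* Note! The Mp operand is a far pointer in memory: an offset of 2,
 4 or 8 bytes (by operand size) followed by a 16-bit
 selector, hence the displaced selector fetch below. */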
6375
6376 switch (pVCpu->iem.s.enmEffOpSize)
6377 {
6378 case IEMMODE_16BIT:
6379 IEM_MC_BEGIN(5, 1);
6380 IEM_MC_ARG(uint16_t, uSel, 0);
6381 IEM_MC_ARG(uint16_t, offSeg, 1);
6382 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6383 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6384 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6385 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6388 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6389 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6390 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6391 IEM_MC_END();
6392 return VINF_SUCCESS;
6393
6394 case IEMMODE_32BIT:
6395 IEM_MC_BEGIN(5, 1);
6396 IEM_MC_ARG(uint16_t, uSel, 0);
6397 IEM_MC_ARG(uint32_t, offSeg, 1);
6398 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6399 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6400 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6401 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6404 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6405 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6406 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6407 IEM_MC_END();
6408 return VINF_SUCCESS;
6409
6410 case IEMMODE_64BIT:
6411 IEM_MC_BEGIN(5, 1);
6412 IEM_MC_ARG(uint16_t, uSel, 0);
6413 IEM_MC_ARG(uint64_t, offSeg, 1);
6414 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6415 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6416 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6417 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6420 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
6421 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6422 else
6423 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6424 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6425 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6426 IEM_MC_END();
6427 return VINF_SUCCESS;
6428
6429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6430 }
6431}
6432
6433
6434/** Opcode 0x0f 0xb2. */
6435FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6436{
6437 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6438 IEMOP_HLP_MIN_386();
6439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6440 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6441 return IEMOP_RAISE_INVALID_OPCODE();
6442 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6443}
6444
6445
6446/** Opcode 0x0f 0xb3. */
6447FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6448{
6449 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6450 IEMOP_HLP_MIN_386();
6451 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6452}
6453
6454
6455/** Opcode 0x0f 0xb4. */
6456FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6457{
6458 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6459 IEMOP_HLP_MIN_386();
6460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6462 return IEMOP_RAISE_INVALID_OPCODE();
6463 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6464}
6465
6466
6467/** Opcode 0x0f 0xb5. */
6468FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6469{
6470 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6471 IEMOP_HLP_MIN_386();
6472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6473 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6474 return IEMOP_RAISE_INVALID_OPCODE();
6475 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6476}
6477
6478
6479/** Opcode 0x0f 0xb6. */
6480FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6481{
6482 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6483 IEMOP_HLP_MIN_386();
6484
6485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6486
6487 /*
6488 * If rm is denoting a register, no more instruction bytes.
6489 */
6490 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6491 {
6492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6493 switch (pVCpu->iem.s.enmEffOpSize)
6494 {
6495 case IEMMODE_16BIT:
6496 IEM_MC_BEGIN(0, 1);
6497 IEM_MC_LOCAL(uint16_t, u16Value);
6498 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6499 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6500 IEM_MC_ADVANCE_RIP();
6501 IEM_MC_END();
6502 return VINF_SUCCESS;
6503
6504 case IEMMODE_32BIT:
6505 IEM_MC_BEGIN(0, 1);
6506 IEM_MC_LOCAL(uint32_t, u32Value);
6507 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6508 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6509 IEM_MC_ADVANCE_RIP();
6510 IEM_MC_END();
6511 return VINF_SUCCESS;
6512
6513 case IEMMODE_64BIT:
6514 IEM_MC_BEGIN(0, 1);
6515 IEM_MC_LOCAL(uint64_t, u64Value);
6516 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6517 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6518 IEM_MC_ADVANCE_RIP();
6519 IEM_MC_END();
6520 return VINF_SUCCESS;
6521
6522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6523 }
6524 }
6525 else
6526 {
6527 /*
6528 * We're loading a register from memory.
6529 */
6530 switch (pVCpu->iem.s.enmEffOpSize)
6531 {
6532 case IEMMODE_16BIT:
6533 IEM_MC_BEGIN(0, 2);
6534 IEM_MC_LOCAL(uint16_t, u16Value);
6535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6538 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6539 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6540 IEM_MC_ADVANCE_RIP();
6541 IEM_MC_END();
6542 return VINF_SUCCESS;
6543
6544 case IEMMODE_32BIT:
6545 IEM_MC_BEGIN(0, 2);
6546 IEM_MC_LOCAL(uint32_t, u32Value);
6547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6550 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6551 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6552 IEM_MC_ADVANCE_RIP();
6553 IEM_MC_END();
6554 return VINF_SUCCESS;
6555
6556 case IEMMODE_64BIT:
6557 IEM_MC_BEGIN(0, 2);
6558 IEM_MC_LOCAL(uint64_t, u64Value);
6559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6562 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6563 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6564 IEM_MC_ADVANCE_RIP();
6565 IEM_MC_END();
6566 return VINF_SUCCESS;
6567
6568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6569 }
6570 }
6571}
6572
6573
6574/** Opcode 0x0f 0xb7. */
6575FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6576{
6577 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6578 IEMOP_HLP_MIN_386();
6579
6580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6581
6582 /** @todo Not entirely sure how the operand size prefix is handled here,
6583 * assuming that it will be ignored. Would be nice to have a few
6584 * tests for this. */
6585 /*
6586 * If rm is denoting a register, no more instruction bytes.
6587 */
6588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6589 {
6590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6591 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6592 {
6593 IEM_MC_BEGIN(0, 1);
6594 IEM_MC_LOCAL(uint32_t, u32Value);
6595 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6596 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6597 IEM_MC_ADVANCE_RIP();
6598 IEM_MC_END();
6599 }
6600 else
6601 {
6602 IEM_MC_BEGIN(0, 1);
6603 IEM_MC_LOCAL(uint64_t, u64Value);
6604 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6605 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6606 IEM_MC_ADVANCE_RIP();
6607 IEM_MC_END();
6608 }
6609 }
6610 else
6611 {
6612 /*
6613 * We're loading a register from memory.
6614 */
6615 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6616 {
6617 IEM_MC_BEGIN(0, 2);
6618 IEM_MC_LOCAL(uint32_t, u32Value);
6619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6622 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6623 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6624 IEM_MC_ADVANCE_RIP();
6625 IEM_MC_END();
6626 }
6627 else
6628 {
6629 IEM_MC_BEGIN(0, 2);
6630 IEM_MC_LOCAL(uint64_t, u64Value);
6631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6634 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6635 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6636 IEM_MC_ADVANCE_RIP();
6637 IEM_MC_END();
6638 }
6639 }
6640 return VINF_SUCCESS;
6641}
6642
6643
6644/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6645FNIEMOP_UD_STUB(iemOp_jmpe);
6646/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6647FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6648
6649
6650/** Opcode 0x0f 0xb9. */
6651FNIEMOP_DEF(iemOp_Grp10)
6652{
6653 Log(("iemOp_Grp10 -> #UD\n"));
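 /* 0x0f 0xb9 is reserved as a permanently undefined opcode (UD1). */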
6654 return IEMOP_RAISE_INVALID_OPCODE();
6655}
6656
6657
6658/** Opcode 0x0f 0xba. */
6659FNIEMOP_DEF(iemOp_Grp8)
6660{
6661 IEMOP_HLP_MIN_386();
6662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6663 PCIEMOPBINSIZES pImpl;
6664 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6665 {
6666 case 0: case 1: case 2: case 3:
6667 return IEMOP_RAISE_INVALID_OPCODE();
6668 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6669 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6670 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6671 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6672 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6673 }
6674 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6675
6676 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6677 {
6678 /* register destination. */
6679 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6681
6682 switch (pVCpu->iem.s.enmEffOpSize)
6683 {
6684 case IEMMODE_16BIT:
6685 IEM_MC_BEGIN(3, 0);
6686 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6687 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6688 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6689
6690 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6691 IEM_MC_REF_EFLAGS(pEFlags);
6692 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6693
6694 IEM_MC_ADVANCE_RIP();
6695 IEM_MC_END();
6696 return VINF_SUCCESS;
6697
6698 case IEMMODE_32BIT:
6699 IEM_MC_BEGIN(3, 0);
6700 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6701 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6702 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6703
6704 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6705 IEM_MC_REF_EFLAGS(pEFlags);
6706 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6707
6708 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6709 IEM_MC_ADVANCE_RIP();
6710 IEM_MC_END();
6711 return VINF_SUCCESS;
6712
6713 case IEMMODE_64BIT:
6714 IEM_MC_BEGIN(3, 0);
6715 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6716 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6717 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6718
6719 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6720 IEM_MC_REF_EFLAGS(pEFlags);
6721 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6722
6723 IEM_MC_ADVANCE_RIP();
6724 IEM_MC_END();
6725 return VINF_SUCCESS;
6726
6727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6728 }
6729 }
6730 else
6731 {
6732 /* memory destination. */
6733
6734 uint32_t fAccess;
6735 if (pImpl->pfnLockedU16)
6736 fAccess = IEM_ACCESS_DATA_RW;
6737 else /* BT */
6738 fAccess = IEM_ACCESS_DATA_R;
6739
6740 /** @todo test negative bit offsets! */
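 /* Note! Unlike the Gv forms, the immediate bit offset is masked to
 the operand width (0x0f/0x1f/0x3f below), so it cannot
 address outside the operand and no effective address
 adjustment is needed. */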
6741 switch (pVCpu->iem.s.enmEffOpSize)
6742 {
6743 case IEMMODE_16BIT:
6744 IEM_MC_BEGIN(3, 1);
6745 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6746 IEM_MC_ARG(uint16_t, u16Src, 1);
6747 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6749
6750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6751 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6752 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6753 if (pImpl->pfnLockedU16)
6754 IEMOP_HLP_DONE_DECODING();
6755 else
6756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6757 IEM_MC_FETCH_EFLAGS(EFlags);
6758 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6759 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6760 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6761 else
6762 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6763 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6764
6765 IEM_MC_COMMIT_EFLAGS(EFlags);
6766 IEM_MC_ADVANCE_RIP();
6767 IEM_MC_END();
6768 return VINF_SUCCESS;
6769
6770 case IEMMODE_32BIT:
6771 IEM_MC_BEGIN(3, 1);
6772 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6773 IEM_MC_ARG(uint32_t, u32Src, 1);
6774 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6776
6777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6778 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6779 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6780 if (pImpl->pfnLockedU16)
6781 IEMOP_HLP_DONE_DECODING();
6782 else
6783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6784 IEM_MC_FETCH_EFLAGS(EFlags);
6785 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6786 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6787 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6788 else
6789 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6790 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6791
6792 IEM_MC_COMMIT_EFLAGS(EFlags);
6793 IEM_MC_ADVANCE_RIP();
6794 IEM_MC_END();
6795 return VINF_SUCCESS;
6796
6797 case IEMMODE_64BIT:
6798 IEM_MC_BEGIN(3, 1);
6799 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6800 IEM_MC_ARG(uint64_t, u64Src, 1);
6801 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6803
6804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6805 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6806 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6807 if (pImpl->pfnLockedU16)
6808 IEMOP_HLP_DONE_DECODING();
6809 else
6810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6811 IEM_MC_FETCH_EFLAGS(EFlags);
6812 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6813 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6814 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6815 else
6816 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6817 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6818
6819 IEM_MC_COMMIT_EFLAGS(EFlags);
6820 IEM_MC_ADVANCE_RIP();
6821 IEM_MC_END();
6822 return VINF_SUCCESS;
6823
6824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6825 }
6826 }
6827
6828}
6829
6830
6831/** Opcode 0x0f 0xbb. */
6832FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6833{
6834 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6835 IEMOP_HLP_MIN_386();
6836 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6837}
6838
6839
6840/** Opcode 0x0f 0xbc. */
6841FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6842{
6843 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6844 IEMOP_HLP_MIN_386();
6845 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
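 /* Note! Only ZF is defined for BSF (set when the source is zero);
 the rest are undefined, hence the mask above. */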
6846 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6847}
6848
6849
6850/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6851FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6852
6853
6854/** Opcode 0x0f 0xbd. */
6855FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6856{
6857 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6858 IEMOP_HLP_MIN_386();
6859 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6860 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6861}
6862
6863
6864/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6865FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6866
6867
6868/** Opcode 0x0f 0xbe. */
6869FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6870{
6871 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6872 IEMOP_HLP_MIN_386();
6873
6874 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6875
6876 /*
6877 * If rm is denoting a register, no more instruction bytes.
6878 */
6879 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6880 {
6881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6882 switch (pVCpu->iem.s.enmEffOpSize)
6883 {
6884 case IEMMODE_16BIT:
6885 IEM_MC_BEGIN(0, 1);
6886 IEM_MC_LOCAL(uint16_t, u16Value);
6887 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6888 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6889 IEM_MC_ADVANCE_RIP();
6890 IEM_MC_END();
6891 return VINF_SUCCESS;
6892
6893 case IEMMODE_32BIT:
6894 IEM_MC_BEGIN(0, 1);
6895 IEM_MC_LOCAL(uint32_t, u32Value);
6896 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6897 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6898 IEM_MC_ADVANCE_RIP();
6899 IEM_MC_END();
6900 return VINF_SUCCESS;
6901
6902 case IEMMODE_64BIT:
6903 IEM_MC_BEGIN(0, 1);
6904 IEM_MC_LOCAL(uint64_t, u64Value);
6905 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6906 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6907 IEM_MC_ADVANCE_RIP();
6908 IEM_MC_END();
6909 return VINF_SUCCESS;
6910
6911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6912 }
6913 }
6914 else
6915 {
6916 /*
6917 * We're loading a register from memory.
6918 */
6919 switch (pVCpu->iem.s.enmEffOpSize)
6920 {
6921 case IEMMODE_16BIT:
6922 IEM_MC_BEGIN(0, 2);
6923 IEM_MC_LOCAL(uint16_t, u16Value);
6924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6927 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6928 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6929 IEM_MC_ADVANCE_RIP();
6930 IEM_MC_END();
6931 return VINF_SUCCESS;
6932
6933 case IEMMODE_32BIT:
6934 IEM_MC_BEGIN(0, 2);
6935 IEM_MC_LOCAL(uint32_t, u32Value);
6936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6939 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6940 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6941 IEM_MC_ADVANCE_RIP();
6942 IEM_MC_END();
6943 return VINF_SUCCESS;
6944
6945 case IEMMODE_64BIT:
6946 IEM_MC_BEGIN(0, 2);
6947 IEM_MC_LOCAL(uint64_t, u64Value);
6948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6951 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6952 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6953 IEM_MC_ADVANCE_RIP();
6954 IEM_MC_END();
6955 return VINF_SUCCESS;
6956
6957 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6958 }
6959 }
6960}
6961
6962
6963/** Opcode 0x0f 0xbf. */
6964FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6965{
6966 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6967 IEMOP_HLP_MIN_386();
6968
6969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6970
6971 /** @todo Not entirely sure how the operand size prefix is handled here,
6972 * assuming that it will be ignored. Would be nice to have a few
6973 * tests for this. */
6974 /*
6975 * If rm is denoting a register, no more instruction bytes.
6976 */
6977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6978 {
6979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6980 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6981 {
6982 IEM_MC_BEGIN(0, 1);
6983 IEM_MC_LOCAL(uint32_t, u32Value);
6984 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6985 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6986 IEM_MC_ADVANCE_RIP();
6987 IEM_MC_END();
6988 }
6989 else
6990 {
6991 IEM_MC_BEGIN(0, 1);
6992 IEM_MC_LOCAL(uint64_t, u64Value);
6993 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6994 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6995 IEM_MC_ADVANCE_RIP();
6996 IEM_MC_END();
6997 }
6998 }
6999 else
7000 {
7001 /*
7002 * We're loading a register from memory.
7003 */
7004 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7005 {
7006 IEM_MC_BEGIN(0, 2);
7007 IEM_MC_LOCAL(uint32_t, u32Value);
7008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7011 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7012 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7013 IEM_MC_ADVANCE_RIP();
7014 IEM_MC_END();
7015 }
7016 else
7017 {
7018 IEM_MC_BEGIN(0, 2);
7019 IEM_MC_LOCAL(uint64_t, u64Value);
7020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7023 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7024 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7025 IEM_MC_ADVANCE_RIP();
7026 IEM_MC_END();
7027 }
7028 }
7029 return VINF_SUCCESS;
7030}
7031
7032
7033/** Opcode 0x0f 0xc0. */
7034FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7035{
7036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7037 IEMOP_HLP_MIN_486();
7038 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
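/* xadd semantics: TEMP = DST + SRC; SRC = DST; DST = TEMP. The original
   destination value ends up in the source register, which is why the
   memory form below works on a local copy of the register. */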
7039
7040 /*
7041 * If rm is denoting a register, no more instruction bytes.
7042 */
7043 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7044 {
7045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7046
7047 IEM_MC_BEGIN(3, 0);
7048 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7049 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7050 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7051
7052 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7053 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7054 IEM_MC_REF_EFLAGS(pEFlags);
7055 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7056
7057 IEM_MC_ADVANCE_RIP();
7058 IEM_MC_END();
7059 }
7060 else
7061 {
7062 /*
7063 * We're accessing memory.
7064 */
7065 IEM_MC_BEGIN(3, 3);
7066 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7067 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7068 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7069 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7071
7072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7073 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7074 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7075 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7076 IEM_MC_FETCH_EFLAGS(EFlags);
7077 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7078 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7079 else
7080 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7081
7082 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7083 IEM_MC_COMMIT_EFLAGS(EFlags);
7084 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7085 IEM_MC_ADVANCE_RIP();
7086 IEM_MC_END();
7087 return VINF_SUCCESS;
7088 }
7089 return VINF_SUCCESS;
7090}
7091
7092
7093/** Opcode 0x0f 0xc1. */
7094FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7095{
7096 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7097 IEMOP_HLP_MIN_486();
7098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7099
7100 /*
7101 * If rm is denoting a register, no more instruction bytes.
7102 */
7103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7104 {
7105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7106
7107 switch (pVCpu->iem.s.enmEffOpSize)
7108 {
7109 case IEMMODE_16BIT:
7110 IEM_MC_BEGIN(3, 0);
7111 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7112 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7113 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7114
7115 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7116 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7117 IEM_MC_REF_EFLAGS(pEFlags);
7118 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7119
7120 IEM_MC_ADVANCE_RIP();
7121 IEM_MC_END();
7122 return VINF_SUCCESS;
7123
7124 case IEMMODE_32BIT:
7125 IEM_MC_BEGIN(3, 0);
7126 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7127 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7128 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7129
7130 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7131 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7132 IEM_MC_REF_EFLAGS(pEFlags);
7133 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7134
7135 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7136 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7137 IEM_MC_ADVANCE_RIP();
7138 IEM_MC_END();
7139 return VINF_SUCCESS;
7140
7141 case IEMMODE_64BIT:
7142 IEM_MC_BEGIN(3, 0);
7143 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7144 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7145 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7146
7147 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7148 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7149 IEM_MC_REF_EFLAGS(pEFlags);
7150 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7151
7152 IEM_MC_ADVANCE_RIP();
7153 IEM_MC_END();
7154 return VINF_SUCCESS;
7155
7156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7157 }
7158 }
7159 else
7160 {
7161 /*
7162 * We're accessing memory.
7163 */
7164 switch (pVCpu->iem.s.enmEffOpSize)
7165 {
7166 case IEMMODE_16BIT:
7167 IEM_MC_BEGIN(3, 3);
7168 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7169 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7170 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7171 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7173
7174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7175 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7176 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7177 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7178 IEM_MC_FETCH_EFLAGS(EFlags);
7179 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7180 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7181 else
7182 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7183
7184 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7185 IEM_MC_COMMIT_EFLAGS(EFlags);
7186 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7187 IEM_MC_ADVANCE_RIP();
7188 IEM_MC_END();
7189 return VINF_SUCCESS;
7190
7191 case IEMMODE_32BIT:
7192 IEM_MC_BEGIN(3, 3);
7193 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7194 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7195 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7196 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7198
7199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7200 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7201 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7202 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7203 IEM_MC_FETCH_EFLAGS(EFlags);
7204 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7205 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7206 else
7207 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7208
7209 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7210 IEM_MC_COMMIT_EFLAGS(EFlags);
7211 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7212 IEM_MC_ADVANCE_RIP();
7213 IEM_MC_END();
7214 return VINF_SUCCESS;
7215
7216 case IEMMODE_64BIT:
7217 IEM_MC_BEGIN(3, 3);
7218 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7219 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7220 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7221 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7223
7224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7225 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7226 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7227 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7228 IEM_MC_FETCH_EFLAGS(EFlags);
7229 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7230 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7231 else
7232 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7233
7234 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7235 IEM_MC_COMMIT_EFLAGS(EFlags);
7236 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7237 IEM_MC_ADVANCE_RIP();
7238 IEM_MC_END();
7239 return VINF_SUCCESS;
7240
7241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7242 }
7243 }
7244}
7245
7246
7247/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7248FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7249/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7250FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7251/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7252FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7253/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7254FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7255
7256
7257/** Opcode 0x0f 0xc3. */
7258FNIEMOP_DEF(iemOp_movnti_My_Gy)
7259{
7260 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
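/* movnti is a non-temporal (cache bypassing) store of a general register;
   it is an SSE2 instruction, hence the fSse2 check once decoding is done. */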
7261
7262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7263
7264 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7265 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7266 {
7267 switch (pVCpu->iem.s.enmEffOpSize)
7268 {
7269 case IEMMODE_32BIT:
7270 IEM_MC_BEGIN(0, 2);
7271 IEM_MC_LOCAL(uint32_t, u32Value);
7272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7273
7274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7276 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7277 return IEMOP_RAISE_INVALID_OPCODE();
7278
7279 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7280 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7281 IEM_MC_ADVANCE_RIP();
7282 IEM_MC_END();
7283 break;
7284
7285 case IEMMODE_64BIT:
7286 IEM_MC_BEGIN(0, 2);
7287 IEM_MC_LOCAL(uint64_t, u64Value);
7288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7289
7290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7292 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7293 return IEMOP_RAISE_INVALID_OPCODE();
7294
7295 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7296 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7297 IEM_MC_ADVANCE_RIP();
7298 IEM_MC_END();
7299 break;
7300
7301 case IEMMODE_16BIT:
7302 /** @todo check this form. */
7303 return IEMOP_RAISE_INVALID_OPCODE();
7304 }
7305 }
7306 else
7307 return IEMOP_RAISE_INVALID_OPCODE();
7308 return VINF_SUCCESS;
7309}
7310/* Opcode 0x66 0x0f 0xc3 - invalid */
7311/* Opcode 0xf3 0x0f 0xc3 - invalid */
7312/* Opcode 0xf2 0x0f 0xc3 - invalid */
7313
7314/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7315FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7316/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7317FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7318/* Opcode 0xf3 0x0f 0xc4 - invalid */
7319/* Opcode 0xf2 0x0f 0xc4 - invalid */
7320
7321/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7322FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7323/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7324FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7325/* Opcode 0xf3 0x0f 0xc5 - invalid */
7326/* Opcode 0xf2 0x0f 0xc5 - invalid */
7327
7328/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7329FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7330/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7331FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7332/* Opcode 0xf3 0x0f 0xc6 - invalid */
7333/* Opcode 0xf2 0x0f 0xc6 - invalid */
7334
7335
7336/** Opcode 0x0f 0xc7 !11/1. */
7337FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7338{
7339 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
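/* cmpxchg8b: if EDX:EAX equals the memory operand, the instruction stores
   ECX:EBX to it and sets ZF; otherwise it loads the memory operand into
   EDX:EAX and clears ZF. */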
7340
7341 IEM_MC_BEGIN(4, 3);
7342 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7343 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7344 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7345 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7346 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7347 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7349
7350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7351 IEMOP_HLP_DONE_DECODING();
7352 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7353
7354 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7355 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7356 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7357
7358 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7359 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7360 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7361
7362 IEM_MC_FETCH_EFLAGS(EFlags);
7363 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7364 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7365 else
7366 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7367
7368 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7369 IEM_MC_COMMIT_EFLAGS(EFlags);
7370 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7371 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7372 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7373 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7374 IEM_MC_ENDIF();
7375 IEM_MC_ADVANCE_RIP();
7376
7377 IEM_MC_END();
7378 return VINF_SUCCESS;
7379}
7380
7381
7382/** Opcode REX.W 0x0f 0xc7 !11/1. */
7383FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7384{
7385 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7386 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7387 {
7388#if 0
7389 RT_NOREF(bRm);
7390 IEMOP_BITCH_ABOUT_STUB();
7391 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7392#else
7393 IEM_MC_BEGIN(4, 3);
7394 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7395 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7396 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7397 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7398 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7399 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7401
7402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7403 IEMOP_HLP_DONE_DECODING();
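/* cmpxchg16b requires the memory operand to be 16 byte aligned; a
   misaligned operand raises #GP(0) rather than #AC. */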
7404 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7405 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7406
7407 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7408 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7409 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7410
7411 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7412 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7413 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7414
7415 IEM_MC_FETCH_EFLAGS(EFlags);
7416# ifdef RT_ARCH_AMD64
7417 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7418 {
7419 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7420 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7421 else
7422 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7423 }
7424 else
7425# endif
7426 {
7427 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
7428 accesses that are not all atomic, which works fine in a uni-CPU guest
7429 configuration (ignoring DMA). If guest SMP is active we have no choice
7430 but to use a rendezvous callback here. Sigh. */
7431 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7432 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7433 else
7434 {
7435 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7436 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7437 }
7438 }
7439
7440 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7441 IEM_MC_COMMIT_EFLAGS(EFlags);
7442 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7443 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7444 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7445 IEM_MC_ENDIF();
7446 IEM_MC_ADVANCE_RIP();
7447
7448 IEM_MC_END();
7449 return VINF_SUCCESS;
7450#endif
7451 }
7452 Log(("cmpxchg16b -> #UD\n"));
7453 return IEMOP_RAISE_INVALID_OPCODE();
7454}
7455
7456
7457/** Opcode 0x0f 0xc7 11/6. */
7458FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7459
7460/** Opcode 0x0f 0xc7 !11/6. */
7461FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7462
7463/** Opcode 0x66 0x0f 0xc7 !11/6. */
7464FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7465
7466/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7467FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7468
7469/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7470FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7471
7472
7473/** Opcode 0x0f 0xc7. */
7474FNIEMOP_DEF(iemOp_Grp9)
7475{
7476 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7478 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7479 {
7480 case 0: case 2: case 3: case 4: case 5:
7481 return IEMOP_RAISE_INVALID_OPCODE();
7482 case 1:
7483 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7484 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7485 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7486 return IEMOP_RAISE_INVALID_OPCODE();
7487 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7488 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7489 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7490 case 6:
7491 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7492 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7493 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7494 {
7495 case 0:
7496 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7497 case IEM_OP_PRF_SIZE_OP:
7498 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7499 case IEM_OP_PRF_REPZ:
7500 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7501 default:
7502 return IEMOP_RAISE_INVALID_OPCODE();
7503 }
7504 case 7:
7505 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7506 {
7507 case 0:
7508 case IEM_OP_PRF_REPZ:
7509 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7510 default:
7511 return IEMOP_RAISE_INVALID_OPCODE();
7512 }
7513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7514 }
7515}
7516
7517
7518/**
7519 * Common 'bswap register' helper.
7520 */
7521FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7522{
7523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7524 switch (pVCpu->iem.s.enmEffOpSize)
7525 {
7526 case IEMMODE_16BIT:
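/* Note: bswap with a 16-bit operand is documented as undefined; the exact
   result is left to the assembly helper (real CPUs are commonly observed
   to zero the register). */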
7527 IEM_MC_BEGIN(1, 0);
7528 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7529 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7530 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7531 IEM_MC_ADVANCE_RIP();
7532 IEM_MC_END();
7533 return VINF_SUCCESS;
7534
7535 case IEMMODE_32BIT:
7536 IEM_MC_BEGIN(1, 0);
7537 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7538 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7539 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7540 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7541 IEM_MC_ADVANCE_RIP();
7542 IEM_MC_END();
7543 return VINF_SUCCESS;
7544
7545 case IEMMODE_64BIT:
7546 IEM_MC_BEGIN(1, 0);
7547 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7548 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7549 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7550 IEM_MC_ADVANCE_RIP();
7551 IEM_MC_END();
7552 return VINF_SUCCESS;
7553
7554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7555 }
7556}
7557
7558
7559/** Opcode 0x0f 0xc8. */
7560FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7561{
7562 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7563 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7564 prefix. It appears REX.B is actually the correct prefix. For a parallel
7565 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
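/* Example: 49 0f c8 (REX.W+REX.B) encodes 'bswap r8', whereas 4c 0f c8
   (REX.W+REX.R) still encodes 'bswap rax', because REX.R does not extend
   registers encoded in the opcode byte. */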
7566 IEMOP_HLP_MIN_486();
7567 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7568}
7569
7570
7571/** Opcode 0x0f 0xc9. */
7572FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7573{
7574 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7575 IEMOP_HLP_MIN_486();
7576 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7577}
7578
7579
7580/** Opcode 0x0f 0xca. */
7581FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7582{
7583 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7584 IEMOP_HLP_MIN_486();
7585 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7586}
7587
7588
7589/** Opcode 0x0f 0xcb. */
7590FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7591{
7592 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7593 IEMOP_HLP_MIN_486();
7594 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7595}
7596
7597
7598/** Opcode 0x0f 0xcc. */
7599FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7600{
7601 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7602 IEMOP_HLP_MIN_486();
7603 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7604}
7605
7606
7607/** Opcode 0x0f 0xcd. */
7608FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7609{
7610 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7611 IEMOP_HLP_MIN_486();
7612 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7613}
7614
7615
7616/** Opcode 0x0f 0xce. */
7617FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7618{
7619 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7620 IEMOP_HLP_MIN_486();
7621 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7622}
7623
7624
7625/** Opcode 0x0f 0xcf. */
7626FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7627{
7628 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7629 IEMOP_HLP_MIN_486();
7630 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7631}
7632
7633
7634/* Opcode 0x0f 0xd0 - invalid */
7635/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7636FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7637/* Opcode 0xf3 0x0f 0xd0 - invalid */
7638/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7639FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7640
7641/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7642FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7643/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7644FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7645/* Opcode 0xf3 0x0f 0xd1 - invalid */
7646/* Opcode 0xf2 0x0f 0xd1 - invalid */
7647
7648/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7649FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7650/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7651FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7652/* Opcode 0xf3 0x0f 0xd2 - invalid */
7653/* Opcode 0xf2 0x0f 0xd2 - invalid */
7654
7655/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7656FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7657/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7658FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7659/* Opcode 0xf3 0x0f 0xd3 - invalid */
7660/* Opcode 0xf2 0x0f 0xd3 - invalid */
7661
7662/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7663FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7664/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7665FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7666/* Opcode 0xf3 0x0f 0xd4 - invalid */
7667/* Opcode 0xf2 0x0f 0xd4 - invalid */
7668
7669/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7670FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7671/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7672FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7673/* Opcode 0xf3 0x0f 0xd5 - invalid */
7674/* Opcode 0xf2 0x0f 0xd5 - invalid */
7675
7676/* Opcode 0x0f 0xd6 - invalid */
7677/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7678FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7679/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7680FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7681/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7682FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7683#if 0
7684FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7685{
7686 /* Docs say register only. */
7687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7688
7689 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7690 {
7691 case IEM_OP_PRF_SIZE_OP: /* SSE */
7692 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7693 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7694 IEM_MC_BEGIN(2, 0);
7695 IEM_MC_ARG(uint64_t *, pDst, 0);
7696 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7697 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7698 IEM_MC_PREPARE_SSE_USAGE();
7699 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7700 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7701 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7702 IEM_MC_ADVANCE_RIP();
7703 IEM_MC_END();
7704 return VINF_SUCCESS;
7705
7706 case 0: /* MMX */
7707 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7708 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7709 IEM_MC_BEGIN(2, 0);
7710 IEM_MC_ARG(uint64_t *, pDst, 0);
7711 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7712 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7713 IEM_MC_PREPARE_FPU_USAGE();
7714 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7715 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7716 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7717 IEM_MC_ADVANCE_RIP();
7718 IEM_MC_END();
7719 return VINF_SUCCESS;
7720
7721 default:
7722 return IEMOP_RAISE_INVALID_OPCODE();
7723 }
7724}
7725#endif
7726
7727
7728/** Opcode 0x0f 0xd7. */
7729FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7730{
7731 /* Docs say register only. */
7732 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7733 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7734 return IEMOP_RAISE_INVALID_OPCODE();
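/* pmovmskb gathers the most significant bit of each source byte into the
   low bits of the destination GPR: 8 mask bits for the MMX form, 16 for
   the SSE form, with the remaining destination bits cleared. */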
7735
7736 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7737 /** @todo testcase: Check that the instruction implicitly clears the high
7738 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
7739 * and opcode modifications are made to work with the whole width (not
7740 * just 128). */
7741 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7742 {
7743 case IEM_OP_PRF_SIZE_OP: /* SSE */
7744 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7745 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7746 IEM_MC_BEGIN(2, 0);
7747 IEM_MC_ARG(uint64_t *, pDst, 0);
7748 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7749 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7750 IEM_MC_PREPARE_SSE_USAGE();
7751 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7752 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7753 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7754 IEM_MC_ADVANCE_RIP();
7755 IEM_MC_END();
7756 return VINF_SUCCESS;
7757
7758 case 0: /* MMX */
7759 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7760 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7761 IEM_MC_BEGIN(2, 0);
7762 IEM_MC_ARG(uint64_t *, pDst, 0);
7763 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7764 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7765 IEM_MC_PREPARE_FPU_USAGE();
7766 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7767 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7768 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7769 IEM_MC_ADVANCE_RIP();
7770 IEM_MC_END();
7771 return VINF_SUCCESS;
7772
7773 default:
7774 return IEMOP_RAISE_INVALID_OPCODE();
7775 }
7776}
7777
7778
7779/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7780FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7781/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7782FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7783/* Opcode 0xf3 0x0f 0xd8 - invalid */
7784/* Opcode 0xf2 0x0f 0xd8 - invalid */
7785
7786/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7787FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7788/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7789FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7790/* Opcode 0xf3 0x0f 0xd9 - invalid */
7791/* Opcode 0xf2 0x0f 0xd9 - invalid */
7792
7793/** Opcode 0x0f 0xda - pminub Pq, Qq */
7794FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7795/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7796FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7797/* Opcode 0xf3 0x0f 0xda - invalid */
7798/* Opcode 0xf2 0x0f 0xda - invalid */
7799
7800/** Opcode 0x0f 0xdb - pand Pq, Qq */
7801FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7802/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7803FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7804/* Opcode 0xf3 0x0f 0xdb - invalid */
7805/* Opcode 0xf2 0x0f 0xdb - invalid */
7806
7807/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7808FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7809/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7810FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7811/* Opcode 0xf3 0x0f 0xdc - invalid */
7812/* Opcode 0xf2 0x0f 0xdc - invalid */
7813
7814/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7815FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7816/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7817FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7818/* Opcode 0xf3 0x0f 0xdd - invalid */
7819/* Opcode 0xf2 0x0f 0xdd - invalid */
7820
7821/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7822FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7823/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7824FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7825/* Opcode 0xf3 0x0f 0xde - invalid */
7826/* Opcode 0xf2 0x0f 0xde - invalid */
7827
7828/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7829FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7830/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7831FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7832/* Opcode 0xf3 0x0f 0xdf - invalid */
7833/* Opcode 0xf2 0x0f 0xdf - invalid */
7834
7835/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7836FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7837/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7838FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7839/* Opcode 0xf3 0x0f 0xe0 - invalid */
7840/* Opcode 0xf2 0x0f 0xe0 - invalid */
7841
7842/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7843FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7844/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7845FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7846/* Opcode 0xf3 0x0f 0xe1 - invalid */
7847/* Opcode 0xf2 0x0f 0xe1 - invalid */
7848
7849/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7850FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7851/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7852FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7853/* Opcode 0xf3 0x0f 0xe2 - invalid */
7854/* Opcode 0xf2 0x0f 0xe2 - invalid */
7855
7856/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7857FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7858/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7859FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7860/* Opcode 0xf3 0x0f 0xe3 - invalid */
7861/* Opcode 0xf2 0x0f 0xe3 - invalid */
7862
7863/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7864FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7865/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7866FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7867/* Opcode 0xf3 0x0f 0xe4 - invalid */
7868/* Opcode 0xf2 0x0f 0xe4 - invalid */
7869
7870/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7871FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7872/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7873FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7874/* Opcode 0xf3 0x0f 0xe5 - invalid */
7875/* Opcode 0xf2 0x0f 0xe5 - invalid */
7876
7877/* Opcode 0x0f 0xe6 - invalid */
7878/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7879FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7880/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7881FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7882/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7883FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7884
7885
7886/** Opcode 0x0f 0xe7. */
7887FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7888{
7889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7890 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7891 {
7892 /*
7893 * Register, memory.
7894 */
7895/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
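/* Both encodings are non-temporal (cache bypassing) store hints; the SSE
   form additionally requires a 16 byte aligned operand, which the
   IEM_MC_STORE_MEM_U128_ALIGN_SSE below enforces. */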
7896 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7897 {
7898
7899 case IEM_OP_PRF_SIZE_OP: /* SSE */
7900 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7901 IEM_MC_BEGIN(0, 2);
7902 IEM_MC_LOCAL(uint128_t, uSrc);
7903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7904
7905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7907 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7908 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7909
7910 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7911 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7912
7913 IEM_MC_ADVANCE_RIP();
7914 IEM_MC_END();
7915 break;
7916
7917 case 0: /* MMX */
7918 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7919 IEM_MC_BEGIN(0, 2);
7920 IEM_MC_LOCAL(uint64_t, uSrc);
7921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7922
7923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7925 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7926 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7927
7928 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7929 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7930
7931 IEM_MC_ADVANCE_RIP();
7932 IEM_MC_END();
7933 break;
7934
7935 default:
7936 return IEMOP_RAISE_INVALID_OPCODE();
7937 }
7938 }
7939 /* The register, register encoding is invalid. */
7940 else
7941 return IEMOP_RAISE_INVALID_OPCODE();
7942 return VINF_SUCCESS;
7943}
7944
7945
7946/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7947FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7948/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7949FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7950/* Opcode 0xf3 0x0f 0xe8 - invalid */
7951/* Opcode 0xf2 0x0f 0xe8 - invalid */
7952
7953/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7954FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7955/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7956FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7957/* Opcode 0xf3 0x0f 0xe9 - invalid */
7958/* Opcode 0xf2 0x0f 0xe9 - invalid */
7959
7960/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7961FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7962/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7963FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7964/* Opcode 0xf3 0x0f 0xea - invalid */
7965/* Opcode 0xf2 0x0f 0xea - invalid */
7966
7967/** Opcode 0x0f 0xeb - por Pq, Qq */
7968FNIEMOP_STUB(iemOp_por_Pq_Qq);
7969/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7970FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7971/* Opcode 0xf3 0x0f 0xeb - invalid */
7972/* Opcode 0xf2 0x0f 0xeb - invalid */
7973
7974/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7975FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7976/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7977FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7978/* Opcode 0xf3 0x0f 0xec - invalid */
7979/* Opcode 0xf2 0x0f 0xec - invalid */
7980
7981/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7982FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7983/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7984FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7985/* Opcode 0xf3 0x0f 0xed - invalid */
7986/* Opcode 0xf2 0x0f 0xed - invalid */
7987
7988/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7989FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7990/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7991FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7992/* Opcode 0xf3 0x0f 0xee - invalid */
7993/* Opcode 0xf2 0x0f 0xee - invalid */
7994
7995
7996/** Opcode 0x0f 0xef. */
7997FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7998{
7999 IEMOP_MNEMONIC(pxor, "pxor");
8000 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8001}
8002/* Opcode 0xf3 0x0f 0xef - invalid */
8003/* Opcode 0xf2 0x0f 0xef - invalid */
8004
8005/* Opcode 0x0f 0xf0 - invalid */
8006/* Opcode 0x66 0x0f 0xf0 - invalid */
8007/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
8008FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
8009
8010/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8011FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8012/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
8013FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
8014/* Opcode 0xf2 0x0f 0xf1 - invalid */
8015
8016/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8017FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8018/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
8019FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
8020/* Opcode 0xf2 0x0f 0xf2 - invalid */
8021
8022/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8023FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8024/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
8025FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
8026/* Opcode 0xf2 0x0f 0xf3 - invalid */
8027
8028/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8029FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8030/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
8031FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
8032/* Opcode 0xf2 0x0f 0xf4 - invalid */
8033
8034/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8035FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8036/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
8037FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
8038/* Opcode 0xf2 0x0f 0xf5 - invalid */
8039
8040/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8041FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8042/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
8043FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
8044/* Opcode 0xf2 0x0f 0xf6 - invalid */
8045
8046/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8047FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8048/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
8049FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
8050/* Opcode 0xf2 0x0f 0xf7 - invalid */
8051
8052/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8053FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8054/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
8055FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
8056/* Opcode 0xf2 0x0f 0xf8 - invalid */
8057
8058/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8059FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8060/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
8061FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
8062/* Opcode 0xf2 0x0f 0xf9 - invalid */
8063
8064/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8065FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8066/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
8067FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
8068/* Opcode 0xf2 0x0f 0xfa - invalid */
8069
8070/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8071FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8072/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
8073FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
8074/* Opcode 0xf2 0x0f 0xfb - invalid */
8075
8076/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8077FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8078/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
8079FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
8080/* Opcode 0xf2 0x0f 0xfc - invalid */
8081
8082/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8083FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8084/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
8085FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
8086/* Opcode 0xf2 0x0f 0xfd - invalid */
8087
8088/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8089FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8090/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
8091FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
8092/* Opcode 0xf2 0x0f 0xfe - invalid */
8093
8094
8095/** Opcode **** 0x0f 0xff - UD0 */
8096FNIEMOP_DEF(iemOp_ud0)
8097{
8098 IEMOP_MNEMONIC(ud0, "ud0");
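/* Intel decodes a ModR/M byte (and any effective address bytes) for ud0
   before raising #UD, while AMD raises #UD without consuming a ModR/M
   byte; hence the vendor check. */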
8099 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8100 {
8101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8102#ifndef TST_IEM_CHECK_MC
8103 RTGCPTR GCPtrEff;
8104 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8105 if (rcStrict != VINF_SUCCESS)
8106 return rcStrict;
8107#endif
8108 IEMOP_HLP_DONE_DECODING();
8109 }
8110 return IEMOP_RAISE_INVALID_OPCODE();
8111}
8112
8113
8114
8115/** Repeats a_fn four times. For decoding tables. */
8116#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
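/* E.g. IEMOP_X4(iemOp_Grp6) expands to four identical entries, one per
   prefix column (none, 0x66, 0xf3, 0xf2) of a table row. */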
8117
8118IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8119{
8120 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8121 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8122 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8123 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8124 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8125 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8126 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8127 /* 0x06 */ IEMOP_X4(iemOp_clts),
8128 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8129 /* 0x08 */ IEMOP_X4(iemOp_invd),
8130 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8131 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8132 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8133 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8134 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8135 /* 0x0e */ IEMOP_X4(iemOp_femms),
8136 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8137
8138 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8139 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8140 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8141 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8142 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8143 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8144 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8145 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8146 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8147 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8148 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8149 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8150 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8151 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8152 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8153 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8154
8155 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8156 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8157 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8158 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8159 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8160 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8161 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8162 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8163 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8164 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
8165 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8166 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8167 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8168 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8169 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8170 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8171
8172 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8173 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8174 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8175 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8176 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8177 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8178 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8179 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8180 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8181 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8182 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8183 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8184 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8185 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8186 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8187 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8188
8189 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8190 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8191 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8192 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8193 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8194 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8195 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8196 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8197 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8198 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8199 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8200 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8201 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8202 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8203 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8204 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8205
8206 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8207 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8208 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8209 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8210 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8211 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8212 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8213 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8214 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8215 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8216 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8217 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8218 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8219 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8220 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8221 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8222
8223 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8224 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8225 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8226 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8227 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8228 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8229 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8230 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8231 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8232 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8233 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8234 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8235 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8236 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8237 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8238 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8239
8240 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8241 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8242 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8243 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8244 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8245 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8246 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8247 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8248
8249 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8250 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8251 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8252 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8253 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8254 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8255 /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
8256 /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),
8257
8258 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8259 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8260 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8261 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8262 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8263 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8264 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8265 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8266 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8267 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8268 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8269 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8270 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8271 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8272 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8273 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8274
8275 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8276 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8277 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8278 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8279 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8280 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8281 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8282 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8283 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8284 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8285 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8286 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8287 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8288 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8289 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8290 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8291
8292 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8293 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8294 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8295 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8296 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8297 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8298 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8299 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8300 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8301 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8302 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8303 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8304 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8305 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8306 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8307 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8308
8309 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8310 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8311 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8312 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8313 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8314 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8315 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8316 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8317 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8318 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8319 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8320 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8321 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8322 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8323 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8324 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8325
8326 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8327 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8328 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8329 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8330 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8331 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8332 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8333 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8334 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8335 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8336 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8337 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8338 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8339 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8340 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8341 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8342
8343 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8344 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8345 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8346 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8347 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8348 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8349 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8350 /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
8351 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8352 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8353 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8354 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8355 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8356 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8357 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8358 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8359
8360 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8361 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8362 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8363 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8364 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8365 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8366 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8367 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8368 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8369 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8370 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8371 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8372 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8373 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8374 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8375 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8376
8377 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8378 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8379 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8380 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8381 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8382 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8383 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8384 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8385 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8386 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8387 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8388 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8389 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8390 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8391 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8392 /* 0xff */ IEMOP_X4(iemOp_ud0),
8393};
8394AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
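/*
 * Note! The two-byte map has four entries per opcode byte, one for each
 *       mandatory prefix state: index 0 = no prefix, 1 = 0x66, 2 = 0xf3,
 *       3 = 0xf2.  The dispatcher (iemOp_2byteEscape) indexes it as
 *       b * 4 + idxPrefix, so 0x0f 0xbc resolves to bsf in columns 0, 1
 *       and 3 and to tzcnt in column 2 (0xf3).
 */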
8395/** @} */
8396
8397
8398/** @name One byte opcodes.
8399 *
8400 * @{
8401 */
8402
8403/** Opcode 0x00. */
8404FNIEMOP_DEF(iemOp_add_Eb_Gb)
8405{
8406 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
8407 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
8408}
8409
8410
8411/** Opcode 0x01. */
8412FNIEMOP_DEF(iemOp_add_Ev_Gv)
8413{
8414 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
8415 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
8416}
8417
8418
8419/** Opcode 0x02. */
8420FNIEMOP_DEF(iemOp_add_Gb_Eb)
8421{
8422 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
8423 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
8424}
8425
8426
8427/** Opcode 0x03. */
8428FNIEMOP_DEF(iemOp_add_Gv_Ev)
8429{
8430 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
8431 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
8432}
8433
8434
8435/** Opcode 0x04. */
8436FNIEMOP_DEF(iemOp_add_Al_Ib)
8437{
8438 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
8439 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
8440}
8441
8442
8443/** Opcode 0x05. */
8444FNIEMOP_DEF(iemOp_add_eAX_Iz)
8445{
8446 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
8447 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
8448}
8449
8450
8451/** Opcode 0x06. */
8452FNIEMOP_DEF(iemOp_push_ES)
8453{
8454 IEMOP_MNEMONIC(push_es, "push es");
8455 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
8456}
8457
8458
8459/** Opcode 0x07. */
8460FNIEMOP_DEF(iemOp_pop_ES)
8461{
8462 IEMOP_MNEMONIC(pop_es, "pop es");
8463 IEMOP_HLP_NO_64BIT();
8464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8465 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
8466}
8467
8468
8469/** Opcode 0x08. */
8470FNIEMOP_DEF(iemOp_or_Eb_Gb)
8471{
8472 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
8473 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8474 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
8475}
8476
8477
8478/** Opcode 0x09. */
8479FNIEMOP_DEF(iemOp_or_Ev_Gv)
8480{
8481 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
8482 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8483 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
8484}
8485
8486
8487/** Opcode 0x0a. */
8488FNIEMOP_DEF(iemOp_or_Gb_Eb)
8489{
8490 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
8491 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8492 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
8493}
8494
8495
8496/** Opcode 0x0b. */
8497FNIEMOP_DEF(iemOp_or_Gv_Ev)
8498{
8499 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
8500 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8501 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
8502}
8503
8504
8505/** Opcode 0x0c. */
8506FNIEMOP_DEF(iemOp_or_Al_Ib)
8507{
8508 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
8509 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8510 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
8511}
8512
8513
8514/** Opcode 0x0d. */
8515FNIEMOP_DEF(iemOp_or_eAX_Iz)
8516{
8517 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
8518 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8519 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
8520}
8521
8522
8523/** Opcode 0x0e. */
8524FNIEMOP_DEF(iemOp_push_CS)
8525{
8526 IEMOP_MNEMONIC(push_cs, "push cs");
8527 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
8528}
8529
8530
8531/** Opcode 0x0f. */
8532FNIEMOP_DEF(iemOp_2byteEscape)
8533{
8534#ifdef VBOX_STRICT
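 /* One-shot sanity check (strict builds only) that the four-entries-per-opcode
    layout of g_apfnTwoByteMap is wired up as expected. */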
8535 static bool s_fTested = false;
8536 if (RT_LIKELY(s_fTested)) { /* likely */ }
8537 else
8538 {
8539 s_fTested = true;
8540 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
8541 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
8542 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
8543 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
8544 }
8545#endif
8546
8547 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8548
8549 /** @todo POP CS on 8086, undefined on 80186. */
8550 IEMOP_HLP_MIN_286();
8551 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
8552}
8553
8554/** Opcode 0x10. */
8555FNIEMOP_DEF(iemOp_adc_Eb_Gb)
8556{
8557 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
8558 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
8559}
8560
8561
8562/** Opcode 0x11. */
8563FNIEMOP_DEF(iemOp_adc_Ev_Gv)
8564{
8565 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
8566 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
8567}
8568
8569
8570/** Opcode 0x12. */
8571FNIEMOP_DEF(iemOp_adc_Gb_Eb)
8572{
8573 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
8574 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
8575}
8576
8577
8578/** Opcode 0x13. */
8579FNIEMOP_DEF(iemOp_adc_Gv_Ev)
8580{
8581 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
8582 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
8583}
8584
8585
8586/** Opcode 0x14. */
8587FNIEMOP_DEF(iemOp_adc_Al_Ib)
8588{
8589 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
8590 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
8591}
8592
8593
8594/** Opcode 0x15. */
8595FNIEMOP_DEF(iemOp_adc_eAX_Iz)
8596{
8597 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
8598 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
8599}
8600
8601
8602/** Opcode 0x16. */
8603FNIEMOP_DEF(iemOp_push_SS)
8604{
8605 IEMOP_MNEMONIC(push_ss, "push ss");
8606 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
8607}
8608
8609
8610/** Opcode 0x17. */
8611FNIEMOP_DEF(iemOp_pop_SS)
8612{
8613 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
8614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8615 IEMOP_HLP_NO_64BIT();
8616 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
8617}
8618
8619
8620/** Opcode 0x18. */
8621FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
8622{
8623 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
8624 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
8625}
8626
8627
8628/** Opcode 0x19. */
8629FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
8630{
8631 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
8632 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
8633}
8634
8635
8636/** Opcode 0x1a. */
8637FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
8638{
8639 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
8640 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
8641}
8642
8643
8644/** Opcode 0x1b. */
8645FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
8646{
8647 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
8648 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
8649}
8650
8651
8652/** Opcode 0x1c. */
8653FNIEMOP_DEF(iemOp_sbb_Al_Ib)
8654{
8655 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
8656 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
8657}
8658
8659
8660/** Opcode 0x1d. */
8661FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
8662{
8663 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
8664 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
8665}
8666
8667
8668/** Opcode 0x1e. */
8669FNIEMOP_DEF(iemOp_push_DS)
8670{
8671 IEMOP_MNEMONIC(push_ds, "push ds");
8672 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
8673}
8674
8675
8676/** Opcode 0x1f. */
8677FNIEMOP_DEF(iemOp_pop_DS)
8678{
8679 IEMOP_MNEMONIC(pop_ds, "pop ds");
8680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8681 IEMOP_HLP_NO_64BIT();
8682 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
8683}
8684
8685
8686/** Opcode 0x20. */
8687FNIEMOP_DEF(iemOp_and_Eb_Gb)
8688{
8689 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
8690 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8691 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
8692}
8693
8694
8695/** Opcode 0x21. */
8696FNIEMOP_DEF(iemOp_and_Ev_Gv)
8697{
8698 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
8699 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8700 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
8701}
8702
8703
8704/** Opcode 0x22. */
8705FNIEMOP_DEF(iemOp_and_Gb_Eb)
8706{
8707 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
8708 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8709 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
8710}
8711
8712
8713/** Opcode 0x23. */
8714FNIEMOP_DEF(iemOp_and_Gv_Ev)
8715{
8716 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
8717 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8718 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
8719}
8720
8721
8722/** Opcode 0x24. */
8723FNIEMOP_DEF(iemOp_and_Al_Ib)
8724{
8725 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
8726 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8727 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
8728}
8729
8730
8731/** Opcode 0x25. */
8732FNIEMOP_DEF(iemOp_and_eAX_Iz)
8733{
8734 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
8735 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8736 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
8737}
8738
8739
8740/** Opcode 0x26. */
8741FNIEMOP_DEF(iemOp_seg_ES)
8742{
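 /* A segment override has no opcode of its own: record it and continue
    decoding the next byte via the one-byte map. */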
8743 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
8744 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
8745 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
8746
8747 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8748 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8749}
8750
8751
8752/** Opcode 0x27. */
8753FNIEMOP_DEF(iemOp_daa)
8754{
8755 IEMOP_MNEMONIC(daa_AL, "daa AL");
8756 IEMOP_HLP_NO_64BIT();
8757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8758 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8759 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
8760}
8761
8762
8763/** Opcode 0x28. */
8764FNIEMOP_DEF(iemOp_sub_Eb_Gb)
8765{
8766 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
8767 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
8768}
8769
8770
8771/** Opcode 0x29. */
8772FNIEMOP_DEF(iemOp_sub_Ev_Gv)
8773{
8774 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
8775 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
8776}
8777
8778
8779/** Opcode 0x2a. */
8780FNIEMOP_DEF(iemOp_sub_Gb_Eb)
8781{
8782 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
8783 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
8784}
8785
8786
8787/** Opcode 0x2b. */
8788FNIEMOP_DEF(iemOp_sub_Gv_Ev)
8789{
8790 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
8791 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
8792}
8793
8794
8795/** Opcode 0x2c. */
8796FNIEMOP_DEF(iemOp_sub_Al_Ib)
8797{
8798 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
8799 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
8800}
8801
8802
8803/** Opcode 0x2d. */
8804FNIEMOP_DEF(iemOp_sub_eAX_Iz)
8805{
8806 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
8807 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
8808}
8809
8810
8811/** Opcode 0x2e. */
8812FNIEMOP_DEF(iemOp_seg_CS)
8813{
8814 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
8815 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
8816 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
8817
8818 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8819 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8820}
8821
8822
8823/** Opcode 0x2f. */
8824FNIEMOP_DEF(iemOp_das)
8825{
8826 IEMOP_MNEMONIC(das_AL, "das AL");
8827 IEMOP_HLP_NO_64BIT();
8828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8829 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8830 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
8831}
8832
8833
8834/** Opcode 0x30. */
8835FNIEMOP_DEF(iemOp_xor_Eb_Gb)
8836{
8837 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
8838 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8839 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
8840}
8841
8842
8843/** Opcode 0x31. */
8844FNIEMOP_DEF(iemOp_xor_Ev_Gv)
8845{
8846 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
8847 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8848 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
8849}
8850
8851
8852/** Opcode 0x32. */
8853FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8854{
8855 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8856 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8857 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8858}
8859
8860
8861/** Opcode 0x33. */
8862FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8863{
8864 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8865 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8866 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8867}
8868
8869
8870/** Opcode 0x34. */
8871FNIEMOP_DEF(iemOp_xor_Al_Ib)
8872{
8873 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8876}
8877
8878
8879/** Opcode 0x35. */
8880FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8881{
8882 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8883 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8884 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8885}
8886
8887
8888/** Opcode 0x36. */
8889FNIEMOP_DEF(iemOp_seg_SS)
8890{
8891 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8892 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8893 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8894
8895 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8896 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8897}
8898
8899
8900/** Opcode 0x37. */
8901FNIEMOP_STUB(iemOp_aaa);
8902
8903
8904/** Opcode 0x38. */
8905FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8906{
8907 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8908 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8909}
8910
8911
8912/** Opcode 0x39. */
8913FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8914{
8915 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8916 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8917}
8918
8919
8920/** Opcode 0x3a. */
8921FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8922{
8923 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8924 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8925}
8926
8927
8928/** Opcode 0x3b. */
8929FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8930{
8931 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8932 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8933}
8934
8935
8936/** Opcode 0x3c. */
8937FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8938{
8939 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8940 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8941}
8942
8943
8944/** Opcode 0x3d. */
8945FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8946{
8947 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8948 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8949}
8950
8951
8952/** Opcode 0x3e. */
8953FNIEMOP_DEF(iemOp_seg_DS)
8954{
8955 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8956 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8957 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8958
8959 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8960 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8961}
8962
8963
8964/** Opcode 0x3f. */
8965FNIEMOP_STUB(iemOp_aas);
8966
8967/**
8968 * Common 'inc/dec/not/neg register' helper.
8969 */
8970FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8971{
8972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8973 switch (pVCpu->iem.s.enmEffOpSize)
8974 {
8975 case IEMMODE_16BIT:
8976 IEM_MC_BEGIN(2, 0);
8977 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8978 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8979 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8980 IEM_MC_REF_EFLAGS(pEFlags);
8981 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8982 IEM_MC_ADVANCE_RIP();
8983 IEM_MC_END();
8984 return VINF_SUCCESS;
8985
8986 case IEMMODE_32BIT:
8987 IEM_MC_BEGIN(2, 0);
8988 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8989 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8990 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8991 IEM_MC_REF_EFLAGS(pEFlags);
8992 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8993 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8994 IEM_MC_ADVANCE_RIP();
8995 IEM_MC_END();
8996 return VINF_SUCCESS;
8997
8998 case IEMMODE_64BIT:
8999 IEM_MC_BEGIN(2, 0);
9000 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9001 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9002 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9003 IEM_MC_REF_EFLAGS(pEFlags);
9004 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
9005 IEM_MC_ADVANCE_RIP();
9006 IEM_MC_END();
9007 return VINF_SUCCESS;
9008 }
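 /* Not reached; all IEMMODE values are handled above.  The return below just
    keeps the compiler quiet. */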
9009 return VINF_SUCCESS;
9010}
9011
9012
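/*
 * Opcodes 0x40 thru 0x4f double as REX prefixes in 64-bit mode, the low
 * nibble carrying the B (bit 0), X (bit 1), R (bit 2) and W (bit 3) bits
 * that extend the ModRM/SIB register fields and the operand size.  Outside
 * 64-bit mode they remain the classic inc/dec register instructions.
 */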
9013/** Opcode 0x40. */
9014FNIEMOP_DEF(iemOp_inc_eAX)
9015{
9016 /*
9017 * This is a REX prefix in 64-bit mode.
9018 */
9019 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9020 {
9021 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
9022 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
9023
9024 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9025 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9026 }
9027
9028 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
9029 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
9030}
9031
9032
9033/** Opcode 0x41. */
9034FNIEMOP_DEF(iemOp_inc_eCX)
9035{
9036 /*
9037 * This is a REX prefix in 64-bit mode.
9038 */
9039 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9040 {
9041 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
9042 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
9043 pVCpu->iem.s.uRexB = 1 << 3;
9044
9045 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9046 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9047 }
9048
9049 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
9050 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
9051}
9052
9053
9054/** Opcode 0x42. */
9055FNIEMOP_DEF(iemOp_inc_eDX)
9056{
9057 /*
9058 * This is a REX prefix in 64-bit mode.
9059 */
9060 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9061 {
9062 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
9063 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
9064 pVCpu->iem.s.uRexIndex = 1 << 3;
9065
9066 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9067 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9068 }
9069
9070 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
9071 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
9072}
9073
9074
9075
9076/** Opcode 0x43. */
9077FNIEMOP_DEF(iemOp_inc_eBX)
9078{
9079 /*
9080 * This is a REX prefix in 64-bit mode.
9081 */
9082 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9083 {
9084 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
9085 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
9086 pVCpu->iem.s.uRexB = 1 << 3;
9087 pVCpu->iem.s.uRexIndex = 1 << 3;
9088
9089 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9090 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9091 }
9092
9093 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
9094 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
9095}
9096
9097
9098/** Opcode 0x44. */
9099FNIEMOP_DEF(iemOp_inc_eSP)
9100{
9101 /*
9102 * This is a REX prefix in 64-bit mode.
9103 */
9104 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9105 {
9106 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
9107 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
9108 pVCpu->iem.s.uRexReg = 1 << 3;
9109
9110 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9111 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9112 }
9113
9114 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
9115 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
9116}
9117
9118
9119/** Opcode 0x45. */
9120FNIEMOP_DEF(iemOp_inc_eBP)
9121{
9122 /*
9123 * This is a REX prefix in 64-bit mode.
9124 */
9125 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9126 {
9127 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
9128 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
9129 pVCpu->iem.s.uRexReg = 1 << 3;
9130 pVCpu->iem.s.uRexB = 1 << 3;
9131
9132 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9133 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9134 }
9135
9136 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
9137 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
9138}
9139
9140
9141/** Opcode 0x46. */
9142FNIEMOP_DEF(iemOp_inc_eSI)
9143{
9144 /*
9145 * This is a REX prefix in 64-bit mode.
9146 */
9147 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9148 {
9149 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
9150 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
9151 pVCpu->iem.s.uRexReg = 1 << 3;
9152 pVCpu->iem.s.uRexIndex = 1 << 3;
9153
9154 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9155 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9156 }
9157
9158 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
9159 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
9160}
9161
9162
9163/** Opcode 0x47. */
9164FNIEMOP_DEF(iemOp_inc_eDI)
9165{
9166 /*
9167 * This is a REX prefix in 64-bit mode.
9168 */
9169 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9170 {
9171 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
9172 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
9173 pVCpu->iem.s.uRexReg = 1 << 3;
9174 pVCpu->iem.s.uRexB = 1 << 3;
9175 pVCpu->iem.s.uRexIndex = 1 << 3;
9176
9177 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9178 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9179 }
9180
9181 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
9182 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
9183}
9184
9185
9186/** Opcode 0x48. */
9187FNIEMOP_DEF(iemOp_dec_eAX)
9188{
9189 /*
9190 * This is a REX prefix in 64-bit mode.
9191 */
9192 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9193 {
9194 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
9195 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
9196 iemRecalEffOpSize(pVCpu);
9197
9198 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9199 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9200 }
9201
9202 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
9203 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
9204}
9205
9206
9207/** Opcode 0x49. */
9208FNIEMOP_DEF(iemOp_dec_eCX)
9209{
9210 /*
9211 * This is a REX prefix in 64-bit mode.
9212 */
9213 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9214 {
9215 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
9216 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9217 pVCpu->iem.s.uRexB = 1 << 3;
9218 iemRecalEffOpSize(pVCpu);
9219
9220 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9221 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9222 }
9223
9224 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
9225 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
9226}
9227
9228
9229/** Opcode 0x4a. */
9230FNIEMOP_DEF(iemOp_dec_eDX)
9231{
9232 /*
9233 * This is a REX prefix in 64-bit mode.
9234 */
9235 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9236 {
9237 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
9238 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9239 pVCpu->iem.s.uRexIndex = 1 << 3;
9240 iemRecalEffOpSize(pVCpu);
9241
9242 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9243 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9244 }
9245
9246 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
9247 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
9248}
9249
9250
9251/** Opcode 0x4b. */
9252FNIEMOP_DEF(iemOp_dec_eBX)
9253{
9254 /*
9255 * This is a REX prefix in 64-bit mode.
9256 */
9257 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9258 {
9259 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
9260 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9261 pVCpu->iem.s.uRexB = 1 << 3;
9262 pVCpu->iem.s.uRexIndex = 1 << 3;
9263 iemRecalEffOpSize(pVCpu);
9264
9265 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9266 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9267 }
9268
9269 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
9270 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
9271}
9272
9273
9274/** Opcode 0x4c. */
9275FNIEMOP_DEF(iemOp_dec_eSP)
9276{
9277 /*
9278 * This is a REX prefix in 64-bit mode.
9279 */
9280 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9281 {
9282 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
9283 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
9284 pVCpu->iem.s.uRexReg = 1 << 3;
9285 iemRecalEffOpSize(pVCpu);
9286
9287 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9288 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9289 }
9290
9291 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
9292 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
9293}
9294
9295
9296/** Opcode 0x4d. */
9297FNIEMOP_DEF(iemOp_dec_eBP)
9298{
9299 /*
9300 * This is a REX prefix in 64-bit mode.
9301 */
9302 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9303 {
9304 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
9305 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9306 pVCpu->iem.s.uRexReg = 1 << 3;
9307 pVCpu->iem.s.uRexB = 1 << 3;
9308 iemRecalEffOpSize(pVCpu);
9309
9310 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9311 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9312 }
9313
9314 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
9315 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
9316}
9317
9318
9319/** Opcode 0x4e. */
9320FNIEMOP_DEF(iemOp_dec_eSI)
9321{
9322 /*
9323 * This is a REX prefix in 64-bit mode.
9324 */
9325 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9326 {
9327 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
9328 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9329 pVCpu->iem.s.uRexReg = 1 << 3;
9330 pVCpu->iem.s.uRexIndex = 1 << 3;
9331 iemRecalEffOpSize(pVCpu);
9332
9333 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9334 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9335 }
9336
9337 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
9338 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
9339}
9340
9341
9342/** Opcode 0x4f. */
9343FNIEMOP_DEF(iemOp_dec_eDI)
9344{
9345 /*
9346 * This is a REX prefix in 64-bit mode.
9347 */
9348 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9349 {
9350 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
9351 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9352 pVCpu->iem.s.uRexReg = 1 << 3;
9353 pVCpu->iem.s.uRexB = 1 << 3;
9354 pVCpu->iem.s.uRexIndex = 1 << 3;
9355 iemRecalEffOpSize(pVCpu);
9356
9357 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9358 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9359 }
9360
9361 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
9362 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
9363}
9364
9365
9366/**
9367 * Common 'push register' helper.
9368 */
9369FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9370{
9371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9372 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9373 {
9374 iReg |= pVCpu->iem.s.uRexB;
9375 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9376 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9377 }
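 /* Note! In 64-bit mode PUSH defaults to a 64-bit operand; 0x66 selects a
    16-bit push and a 32-bit push is not encodable. */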
9378
9379 switch (pVCpu->iem.s.enmEffOpSize)
9380 {
9381 case IEMMODE_16BIT:
9382 IEM_MC_BEGIN(0, 1);
9383 IEM_MC_LOCAL(uint16_t, u16Value);
9384 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9385 IEM_MC_PUSH_U16(u16Value);
9386 IEM_MC_ADVANCE_RIP();
9387 IEM_MC_END();
9388 break;
9389
9390 case IEMMODE_32BIT:
9391 IEM_MC_BEGIN(0, 1);
9392 IEM_MC_LOCAL(uint32_t, u32Value);
9393 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9394 IEM_MC_PUSH_U32(u32Value);
9395 IEM_MC_ADVANCE_RIP();
9396 IEM_MC_END();
9397 break;
9398
9399 case IEMMODE_64BIT:
9400 IEM_MC_BEGIN(0, 1);
9401 IEM_MC_LOCAL(uint64_t, u64Value);
9402 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9403 IEM_MC_PUSH_U64(u64Value);
9404 IEM_MC_ADVANCE_RIP();
9405 IEM_MC_END();
9406 break;
9407 }
9408
9409 return VINF_SUCCESS;
9410}
9411
9412
9413/** Opcode 0x50. */
9414FNIEMOP_DEF(iemOp_push_eAX)
9415{
9416 IEMOP_MNEMONIC(push_rAX, "push rAX");
9417 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
9418}
9419
9420
9421/** Opcode 0x51. */
9422FNIEMOP_DEF(iemOp_push_eCX)
9423{
9424 IEMOP_MNEMONIC(push_rCX, "push rCX");
9425 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
9426}
9427
9428
9429/** Opcode 0x52. */
9430FNIEMOP_DEF(iemOp_push_eDX)
9431{
9432 IEMOP_MNEMONIC(push_rDX, "push rDX");
9433 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
9434}
9435
9436
9437/** Opcode 0x53. */
9438FNIEMOP_DEF(iemOp_push_eBX)
9439{
9440 IEMOP_MNEMONIC(push_rBX, "push rBX");
9441 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
9442}
9443
9444
9445/** Opcode 0x54. */
9446FNIEMOP_DEF(iemOp_push_eSP)
9447{
9448 IEMOP_MNEMONIC(push_rSP, "push rSP");
9449 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9450 {
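 /* The 8086/8088 pushes the updated (decremented) SP value, whereas the
    80186 and later push the original value; the MC block below ends the
    function for the 8086 case, so only later CPUs reach the common path. */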
9451 IEM_MC_BEGIN(0, 1);
9452 IEM_MC_LOCAL(uint16_t, u16Value);
9453 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9454 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9455 IEM_MC_PUSH_U16(u16Value);
9456 IEM_MC_ADVANCE_RIP();
9457 IEM_MC_END();
9458 }
9459 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9460}
9461
9462
9463/** Opcode 0x55. */
9464FNIEMOP_DEF(iemOp_push_eBP)
9465{
9466 IEMOP_MNEMONIC(push_rBP, "push rBP");
9467 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
9468}
9469
9470
9471/** Opcode 0x56. */
9472FNIEMOP_DEF(iemOp_push_eSI)
9473{
9474 IEMOP_MNEMONIC(push_rSI, "push rSI");
9475 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
9476}
9477
9478
9479/** Opcode 0x57. */
9480FNIEMOP_DEF(iemOp_push_eDI)
9481{
9482 IEMOP_MNEMONIC(push_rDI, "push rDI");
9483 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
9484}
9485
9486
9487/**
9488 * Common 'pop register' helper.
9489 */
9490FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9491{
9492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9493 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9494 {
9495 iReg |= pVCpu->iem.s.uRexB;
9496 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9497 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9498 }
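 /* Note! A 32-bit pop in 64-bit mode zero-extends into the full 64-bit
    register, hence the CLEAR_HIGH below. */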
9499
9500 switch (pVCpu->iem.s.enmEffOpSize)
9501 {
9502 case IEMMODE_16BIT:
9503 IEM_MC_BEGIN(0, 1);
9504 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9505 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9506 IEM_MC_POP_U16(pu16Dst);
9507 IEM_MC_ADVANCE_RIP();
9508 IEM_MC_END();
9509 break;
9510
9511 case IEMMODE_32BIT:
9512 IEM_MC_BEGIN(0, 1);
9513 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9514 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9515 IEM_MC_POP_U32(pu32Dst);
9516 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase */
9517 IEM_MC_ADVANCE_RIP();
9518 IEM_MC_END();
9519 break;
9520
9521 case IEMMODE_64BIT:
9522 IEM_MC_BEGIN(0, 1);
9523 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9524 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9525 IEM_MC_POP_U64(pu64Dst);
9526 IEM_MC_ADVANCE_RIP();
9527 IEM_MC_END();
9528 break;
9529 }
9530
9531 return VINF_SUCCESS;
9532}
9533
9534
9535/** Opcode 0x58. */
9536FNIEMOP_DEF(iemOp_pop_eAX)
9537{
9538 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
9539 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
9540}
9541
9542
9543/** Opcode 0x59. */
9544FNIEMOP_DEF(iemOp_pop_eCX)
9545{
9546 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
9547 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
9548}
9549
9550
9551/** Opcode 0x5a. */
9552FNIEMOP_DEF(iemOp_pop_eDX)
9553{
9554 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
9555 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
9556}
9557
9558
9559/** Opcode 0x5b. */
9560FNIEMOP_DEF(iemOp_pop_eBX)
9561{
9562 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
9563 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
9564}
9565
9566
9567/** Opcode 0x5c. */
9568FNIEMOP_DEF(iemOp_pop_eSP)
9569{
9570 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
9571 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9572 {
9573 if (pVCpu->iem.s.uRexB)
9574 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
9575 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9576 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9577 }
9578
9579 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
9580 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
9581 /** @todo add testcase for this instruction. */
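 /* Unlike iemOpCommonPopGReg, the value is popped into a local and then
    written to rSP explicitly, keeping the pop's own stack-pointer update
    separate from the destination write-back. */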
9582 switch (pVCpu->iem.s.enmEffOpSize)
9583 {
9584 case IEMMODE_16BIT:
9585 IEM_MC_BEGIN(0, 1);
9586 IEM_MC_LOCAL(uint16_t, u16Dst);
9587 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
9588 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
9589 IEM_MC_ADVANCE_RIP();
9590 IEM_MC_END();
9591 break;
9592
9593 case IEMMODE_32BIT:
9594 IEM_MC_BEGIN(0, 1);
9595 IEM_MC_LOCAL(uint32_t, u32Dst);
9596 IEM_MC_POP_U32(&u32Dst);
9597 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
9598 IEM_MC_ADVANCE_RIP();
9599 IEM_MC_END();
9600 break;
9601
9602 case IEMMODE_64BIT:
9603 IEM_MC_BEGIN(0, 1);
9604 IEM_MC_LOCAL(uint64_t, u64Dst);
9605 IEM_MC_POP_U64(&u64Dst);
9606 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
9607 IEM_MC_ADVANCE_RIP();
9608 IEM_MC_END();
9609 break;
9610 }
9611
9612 return VINF_SUCCESS;
9613}
9614
9615
9616/** Opcode 0x5d. */
9617FNIEMOP_DEF(iemOp_pop_eBP)
9618{
9619 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
9620 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
9621}
9622
9623
9624/** Opcode 0x5e. */
9625FNIEMOP_DEF(iemOp_pop_eSI)
9626{
9627 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
9628 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
9629}
9630
9631
9632/** Opcode 0x5f. */
9633FNIEMOP_DEF(iemOp_pop_eDI)
9634{
9635 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
9636 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
9637}
9638
9639
9640/** Opcode 0x60. */
9641FNIEMOP_DEF(iemOp_pusha)
9642{
9643 IEMOP_MNEMONIC(pusha, "pusha");
9644 IEMOP_HLP_MIN_186();
9645 IEMOP_HLP_NO_64BIT();
9646 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9647 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
9648 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9649 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
9650}
9651
9652
9653/** Opcode 0x61. */
9654FNIEMOP_DEF(iemOp_popa)
9655{
9656 IEMOP_MNEMONIC(popa, "popa");
9657 IEMOP_HLP_MIN_186();
9658 IEMOP_HLP_NO_64BIT();
9659 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9660 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
9661 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9662 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
9663}
9664
9665
9666/** Opcode 0x62. */
9667FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
9668// IEMOP_HLP_MIN_186();
9669
9670
9671/** Opcode 0x63 - non-64-bit modes. */
9672FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
9673{
9674 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
9675 IEMOP_HLP_MIN_286();
9676 IEMOP_HLP_NO_REAL_OR_V86_MODE();
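 /* ARPL: if the destination selector's RPL (bits 1:0) is lower than the
    source's, it is raised to match and ZF is set; otherwise ZF is cleared. */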
9677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9678
9679 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9680 {
9681 /* Register */
9682 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9683 IEM_MC_BEGIN(3, 0);
9684 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9685 IEM_MC_ARG(uint16_t, u16Src, 1);
9686 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9687
9688 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9689 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
9690 IEM_MC_REF_EFLAGS(pEFlags);
9691 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9692
9693 IEM_MC_ADVANCE_RIP();
9694 IEM_MC_END();
9695 }
9696 else
9697 {
9698 /* Memory */
9699 IEM_MC_BEGIN(3, 2);
9700 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9701 IEM_MC_ARG(uint16_t, u16Src, 1);
9702 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9704
9705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9706 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9707 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9708 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9709 IEM_MC_FETCH_EFLAGS(EFlags);
9710 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9711
9712 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9713 IEM_MC_COMMIT_EFLAGS(EFlags);
9714 IEM_MC_ADVANCE_RIP();
9715 IEM_MC_END();
9716 }
9717 return VINF_SUCCESS;
9718
9719}
9720
9721
9722/** Opcode 0x63.
9723 * @note This is a weird one. It works like a regular move instruction if
9724 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
9725 * @todo This definitely needs a testcase to verify the odd cases. */
9726FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9727{
9728 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
9729
9730 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
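 /* E.g. 48 63 c1: movsxd rax, ecx - sign-extends ECX into RAX. */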
9731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9732
9733 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9734 {
9735 /*
9736 * Register to register.
9737 */
9738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9739 IEM_MC_BEGIN(0, 1);
9740 IEM_MC_LOCAL(uint64_t, u64Value);
9741 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9742 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9743 IEM_MC_ADVANCE_RIP();
9744 IEM_MC_END();
9745 }
9746 else
9747 {
9748 /*
9749 * We're loading a register from memory.
9750 */
9751 IEM_MC_BEGIN(0, 2);
9752 IEM_MC_LOCAL(uint64_t, u64Value);
9753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9756 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9757 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9758 IEM_MC_ADVANCE_RIP();
9759 IEM_MC_END();
9760 }
9761 return VINF_SUCCESS;
9762}
9763
9764
9765/** Opcode 0x64. */
9766FNIEMOP_DEF(iemOp_seg_FS)
9767{
9768 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9769 IEMOP_HLP_MIN_386();
9770
9771 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9772 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9773
9774 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9775 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9776}
9777
9778
9779/** Opcode 0x65. */
9780FNIEMOP_DEF(iemOp_seg_GS)
9781{
9782 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9783 IEMOP_HLP_MIN_386();
9784
9785 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9786 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9787
9788 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9789 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9790}
9791
9792
9793/** Opcode 0x66. */
9794FNIEMOP_DEF(iemOp_op_size)
9795{
9796 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9797 IEMOP_HLP_MIN_386();
9798
9799 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9800 iemRecalEffOpSize(pVCpu);
9801
9802 /* For the 4 entry opcode tables, the operand size prefix doesn't count
9803 when REPZ or REPNZ are present. */
9804 if (pVCpu->iem.s.idxPrefix == 0)
9805 pVCpu->iem.s.idxPrefix = 1;
9806
9807 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9808 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9809}
9810
9811
9812/** Opcode 0x67. */
9813FNIEMOP_DEF(iemOp_addr_size)
9814{
9815 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9816 IEMOP_HLP_MIN_386();
9817
9818 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
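 /* The prefix flips to the other address size of the current mode; note that
    16-bit addressing cannot be selected from 64-bit mode (0x67 gives 32-bit
    there). */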
9819 switch (pVCpu->iem.s.enmDefAddrMode)
9820 {
9821 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9822 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9823 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9824 default: AssertFailed();
9825 }
9826
9827 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9828 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9829}
9830
9831
9832/** Opcode 0x68. */
9833FNIEMOP_DEF(iemOp_push_Iz)
9834{
9835 IEMOP_MNEMONIC(push_Iz, "push Iz");
9836 IEMOP_HLP_MIN_186();
9837 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9838 switch (pVCpu->iem.s.enmEffOpSize)
9839 {
9840 case IEMMODE_16BIT:
9841 {
9842 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9844 IEM_MC_BEGIN(0,0);
9845 IEM_MC_PUSH_U16(u16Imm);
9846 IEM_MC_ADVANCE_RIP();
9847 IEM_MC_END();
9848 return VINF_SUCCESS;
9849 }
9850
9851 case IEMMODE_32BIT:
9852 {
9853 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9855 IEM_MC_BEGIN(0,0);
9856 IEM_MC_PUSH_U32(u32Imm);
9857 IEM_MC_ADVANCE_RIP();
9858 IEM_MC_END();
9859 return VINF_SUCCESS;
9860 }
9861
9862 case IEMMODE_64BIT:
9863 {
9864 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9866 IEM_MC_BEGIN(0,0);
9867 IEM_MC_PUSH_U64(u64Imm);
9868 IEM_MC_ADVANCE_RIP();
9869 IEM_MC_END();
9870 return VINF_SUCCESS;
9871 }
9872
9873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9874 }
9875}
9876
9877
9878/** Opcode 0x69. */
9879FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9880{
9881 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9882 IEMOP_HLP_MIN_186();
9883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
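 /* Only CF and OF are architecturally defined after IMUL; SF, ZF, AF and PF
    are left undefined (see the verification mask above). */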
9885
9886 switch (pVCpu->iem.s.enmEffOpSize)
9887 {
9888 case IEMMODE_16BIT:
9889 {
9890 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9891 {
9892 /* register operand */
9893 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9895
9896 IEM_MC_BEGIN(3, 1);
9897 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9898 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9899 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9900 IEM_MC_LOCAL(uint16_t, u16Tmp);
9901
9902 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9903 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9904 IEM_MC_REF_EFLAGS(pEFlags);
9905 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9906 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9907
9908 IEM_MC_ADVANCE_RIP();
9909 IEM_MC_END();
9910 }
9911 else
9912 {
9913 /* memory operand */
9914 IEM_MC_BEGIN(3, 2);
9915 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9916 IEM_MC_ARG(uint16_t, u16Src, 1);
9917 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9918 IEM_MC_LOCAL(uint16_t, u16Tmp);
9919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9920
9921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
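 /* The last argument is the number of immediate bytes still to follow the
    ModRM bytes, so that 64-bit RIP-relative addressing can be resolved
    against the end of the instruction. */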
9922 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9923 IEM_MC_ASSIGN(u16Src, u16Imm);
9924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9925 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9926 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9927 IEM_MC_REF_EFLAGS(pEFlags);
9928 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9929 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9930
9931 IEM_MC_ADVANCE_RIP();
9932 IEM_MC_END();
9933 }
9934 return VINF_SUCCESS;
9935 }
9936
9937 case IEMMODE_32BIT:
9938 {
9939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9940 {
9941 /* register operand */
9942 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9944
9945 IEM_MC_BEGIN(3, 1);
9946 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9947 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9949 IEM_MC_LOCAL(uint32_t, u32Tmp);
9950
9951 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9952 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9953 IEM_MC_REF_EFLAGS(pEFlags);
9954 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9955 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9956
9957 IEM_MC_ADVANCE_RIP();
9958 IEM_MC_END();
9959 }
9960 else
9961 {
9962 /* memory operand */
9963 IEM_MC_BEGIN(3, 2);
9964 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9965 IEM_MC_ARG(uint32_t, u32Src, 1);
9966 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9967 IEM_MC_LOCAL(uint32_t, u32Tmp);
9968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9969
9970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9971 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9972 IEM_MC_ASSIGN(u32Src, u32Imm);
9973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9974 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9975 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9976 IEM_MC_REF_EFLAGS(pEFlags);
9977 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9978 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9979
9980 IEM_MC_ADVANCE_RIP();
9981 IEM_MC_END();
9982 }
9983 return VINF_SUCCESS;
9984 }
9985
9986 case IEMMODE_64BIT:
9987 {
9988 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9989 {
9990 /* register operand */
9991 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9993
9994 IEM_MC_BEGIN(3, 1);
9995 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9996 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9997 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9998 IEM_MC_LOCAL(uint64_t, u64Tmp);
9999
10000 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10001 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10002 IEM_MC_REF_EFLAGS(pEFlags);
10003 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10004 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10005
10006 IEM_MC_ADVANCE_RIP();
10007 IEM_MC_END();
10008 }
10009 else
10010 {
10011 /* memory operand */
10012 IEM_MC_BEGIN(3, 2);
10013 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10014 IEM_MC_ARG(uint64_t, u64Src, 1);
10015 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10016 IEM_MC_LOCAL(uint64_t, u64Tmp);
10017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10018
10019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10020 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10021 IEM_MC_ASSIGN(u64Src, u64Imm);
10022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10023 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10024 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10025 IEM_MC_REF_EFLAGS(pEFlags);
10026 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10027 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10028
10029 IEM_MC_ADVANCE_RIP();
10030 IEM_MC_END();
10031 }
10032 return VINF_SUCCESS;
10033 }
10034 }
10035 AssertFailedReturn(VERR_IEM_IPE_9);
10036}
10037
10038
10039/** Opcode 0x6a. */
10040FNIEMOP_DEF(iemOp_push_Ib)
10041{
10042 IEMOP_MNEMONIC(push_Ib, "push Ib");
10043 IEMOP_HLP_MIN_186();
10044 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10046 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10047
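 /* The byte immediate is sign-extended to the effective operand size before
    being pushed (note the single i8Imm feeding all three push widths). */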
10048 IEM_MC_BEGIN(0,0);
10049 switch (pVCpu->iem.s.enmEffOpSize)
10050 {
10051 case IEMMODE_16BIT:
10052 IEM_MC_PUSH_U16(i8Imm);
10053 break;
10054 case IEMMODE_32BIT:
10055 IEM_MC_PUSH_U32(i8Imm);
10056 break;
10057 case IEMMODE_64BIT:
10058 IEM_MC_PUSH_U64(i8Imm);
10059 break;
10060 }
10061 IEM_MC_ADVANCE_RIP();
10062 IEM_MC_END();
10063 return VINF_SUCCESS;
10064}
10065
10066
10067/** Opcode 0x6b. */
10068FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
10069{
10070 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
10071 IEMOP_HLP_MIN_186();
10072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10073 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10074
10075 switch (pVCpu->iem.s.enmEffOpSize)
10076 {
10077 case IEMMODE_16BIT:
10078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10079 {
10080 /* register operand */
10081 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10083
10084 IEM_MC_BEGIN(3, 1);
10085 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10086 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
10087 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10088 IEM_MC_LOCAL(uint16_t, u16Tmp);
10089
10090 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10091 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10092 IEM_MC_REF_EFLAGS(pEFlags);
10093 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10094 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10095
10096 IEM_MC_ADVANCE_RIP();
10097 IEM_MC_END();
10098 }
10099 else
10100 {
10101 /* memory operand */
10102 IEM_MC_BEGIN(3, 2);
10103 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10104 IEM_MC_ARG(uint16_t, u16Src, 1);
10105 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10106 IEM_MC_LOCAL(uint16_t, u16Tmp);
10107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10108
10109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10110 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
10111 IEM_MC_ASSIGN(u16Src, u16Imm);
10112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10113 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10114 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10115 IEM_MC_REF_EFLAGS(pEFlags);
10116 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10117 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10118
10119 IEM_MC_ADVANCE_RIP();
10120 IEM_MC_END();
10121 }
10122 return VINF_SUCCESS;
10123
10124 case IEMMODE_32BIT:
10125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10126 {
10127 /* register operand */
10128 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10130
10131 IEM_MC_BEGIN(3, 1);
10132 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10133 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
10134 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10135 IEM_MC_LOCAL(uint32_t, u32Tmp);
10136
10137 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10138 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10139 IEM_MC_REF_EFLAGS(pEFlags);
10140 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10141 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10142
10143 IEM_MC_ADVANCE_RIP();
10144 IEM_MC_END();
10145 }
10146 else
10147 {
10148 /* memory operand */
10149 IEM_MC_BEGIN(3, 2);
10150 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10151 IEM_MC_ARG(uint32_t, u32Src, 1);
10152 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10153 IEM_MC_LOCAL(uint32_t, u32Tmp);
10154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10155
10156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10157 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
10158 IEM_MC_ASSIGN(u32Src, u32Imm);
10159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10160 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10161 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10162 IEM_MC_REF_EFLAGS(pEFlags);
10163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10164 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10165
10166 IEM_MC_ADVANCE_RIP();
10167 IEM_MC_END();
10168 }
10169 return VINF_SUCCESS;
10170
10171 case IEMMODE_64BIT:
10172 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10173 {
10174 /* register operand */
10175 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10177
10178 IEM_MC_BEGIN(3, 1);
10179 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10180 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm, 1);
10181 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10182 IEM_MC_LOCAL(uint64_t, u64Tmp);
10183
10184 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10185 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10186 IEM_MC_REF_EFLAGS(pEFlags);
10187 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10188 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10189
10190 IEM_MC_ADVANCE_RIP();
10191 IEM_MC_END();
10192 }
10193 else
10194 {
10195 /* memory operand */
10196 IEM_MC_BEGIN(3, 2);
10197 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10198 IEM_MC_ARG(uint64_t, u64Src, 1);
10199 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10200 IEM_MC_LOCAL(uint64_t, u64Tmp);
10201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10202
10203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10204 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
10205 IEM_MC_ASSIGN(u64Src, u64Imm);
10206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10207 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10208 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10209 IEM_MC_REF_EFLAGS(pEFlags);
10210 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10211 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10212
10213 IEM_MC_ADVANCE_RIP();
10214 IEM_MC_END();
10215 }
10216 return VINF_SUCCESS;
10217 }
10218 AssertFailedReturn(VERR_IEM_IPE_8);
10219}
10220
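/* Note: all three operand sizes above sign-extend the byte immediate before
   multiplying (the (int8_t) casts and the S8_SX opcode fetchers); e.g. in
   32-bit code 6b c8 fe decodes as imul ecx, eax, -2, not a multiply by 254. */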
10221
10222/** Opcode 0x6c. */
10223FNIEMOP_DEF(iemOp_insb_Yb_DX)
10224{
10225 IEMOP_HLP_MIN_186();
10226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10227 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10228 {
10229 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10230 switch (pVCpu->iem.s.enmEffAddrMode)
10231 {
10232 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10233 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10234 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10236 }
10237 }
10238 else
10239 {
10240 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10241 switch (pVCpu->iem.s.enmEffAddrMode)
10242 {
10243 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10244 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10245 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10246 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10247 }
10248 }
10249}
10250
10251
10252/** Opcode 0x6d. */
10253FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10254{
10255 IEMOP_HLP_MIN_186();
10256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10257 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10258 {
10259 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10260 switch (pVCpu->iem.s.enmEffOpSize)
10261 {
10262 case IEMMODE_16BIT:
10263 switch (pVCpu->iem.s.enmEffAddrMode)
10264 {
10265 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10266 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10267 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10269 }
10270 break;
10271 case IEMMODE_64BIT:
10272 case IEMMODE_32BIT:
10273 switch (pVCpu->iem.s.enmEffAddrMode)
10274 {
10275 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10276 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10277 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10279 }
10280 break;
10281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10282 }
10283 }
10284 else
10285 {
10286 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
10287 switch (pVCpu->iem.s.enmEffOpSize)
10288 {
10289 case IEMMODE_16BIT:
10290 switch (pVCpu->iem.s.enmEffAddrMode)
10291 {
10292 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
10293 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
10294 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
10295 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10296 }
10297 break;
10298 case IEMMODE_64BIT:
10299 case IEMMODE_32BIT:
10300 switch (pVCpu->iem.s.enmEffAddrMode)
10301 {
10302 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
10303 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
10304 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
10305 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10306 }
10307 break;
10308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10309 }
10310 }
10311}
10312
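/* Note: there is no 64-bit operand size for ins/outs; REX.W leaves the I/O
   width at 32 bits, which is why the IEMMODE_64BIT case labels share the
   op32 workers with IEMMODE_32BIT above and below. */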
10313
10314/** Opcode 0x6e. */
10315FNIEMOP_DEF(iemOp_outsb_Yb_DX)
10316{
10317 IEMOP_HLP_MIN_186();
10318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10319 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10320 {
10321 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
10322 switch (pVCpu->iem.s.enmEffAddrMode)
10323 {
10324 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10325 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10326 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10328 }
10329 }
10330 else
10331 {
10332 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
10333 switch (pVCpu->iem.s.enmEffAddrMode)
10334 {
10335 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10336 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10337 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10339 }
10340 }
10341}
10342
10343
10344/** Opcode 0x6f. */
10345FNIEMOP_DEF(iemOp_outswd_Yv_DX)
10346{
10347 IEMOP_HLP_MIN_186();
10348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10349 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10350 {
10351 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
10352 switch (pVCpu->iem.s.enmEffOpSize)
10353 {
10354 case IEMMODE_16BIT:
10355 switch (pVCpu->iem.s.enmEffAddrMode)
10356 {
10357 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10358 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10359 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10361 }
10362 break;
10363 case IEMMODE_64BIT:
10364 case IEMMODE_32BIT:
10365 switch (pVCpu->iem.s.enmEffAddrMode)
10366 {
10367 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10368 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10369 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10370 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10371 }
10372 break;
10373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10374 }
10375 }
10376 else
10377 {
10378 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
10379 switch (pVCpu->iem.s.enmEffOpSize)
10380 {
10381 case IEMMODE_16BIT:
10382 switch (pVCpu->iem.s.enmEffAddrMode)
10383 {
10384 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10385 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10386 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10388 }
10389 break;
10390 case IEMMODE_64BIT:
10391 case IEMMODE_32BIT:
10392 switch (pVCpu->iem.s.enmEffAddrMode)
10393 {
10394 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10395 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10396 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10398 }
10399 break;
10400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10401 }
10402 }
10403}
10404
10405
10406/** Opcode 0x70. */
10407FNIEMOP_DEF(iemOp_jo_Jb)
10408{
10409 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
10410 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10412 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10413
10414 IEM_MC_BEGIN(0, 0);
10415 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10416 IEM_MC_REL_JMP_S8(i8Imm);
10417 } IEM_MC_ELSE() {
10418 IEM_MC_ADVANCE_RIP();
10419 } IEM_MC_ENDIF();
10420 IEM_MC_END();
10421 return VINF_SUCCESS;
10422}
10423
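/* Note: the remaining 0x71..0x7f handlers follow the same pattern as jo
   above: fetch the signed 8-bit displacement, IEM_MC_REL_JMP_S8 on the taken
   path, IEM_MC_ADVANCE_RIP on the fall-through path. The negated forms
   (jno, jnc, jne, ...) simply swap the two arms of the EFLAGS test. */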
10424
10425/** Opcode 0x71. */
10426FNIEMOP_DEF(iemOp_jno_Jb)
10427{
10428 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
10429 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10431 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10432
10433 IEM_MC_BEGIN(0, 0);
10434 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10435 IEM_MC_ADVANCE_RIP();
10436 } IEM_MC_ELSE() {
10437 IEM_MC_REL_JMP_S8(i8Imm);
10438 } IEM_MC_ENDIF();
10439 IEM_MC_END();
10440 return VINF_SUCCESS;
10441}
10442
10443/** Opcode 0x72. */
10444FNIEMOP_DEF(iemOp_jc_Jb)
10445{
10446 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
10447 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10449 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10450
10451 IEM_MC_BEGIN(0, 0);
10452 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10453 IEM_MC_REL_JMP_S8(i8Imm);
10454 } IEM_MC_ELSE() {
10455 IEM_MC_ADVANCE_RIP();
10456 } IEM_MC_ENDIF();
10457 IEM_MC_END();
10458 return VINF_SUCCESS;
10459}
10460
10461
10462/** Opcode 0x73. */
10463FNIEMOP_DEF(iemOp_jnc_Jb)
10464{
10465 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
10466 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10468 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10469
10470 IEM_MC_BEGIN(0, 0);
10471 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10472 IEM_MC_ADVANCE_RIP();
10473 } IEM_MC_ELSE() {
10474 IEM_MC_REL_JMP_S8(i8Imm);
10475 } IEM_MC_ENDIF();
10476 IEM_MC_END();
10477 return VINF_SUCCESS;
10478}
10479
10480
10481/** Opcode 0x74. */
10482FNIEMOP_DEF(iemOp_je_Jb)
10483{
10484 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
10485 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10487 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10488
10489 IEM_MC_BEGIN(0, 0);
10490 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10491 IEM_MC_REL_JMP_S8(i8Imm);
10492 } IEM_MC_ELSE() {
10493 IEM_MC_ADVANCE_RIP();
10494 } IEM_MC_ENDIF();
10495 IEM_MC_END();
10496 return VINF_SUCCESS;
10497}
10498
10499
10500/** Opcode 0x75. */
10501FNIEMOP_DEF(iemOp_jne_Jb)
10502{
10503 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
10504 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10506 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10507
10508 IEM_MC_BEGIN(0, 0);
10509 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10510 IEM_MC_ADVANCE_RIP();
10511 } IEM_MC_ELSE() {
10512 IEM_MC_REL_JMP_S8(i8Imm);
10513 } IEM_MC_ENDIF();
10514 IEM_MC_END();
10515 return VINF_SUCCESS;
10516}
10517
10518
10519/** Opcode 0x76. */
10520FNIEMOP_DEF(iemOp_jbe_Jb)
10521{
10522 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
10523 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10525 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10526
10527 IEM_MC_BEGIN(0, 0);
10528 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10529 IEM_MC_REL_JMP_S8(i8Imm);
10530 } IEM_MC_ELSE() {
10531 IEM_MC_ADVANCE_RIP();
10532 } IEM_MC_ENDIF();
10533 IEM_MC_END();
10534 return VINF_SUCCESS;
10535}
10536
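/* Note: jbe/jna is the unsigned below-or-equal test, taken when either CF or
   ZF is set; e.g. after 'cmp al, bl' it branches iff AL <= BL unsigned. */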
10537
10538/** Opcode 0x77. */
10539FNIEMOP_DEF(iemOp_jnbe_Jb)
10540{
10541 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
10542 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10544 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10545
10546 IEM_MC_BEGIN(0, 0);
10547 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10548 IEM_MC_ADVANCE_RIP();
10549 } IEM_MC_ELSE() {
10550 IEM_MC_REL_JMP_S8(i8Imm);
10551 } IEM_MC_ENDIF();
10552 IEM_MC_END();
10553 return VINF_SUCCESS;
10554}
10555
10556
10557/** Opcode 0x78. */
10558FNIEMOP_DEF(iemOp_js_Jb)
10559{
10560 IEMOP_MNEMONIC(js_Jb, "js Jb");
10561 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10563 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10564
10565 IEM_MC_BEGIN(0, 0);
10566 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10567 IEM_MC_REL_JMP_S8(i8Imm);
10568 } IEM_MC_ELSE() {
10569 IEM_MC_ADVANCE_RIP();
10570 } IEM_MC_ENDIF();
10571 IEM_MC_END();
10572 return VINF_SUCCESS;
10573}
10574
10575
10576/** Opcode 0x79. */
10577FNIEMOP_DEF(iemOp_jns_Jb)
10578{
10579 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
10580 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10582 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10583
10584 IEM_MC_BEGIN(0, 0);
10585 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10586 IEM_MC_ADVANCE_RIP();
10587 } IEM_MC_ELSE() {
10588 IEM_MC_REL_JMP_S8(i8Imm);
10589 } IEM_MC_ENDIF();
10590 IEM_MC_END();
10591 return VINF_SUCCESS;
10592}
10593
10594
10595/** Opcode 0x7a. */
10596FNIEMOP_DEF(iemOp_jp_Jb)
10597{
10598 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10599 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10601 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10602
10603 IEM_MC_BEGIN(0, 0);
10604 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10605 IEM_MC_REL_JMP_S8(i8Imm);
10606 } IEM_MC_ELSE() {
10607 IEM_MC_ADVANCE_RIP();
10608 } IEM_MC_ENDIF();
10609 IEM_MC_END();
10610 return VINF_SUCCESS;
10611}
10612
10613
10614/** Opcode 0x7b. */
10615FNIEMOP_DEF(iemOp_jnp_Jb)
10616{
10617 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10618 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10620 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10621
10622 IEM_MC_BEGIN(0, 0);
10623 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10624 IEM_MC_ADVANCE_RIP();
10625 } IEM_MC_ELSE() {
10626 IEM_MC_REL_JMP_S8(i8Imm);
10627 } IEM_MC_ENDIF();
10628 IEM_MC_END();
10629 return VINF_SUCCESS;
10630}
10631
10632
10633/** Opcode 0x7c. */
10634FNIEMOP_DEF(iemOp_jl_Jb)
10635{
10636 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
10637 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10639 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10640
10641 IEM_MC_BEGIN(0, 0);
10642 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10643 IEM_MC_REL_JMP_S8(i8Imm);
10644 } IEM_MC_ELSE() {
10645 IEM_MC_ADVANCE_RIP();
10646 } IEM_MC_ENDIF();
10647 IEM_MC_END();
10648 return VINF_SUCCESS;
10649}
10650
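/* Note: jl/jnge is the signed less-than test, taken when SF != OF; e.g.
   after 'cmp eax, ebx' it branches iff EAX < EBX as signed integers. */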
10651
10652/** Opcode 0x7d. */
10653FNIEMOP_DEF(iemOp_jnl_Jb)
10654{
10655 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
10656 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10658 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10659
10660 IEM_MC_BEGIN(0, 0);
10661 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10662 IEM_MC_ADVANCE_RIP();
10663 } IEM_MC_ELSE() {
10664 IEM_MC_REL_JMP_S8(i8Imm);
10665 } IEM_MC_ENDIF();
10666 IEM_MC_END();
10667 return VINF_SUCCESS;
10668}
10669
10670
10671/** Opcode 0x7e. */
10672FNIEMOP_DEF(iemOp_jle_Jb)
10673{
10674 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
10675 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10677 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10678
10679 IEM_MC_BEGIN(0, 0);
10680 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10681 IEM_MC_REL_JMP_S8(i8Imm);
10682 } IEM_MC_ELSE() {
10683 IEM_MC_ADVANCE_RIP();
10684 } IEM_MC_ENDIF();
10685 IEM_MC_END();
10686 return VINF_SUCCESS;
10687}
10688
10689
10690/** Opcode 0x7f. */
10691FNIEMOP_DEF(iemOp_jnle_Jb)
10692{
10693 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
10694 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10696 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10697
10698 IEM_MC_BEGIN(0, 0);
10699 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10700 IEM_MC_ADVANCE_RIP();
10701 } IEM_MC_ELSE() {
10702 IEM_MC_REL_JMP_S8(i8Imm);
10703 } IEM_MC_ENDIF();
10704 IEM_MC_END();
10705 return VINF_SUCCESS;
10706}
10707
10708
10709/** Opcode 0x80. */
10710FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
10711{
10712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10713 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10714 {
10715 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
10716 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
10717 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
10718 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
10719 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
10720 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
10721 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
10722 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
10723 }
10724 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10725
10726 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10727 {
10728 /* register target */
10729 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10731 IEM_MC_BEGIN(3, 0);
10732 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10733 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10734 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10735
10736 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10737 IEM_MC_REF_EFLAGS(pEFlags);
10738 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10739
10740 IEM_MC_ADVANCE_RIP();
10741 IEM_MC_END();
10742 }
10743 else
10744 {
10745 /* memory target */
10746 uint32_t fAccess;
10747 if (pImpl->pfnLockedU8)
10748 fAccess = IEM_ACCESS_DATA_RW;
10749 else /* CMP */
10750 fAccess = IEM_ACCESS_DATA_R;
10751 IEM_MC_BEGIN(3, 2);
10752 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10753 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10755
10756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10757 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10758 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10759 if (pImpl->pfnLockedU8)
10760 IEMOP_HLP_DONE_DECODING();
10761 else
10762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10763
10764 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10765 IEM_MC_FETCH_EFLAGS(EFlags);
10766 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10767 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10768 else
10769 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
10770
10771 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
10772 IEM_MC_COMMIT_EFLAGS(EFlags);
10773 IEM_MC_ADVANCE_RIP();
10774 IEM_MC_END();
10775 }
10776 return VINF_SUCCESS;
10777}
10778
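/* Worked example: 80 f9 05 is 'cmp cl, 5' (ModR/M reg field 7 selects CMP).
   CMP has no locked variant, so pfnLockedU8 is NULL, the memory path maps
   the operand read-only and a lock prefix raises #UD. */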
10779
10780/** Opcode 0x81. */
10781FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
10782{
10783 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10784 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10785 {
10786 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
10787 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
10788 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
10789 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
10790 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
10791 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
10792 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
10793 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
10794 }
10795 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10796
10797 switch (pVCpu->iem.s.enmEffOpSize)
10798 {
10799 case IEMMODE_16BIT:
10800 {
10801 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10802 {
10803 /* register target */
10804 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10806 IEM_MC_BEGIN(3, 0);
10807 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10808 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
10809 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10810
10811 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10812 IEM_MC_REF_EFLAGS(pEFlags);
10813 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10814
10815 IEM_MC_ADVANCE_RIP();
10816 IEM_MC_END();
10817 }
10818 else
10819 {
10820 /* memory target */
10821 uint32_t fAccess;
10822 if (pImpl->pfnLockedU16)
10823 fAccess = IEM_ACCESS_DATA_RW;
10824 else /* CMP */
10825 fAccess = IEM_ACCESS_DATA_R;
10826 IEM_MC_BEGIN(3, 2);
10827 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10828 IEM_MC_ARG(uint16_t, u16Src, 1);
10829 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10831
10832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10833 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10834 IEM_MC_ASSIGN(u16Src, u16Imm);
10835 if (pImpl->pfnLockedU16)
10836 IEMOP_HLP_DONE_DECODING();
10837 else
10838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10839 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10840 IEM_MC_FETCH_EFLAGS(EFlags);
10841 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10842 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10843 else
10844 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10845
10846 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10847 IEM_MC_COMMIT_EFLAGS(EFlags);
10848 IEM_MC_ADVANCE_RIP();
10849 IEM_MC_END();
10850 }
10851 break;
10852 }
10853
10854 case IEMMODE_32BIT:
10855 {
10856 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10857 {
10858 /* register target */
10859 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10861 IEM_MC_BEGIN(3, 0);
10862 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10863 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10864 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10865
10866 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10867 IEM_MC_REF_EFLAGS(pEFlags);
10868 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10869 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10870
10871 IEM_MC_ADVANCE_RIP();
10872 IEM_MC_END();
10873 }
10874 else
10875 {
10876 /* memory target */
10877 uint32_t fAccess;
10878 if (pImpl->pfnLockedU32)
10879 fAccess = IEM_ACCESS_DATA_RW;
10880 else /* CMP */
10881 fAccess = IEM_ACCESS_DATA_R;
10882 IEM_MC_BEGIN(3, 2);
10883 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10884 IEM_MC_ARG(uint32_t, u32Src, 1);
10885 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10887
10888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10889 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10890 IEM_MC_ASSIGN(u32Src, u32Imm);
10891 if (pImpl->pfnLockedU32)
10892 IEMOP_HLP_DONE_DECODING();
10893 else
10894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10895 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10896 IEM_MC_FETCH_EFLAGS(EFlags);
10897 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10898 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10899 else
10900 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10901
10902 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10903 IEM_MC_COMMIT_EFLAGS(EFlags);
10904 IEM_MC_ADVANCE_RIP();
10905 IEM_MC_END();
10906 }
10907 break;
10908 }
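/* Note: in 64-bit mode a 32-bit register write zero-extends into bits 63:32,
   hence the IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF above after modifying the
   register through a pointer; 16-bit writes leave the upper bits untouched,
   so the 16-bit case needs no such call. */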
10909
10910 case IEMMODE_64BIT:
10911 {
10912 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10913 {
10914 /* register target */
10915 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10917 IEM_MC_BEGIN(3, 0);
10918 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10919 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10920 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10921
10922 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10923 IEM_MC_REF_EFLAGS(pEFlags);
10924 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10925
10926 IEM_MC_ADVANCE_RIP();
10927 IEM_MC_END();
10928 }
10929 else
10930 {
10931 /* memory target */
10932 uint32_t fAccess;
10933 if (pImpl->pfnLockedU64)
10934 fAccess = IEM_ACCESS_DATA_RW;
10935 else /* CMP */
10936 fAccess = IEM_ACCESS_DATA_R;
10937 IEM_MC_BEGIN(3, 2);
10938 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10939 IEM_MC_ARG(uint64_t, u64Src, 1);
10940 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10942
10943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10944 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10945 if (pImpl->pfnLockedU64)
10946 IEMOP_HLP_DONE_DECODING();
10947 else
10948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10949 IEM_MC_ASSIGN(u64Src, u64Imm);
10950 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10951 IEM_MC_FETCH_EFLAGS(EFlags);
10952 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10953 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10954 else
10955 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10956
10957 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10958 IEM_MC_COMMIT_EFLAGS(EFlags);
10959 IEM_MC_ADVANCE_RIP();
10960 IEM_MC_END();
10961 }
10962 break;
10963 }
10964 }
10965 return VINF_SUCCESS;
10966}
10967
10968
10969/** Opcode 0x82. */
10970FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10971{
10972 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10973 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10974}
10975
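/* Note: 0x82 is an undocumented alias of 0x80; in 64-bit mode it is invalid,
   which the IEMOP_HLP_NO_64BIT above handles before forwarding. */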
10976
10977/** Opcode 0x83. */
10978FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10979{
10980 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10981 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10982 {
10983 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10984 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10985 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10986 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10987 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10988 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10989 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10990 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10991 }
10992 /* Note! The OR, AND and XOR forms seem to be present on CPUs prior to
10993 the 386, even though they are absent from the Intel reference
10994 manuals and some 3rd party opcode listings. */
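/* Worked example: 83 c8 ff in 32-bit code is 'or eax, 0xffffffff'; the
   sign-extended byte immediate is the whole point of 0x83 over 0x81. */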
10995 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10996
10997 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10998 {
10999 /*
11000 * Register target
11001 */
11002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11003 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11004 switch (pVCpu->iem.s.enmEffOpSize)
11005 {
11006 case IEMMODE_16BIT:
11007 {
11008 IEM_MC_BEGIN(3, 0);
11009 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11010 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm, 1);
11011 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11012
11013 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11014 IEM_MC_REF_EFLAGS(pEFlags);
11015 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11016
11017 IEM_MC_ADVANCE_RIP();
11018 IEM_MC_END();
11019 break;
11020 }
11021
11022 case IEMMODE_32BIT:
11023 {
11024 IEM_MC_BEGIN(3, 0);
11025 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11026 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm, 1);
11027 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11028
11029 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11030 IEM_MC_REF_EFLAGS(pEFlags);
11031 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11032 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11033
11034 IEM_MC_ADVANCE_RIP();
11035 IEM_MC_END();
11036 break;
11037 }
11038
11039 case IEMMODE_64BIT:
11040 {
11041 IEM_MC_BEGIN(3, 0);
11042 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11043 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm, 1);
11044 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11045
11046 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11047 IEM_MC_REF_EFLAGS(pEFlags);
11048 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11049
11050 IEM_MC_ADVANCE_RIP();
11051 IEM_MC_END();
11052 break;
11053 }
11054 }
11055 }
11056 else
11057 {
11058 /*
11059 * Memory target.
11060 */
11061 uint32_t fAccess;
11062 if (pImpl->pfnLockedU16)
11063 fAccess = IEM_ACCESS_DATA_RW;
11064 else /* CMP */
11065 fAccess = IEM_ACCESS_DATA_R;
11066
11067 switch (pVCpu->iem.s.enmEffOpSize)
11068 {
11069 case IEMMODE_16BIT:
11070 {
11071 IEM_MC_BEGIN(3, 2);
11072 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11073 IEM_MC_ARG(uint16_t, u16Src, 1);
11074 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11076
11077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11078 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11079 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
11080 if (pImpl->pfnLockedU16)
11081 IEMOP_HLP_DONE_DECODING();
11082 else
11083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11084 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11085 IEM_MC_FETCH_EFLAGS(EFlags);
11086 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11087 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11088 else
11089 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
11090
11091 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
11092 IEM_MC_COMMIT_EFLAGS(EFlags);
11093 IEM_MC_ADVANCE_RIP();
11094 IEM_MC_END();
11095 break;
11096 }
11097
11098 case IEMMODE_32BIT:
11099 {
11100 IEM_MC_BEGIN(3, 2);
11101 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11102 IEM_MC_ARG(uint32_t, u32Src, 1);
11103 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11105
11106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11107 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11108 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
11109 if (pImpl->pfnLockedU32)
11110 IEMOP_HLP_DONE_DECODING();
11111 else
11112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11113 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11114 IEM_MC_FETCH_EFLAGS(EFlags);
11115 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11116 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11117 else
11118 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
11119
11120 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
11121 IEM_MC_COMMIT_EFLAGS(EFlags);
11122 IEM_MC_ADVANCE_RIP();
11123 IEM_MC_END();
11124 break;
11125 }
11126
11127 case IEMMODE_64BIT:
11128 {
11129 IEM_MC_BEGIN(3, 2);
11130 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11131 IEM_MC_ARG(uint64_t, u64Src, 1);
11132 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11134
11135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11136 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11137 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
11138 if (pImpl->pfnLockedU64)
11139 IEMOP_HLP_DONE_DECODING();
11140 else
11141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11142 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11143 IEM_MC_FETCH_EFLAGS(EFlags);
11144 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11146 else
11147 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
11148
11149 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
11150 IEM_MC_COMMIT_EFLAGS(EFlags);
11151 IEM_MC_ADVANCE_RIP();
11152 IEM_MC_END();
11153 break;
11154 }
11155 }
11156 }
11157 return VINF_SUCCESS;
11158}
11159
11160
11161/** Opcode 0x84. */
11162FNIEMOP_DEF(iemOp_test_Eb_Gb)
11163{
11164 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
11165 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11166 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
11167}
11168
11169
11170/** Opcode 0x85. */
11171FNIEMOP_DEF(iemOp_test_Ev_Gv)
11172{
11173 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
11174 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11175 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
11176}
11177
11178
11179/** Opcode 0x86. */
11180FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
11181{
11182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11183 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
11184
11185 /*
11186 * If rm is denoting a register, no more instruction bytes.
11187 */
11188 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11189 {
11190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11191
11192 IEM_MC_BEGIN(0, 2);
11193 IEM_MC_LOCAL(uint8_t, uTmp1);
11194 IEM_MC_LOCAL(uint8_t, uTmp2);
11195
11196 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11197 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11198 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11199 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11200
11201 IEM_MC_ADVANCE_RIP();
11202 IEM_MC_END();
11203 }
11204 else
11205 {
11206 /*
11207 * We're accessing memory.
11208 */
11209/** @todo the register must be committed separately! */
11210 IEM_MC_BEGIN(2, 2);
11211 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
11212 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11214
11215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11216 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11217 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11218 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
11219 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
11220
11221 IEM_MC_ADVANCE_RIP();
11222 IEM_MC_END();
11223 }
11224 return VINF_SUCCESS;
11225}
11226
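/* Note: xchg with a memory operand is implicitly locked on real CPUs and a
   lock prefix is merely redundant, which is presumably why the memory path
   above does not reject it the way the register path does. */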
11227
11228/** Opcode 0x87. */
11229FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
11230{
11231 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
11232 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11233
11234 /*
11235 * If rm is denoting a register, no more instruction bytes.
11236 */
11237 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11238 {
11239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11240
11241 switch (pVCpu->iem.s.enmEffOpSize)
11242 {
11243 case IEMMODE_16BIT:
11244 IEM_MC_BEGIN(0, 2);
11245 IEM_MC_LOCAL(uint16_t, uTmp1);
11246 IEM_MC_LOCAL(uint16_t, uTmp2);
11247
11248 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11249 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11250 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11251 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11252
11253 IEM_MC_ADVANCE_RIP();
11254 IEM_MC_END();
11255 return VINF_SUCCESS;
11256
11257 case IEMMODE_32BIT:
11258 IEM_MC_BEGIN(0, 2);
11259 IEM_MC_LOCAL(uint32_t, uTmp1);
11260 IEM_MC_LOCAL(uint32_t, uTmp2);
11261
11262 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11263 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11264 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11265 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11266
11267 IEM_MC_ADVANCE_RIP();
11268 IEM_MC_END();
11269 return VINF_SUCCESS;
11270
11271 case IEMMODE_64BIT:
11272 IEM_MC_BEGIN(0, 2);
11273 IEM_MC_LOCAL(uint64_t, uTmp1);
11274 IEM_MC_LOCAL(uint64_t, uTmp2);
11275
11276 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11277 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11278 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11279 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11280
11281 IEM_MC_ADVANCE_RIP();
11282 IEM_MC_END();
11283 return VINF_SUCCESS;
11284
11285 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11286 }
11287 }
11288 else
11289 {
11290 /*
11291 * We're accessing memory.
11292 */
11293 switch (pVCpu->iem.s.enmEffOpSize)
11294 {
11295/** @todo the register must be committed separately! */
11296 case IEMMODE_16BIT:
11297 IEM_MC_BEGIN(2, 2);
11298 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
11299 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11301
11302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11303 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11304 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11305 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
11306 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
11307
11308 IEM_MC_ADVANCE_RIP();
11309 IEM_MC_END();
11310 return VINF_SUCCESS;
11311
11312 case IEMMODE_32BIT:
11313 IEM_MC_BEGIN(2, 2);
11314 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
11315 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11317
11318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11319 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11320 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11321 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
11322 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
11323
11324 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11325 IEM_MC_ADVANCE_RIP();
11326 IEM_MC_END();
11327 return VINF_SUCCESS;
11328
11329 case IEMMODE_64BIT:
11330 IEM_MC_BEGIN(2, 2);
11331 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
11332 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11334
11335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11336 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11337 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11338 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
11339 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
11340
11341 IEM_MC_ADVANCE_RIP();
11342 IEM_MC_END();
11343 return VINF_SUCCESS;
11344
11345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11346 }
11347 }
11348}
11349
11350
11351/** Opcode 0x88. */
11352FNIEMOP_DEF(iemOp_mov_Eb_Gb)
11353{
11354 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
11355
11356 uint8_t bRm;
11357 IEM_OPCODE_GET_NEXT_U8(&bRm);
11358
11359 /*
11360 * If rm is denoting a register, no more instruction bytes.
11361 */
11362 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11363 {
11364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11365 IEM_MC_BEGIN(0, 1);
11366 IEM_MC_LOCAL(uint8_t, u8Value);
11367 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11368 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
11369 IEM_MC_ADVANCE_RIP();
11370 IEM_MC_END();
11371 }
11372 else
11373 {
11374 /*
11375 * We're writing a register to memory.
11376 */
11377 IEM_MC_BEGIN(0, 2);
11378 IEM_MC_LOCAL(uint8_t, u8Value);
11379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11382 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11383 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
11384 IEM_MC_ADVANCE_RIP();
11385 IEM_MC_END();
11386 }
11387 return VINF_SUCCESS;
11388
11389}
11390
11391
11392/** Opcode 0x89. */
11393FNIEMOP_DEF(iemOp_mov_Ev_Gv)
11394{
11395 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
11396
11397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11398
11399 /*
11400 * If rm is denoting a register, no more instruction bytes.
11401 */
11402 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11403 {
11404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11405 switch (pVCpu->iem.s.enmEffOpSize)
11406 {
11407 case IEMMODE_16BIT:
11408 IEM_MC_BEGIN(0, 1);
11409 IEM_MC_LOCAL(uint16_t, u16Value);
11410 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11411 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11412 IEM_MC_ADVANCE_RIP();
11413 IEM_MC_END();
11414 break;
11415
11416 case IEMMODE_32BIT:
11417 IEM_MC_BEGIN(0, 1);
11418 IEM_MC_LOCAL(uint32_t, u32Value);
11419 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11420 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11421 IEM_MC_ADVANCE_RIP();
11422 IEM_MC_END();
11423 break;
11424
11425 case IEMMODE_64BIT:
11426 IEM_MC_BEGIN(0, 1);
11427 IEM_MC_LOCAL(uint64_t, u64Value);
11428 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11429 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11430 IEM_MC_ADVANCE_RIP();
11431 IEM_MC_END();
11432 break;
11433 }
11434 }
11435 else
11436 {
11437 /*
11438 * We're writing a register to memory.
11439 */
11440 switch (pVCpu->iem.s.enmEffOpSize)
11441 {
11442 case IEMMODE_16BIT:
11443 IEM_MC_BEGIN(0, 2);
11444 IEM_MC_LOCAL(uint16_t, u16Value);
11445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11448 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11449 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11450 IEM_MC_ADVANCE_RIP();
11451 IEM_MC_END();
11452 break;
11453
11454 case IEMMODE_32BIT:
11455 IEM_MC_BEGIN(0, 2);
11456 IEM_MC_LOCAL(uint32_t, u32Value);
11457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11460 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11461 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11462 IEM_MC_ADVANCE_RIP();
11463 IEM_MC_END();
11464 break;
11465
11466 case IEMMODE_64BIT:
11467 IEM_MC_BEGIN(0, 2);
11468 IEM_MC_LOCAL(uint64_t, u64Value);
11469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11472 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11473 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11474 IEM_MC_ADVANCE_RIP();
11475 IEM_MC_END();
11476 break;
11477 }
11478 }
11479 return VINF_SUCCESS;
11480}
11481
11482
11483/** Opcode 0x8a. */
11484FNIEMOP_DEF(iemOp_mov_Gb_Eb)
11485{
11486 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
11487
11488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11489
11490 /*
11491 * If rm is denoting a register, no more instruction bytes.
11492 */
11493 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11494 {
11495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11496 IEM_MC_BEGIN(0, 1);
11497 IEM_MC_LOCAL(uint8_t, u8Value);
11498 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11499 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11500 IEM_MC_ADVANCE_RIP();
11501 IEM_MC_END();
11502 }
11503 else
11504 {
11505 /*
11506 * We're loading a register from memory.
11507 */
11508 IEM_MC_BEGIN(0, 2);
11509 IEM_MC_LOCAL(uint8_t, u8Value);
11510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11513 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11514 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11515 IEM_MC_ADVANCE_RIP();
11516 IEM_MC_END();
11517 }
11518 return VINF_SUCCESS;
11519}
11520
11521
11522/** Opcode 0x8b. */
11523FNIEMOP_DEF(iemOp_mov_Gv_Ev)
11524{
11525 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
11526
11527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11528
11529 /*
11530 * If rm is denoting a register, no more instruction bytes.
11531 */
11532 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11533 {
11534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11535 switch (pVCpu->iem.s.enmEffOpSize)
11536 {
11537 case IEMMODE_16BIT:
11538 IEM_MC_BEGIN(0, 1);
11539 IEM_MC_LOCAL(uint16_t, u16Value);
11540 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11541 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11542 IEM_MC_ADVANCE_RIP();
11543 IEM_MC_END();
11544 break;
11545
11546 case IEMMODE_32BIT:
11547 IEM_MC_BEGIN(0, 1);
11548 IEM_MC_LOCAL(uint32_t, u32Value);
11549 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11550 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11551 IEM_MC_ADVANCE_RIP();
11552 IEM_MC_END();
11553 break;
11554
11555 case IEMMODE_64BIT:
11556 IEM_MC_BEGIN(0, 1);
11557 IEM_MC_LOCAL(uint64_t, u64Value);
11558 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11559 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11560 IEM_MC_ADVANCE_RIP();
11561 IEM_MC_END();
11562 break;
11563 }
11564 }
11565 else
11566 {
11567 /*
11568 * We're loading a register from memory.
11569 */
11570 switch (pVCpu->iem.s.enmEffOpSize)
11571 {
11572 case IEMMODE_16BIT:
11573 IEM_MC_BEGIN(0, 2);
11574 IEM_MC_LOCAL(uint16_t, u16Value);
11575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11578 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11579 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11580 IEM_MC_ADVANCE_RIP();
11581 IEM_MC_END();
11582 break;
11583
11584 case IEMMODE_32BIT:
11585 IEM_MC_BEGIN(0, 2);
11586 IEM_MC_LOCAL(uint32_t, u32Value);
11587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11590 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11591 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11592 IEM_MC_ADVANCE_RIP();
11593 IEM_MC_END();
11594 break;
11595
11596 case IEMMODE_64BIT:
11597 IEM_MC_BEGIN(0, 2);
11598 IEM_MC_LOCAL(uint64_t, u64Value);
11599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11602 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11603 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11604 IEM_MC_ADVANCE_RIP();
11605 IEM_MC_END();
11606 break;
11607 }
11608 }
11609 return VINF_SUCCESS;
11610}
11611
11612
11613/** Opcode 0x63. */
11614FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
11615{
11616 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
11617 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
11618 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11619 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
11620 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
11621}
11622
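/* Note: movsxd only sign-extends with REX.W; for 16/32-bit operand sizes the
   handler above falls back to a plain mov, and outside 64-bit mode the
   opcode byte decodes as arpl. */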
11623
11624/** Opcode 0x8c. */
11625FNIEMOP_DEF(iemOp_mov_Ev_Sw)
11626{
11627 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
11628
11629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11630
11631 /*
11632 * Check that the source segment register exists. The REX.R prefix is ignored.
11633 */
11634 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11635 if (iSegReg > X86_SREG_GS)
11636 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11637
11638 /*
11639 * If rm is denoting a register, no more instruction bytes.
11640 * In that case, the operand size is respected and the upper bits are
11641 * cleared (starting with some Pentium models).
11642 */
11643 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11644 {
11645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11646 switch (pVCpu->iem.s.enmEffOpSize)
11647 {
11648 case IEMMODE_16BIT:
11649 IEM_MC_BEGIN(0, 1);
11650 IEM_MC_LOCAL(uint16_t, u16Value);
11651 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11652 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11653 IEM_MC_ADVANCE_RIP();
11654 IEM_MC_END();
11655 break;
11656
11657 case IEMMODE_32BIT:
11658 IEM_MC_BEGIN(0, 1);
11659 IEM_MC_LOCAL(uint32_t, u32Value);
11660 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
11661 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11662 IEM_MC_ADVANCE_RIP();
11663 IEM_MC_END();
11664 break;
11665
11666 case IEMMODE_64BIT:
11667 IEM_MC_BEGIN(0, 1);
11668 IEM_MC_LOCAL(uint64_t, u64Value);
11669 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
11670 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11671 IEM_MC_ADVANCE_RIP();
11672 IEM_MC_END();
11673 break;
11674 }
11675 }
11676 else
11677 {
11678 /*
11679 * We're saving the register to memory. The access is word sized
11680 * regardless of operand size prefixes.
11681 */
11682#if 0 /* not necessary */
11683 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11684#endif
11685 IEM_MC_BEGIN(0, 2);
11686 IEM_MC_LOCAL(uint16_t, u16Value);
11687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11690 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11691 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11692 IEM_MC_ADVANCE_RIP();
11693 IEM_MC_END();
11694 }
11695 return VINF_SUCCESS;
11696}
11697
11698
11699
11700
11701/** Opcode 0x8d. */
11702FNIEMOP_DEF(iemOp_lea_Gv_M)
11703{
11704 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
11705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11706 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11707 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
11708
11709 switch (pVCpu->iem.s.enmEffOpSize)
11710 {
11711 case IEMMODE_16BIT:
11712 IEM_MC_BEGIN(0, 2);
11713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11714 IEM_MC_LOCAL(uint16_t, u16Cast);
11715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11717 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
11718 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
11719 IEM_MC_ADVANCE_RIP();
11720 IEM_MC_END();
11721 return VINF_SUCCESS;
11722
11723 case IEMMODE_32BIT:
11724 IEM_MC_BEGIN(0, 2);
11725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11726 IEM_MC_LOCAL(uint32_t, u32Cast);
11727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11729 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
11730 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
11731 IEM_MC_ADVANCE_RIP();
11732 IEM_MC_END();
11733 return VINF_SUCCESS;
11734
11735 case IEMMODE_64BIT:
11736 IEM_MC_BEGIN(0, 1);
11737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11740 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
11741 IEM_MC_ADVANCE_RIP();
11742 IEM_MC_END();
11743 return VINF_SUCCESS;
11744 }
11745 AssertFailedReturn(VERR_IEM_IPE_7);
11746}
11747
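/* Note: lea only runs the effective address calculation; no memory is
   accessed and no segment checks apply. A 16-bit operand size simply
   truncates the address, modelled by IEM_MC_ASSIGN_TO_SMALLER above. */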
11748
11749/** Opcode 0x8e. */
11750FNIEMOP_DEF(iemOp_mov_Sw_Ev)
11751{
11752 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
11753
11754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11755
11756 /*
11757 * The practical operand size is 16-bit.
11758 */
11759#if 0 /* not necessary */
11760 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11761#endif
11762
11763 /*
11764 * Check that the destination register exists and can be used with this
11765 * instruction. The REX.R prefix is ignored.
11766 */
11767 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11768 if ( iSegReg == X86_SREG_CS
11769 || iSegReg > X86_SREG_GS)
11770 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11771
11772 /*
11773 * If rm is denoting a register, no more instruction bytes.
11774 */
11775 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11776 {
11777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11778 IEM_MC_BEGIN(2, 0);
11779 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11780 IEM_MC_ARG(uint16_t, u16Value, 1);
11781 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11782 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11783 IEM_MC_END();
11784 }
11785 else
11786 {
11787 /*
11788 * We're loading the register from memory. The access is word sized
11789 * regardless of operand size prefixes.
11790 */
11791 IEM_MC_BEGIN(2, 1);
11792 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11793 IEM_MC_ARG(uint16_t, u16Value, 1);
11794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11797 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11798 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11799 IEM_MC_END();
11800 }
11801 return VINF_SUCCESS;
11802}
11803
11804
11805/** Opcode 0x8f /0. */
11806FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
11807{
11808 /* This bugger is rather annoying as it requires rSP to be updated before
11809 doing the effective address calculations. Will eventually require a
11810 split between the R/M+SIB decoding and the effective address
11811 calculation - which is something that is required for any attempt at
11812 reusing this code for a recompiler. It may also be good to have if we
11813 need to delay the #UD exception caused by invalid lock prefixes.
11814
11815 For now, we'll do a mostly safe interpreter-only implementation here. */
11816 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
11817 * now until tests show it's checked... */
11818 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
11819
11820 /* Register access is relatively easy and can share code. */
11821 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11822 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11823
11824 /*
11825 * Memory target.
11826 *
11827 * Intel says that RSP is incremented before it's used in any effective
11828 * address calculations. This means some serious extra annoyance here since
11829 * we decode and calculate the effective address in one step and like to
11830 * delay committing registers till everything is done.
11831 *
11832 * So, we'll decode and calculate the effective address twice. This will
11833 * require some recoding if turned into a recompiler.
11834 */
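/* Worked example: with ESP=0x1000, 'pop dword [esp]' loads the value at
   0x1000 and stores it at 0x1004, the address being formed from the already
   incremented ESP. The 2/4/8 passed to iemOpHlpCalcRmEffAddrEx below is
   presumably the rSP bias modelling exactly that. */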
11835 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
11836
11837#ifndef TST_IEM_CHECK_MC
11838 /* Calc effective address with modified ESP. */
11839/** @todo testcase */
11840 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
11841 RTGCPTR GCPtrEff;
11842 VBOXSTRICTRC rcStrict;
11843 switch (pVCpu->iem.s.enmEffOpSize)
11844 {
11845 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
11846 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
11847 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
11848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11849 }
11850 if (rcStrict != VINF_SUCCESS)
11851 return rcStrict;
11852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11853
11854 /* Perform the operation - this should be CImpl. */
11855 RTUINT64U TmpRsp;
11856 TmpRsp.u = pCtx->rsp;
11857 switch (pVCpu->iem.s.enmEffOpSize)
11858 {
11859 case IEMMODE_16BIT:
11860 {
11861 uint16_t u16Value;
11862 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
11863 if (rcStrict == VINF_SUCCESS)
11864 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
11865 break;
11866 }
11867
11868 case IEMMODE_32BIT:
11869 {
11870 uint32_t u32Value;
11871 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
11872 if (rcStrict == VINF_SUCCESS)
11873 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
11874 break;
11875 }
11876
11877 case IEMMODE_64BIT:
11878 {
11879 uint64_t u64Value;
11880 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
11881 if (rcStrict == VINF_SUCCESS)
11882 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
11883 break;
11884 }
11885
11886 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11887 }
11888 if (rcStrict == VINF_SUCCESS)
11889 {
11890 pCtx->rsp = TmpRsp.u;
11891 iemRegUpdateRipAndClearRF(pVCpu);
11892 }
11893 return rcStrict;
11894
11895#else
11896 return VERR_IEM_IPE_2;
11897#endif
11898}
11899
11900
11901/** Opcode 0x8f. */
11902FNIEMOP_DEF(iemOp_Grp1A)
11903{
11904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11905 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11906 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11907
11908 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11909 /** @todo XOP decoding. */
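 /* The defined XOP map_select values (08h..0Ah) all leave the would-be
 ModRM.reg field non-zero, which is presumably what keeps pop Ev (/0)
 unambiguous. */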
11910 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11911 return IEMOP_RAISE_INVALID_OPCODE();
11912}
11913
11914
11915/**
11916 * Common 'xchg reg,rAX' helper.
11917 */
11918FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11919{
11920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11921
11922 iReg |= pVCpu->iem.s.uRexB;
11923 switch (pVCpu->iem.s.enmEffOpSize)
11924 {
11925 case IEMMODE_16BIT:
11926 IEM_MC_BEGIN(0, 2);
11927 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11928 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11929 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11930 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11931 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11932 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11933 IEM_MC_ADVANCE_RIP();
11934 IEM_MC_END();
11935 return VINF_SUCCESS;
11936
11937 case IEMMODE_32BIT:
11938 IEM_MC_BEGIN(0, 2);
11939 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11940 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11941 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11942 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11943 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11944 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11945 IEM_MC_ADVANCE_RIP();
11946 IEM_MC_END();
11947 return VINF_SUCCESS;
11948
11949 case IEMMODE_64BIT:
11950 IEM_MC_BEGIN(0, 2);
11951 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11952 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11953 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11954 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11955 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11956 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11957 IEM_MC_ADVANCE_RIP();
11958 IEM_MC_END();
11959 return VINF_SUCCESS;
11960
11961 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11962 }
11963}
11964
11965
11966/** Opcode 0x90. */
11967FNIEMOP_DEF(iemOp_nop)
11968{
11969 /* R8/R8D and RAX/EAX can be exchanged. */
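 /* So 41 90 is xchg r8,rax rather than a nop, while 90 and 48 90 are
 plain nops and F3 90 is pause (checked right below). */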
11970 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11971 {
11972 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11973 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11974 }
11975
11976 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ) /* pause is F3 90 */
11977 IEMOP_MNEMONIC(pause, "pause");
11978 else
11979 IEMOP_MNEMONIC(nop, "nop");
11980 IEM_MC_BEGIN(0, 0);
11981 IEM_MC_ADVANCE_RIP();
11982 IEM_MC_END();
11983 return VINF_SUCCESS;
11984}
11985
11986
11987/** Opcode 0x91. */
11988FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11989{
11990 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11991 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11992}
11993
11994
11995/** Opcode 0x92. */
11996FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11997{
11998 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11999 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
12000}
12001
12002
12003/** Opcode 0x93. */
12004FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
12005{
12006 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
12007 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
12008}
12009
12010
12011/** Opcode 0x94. */
12012FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
12013{
12014 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
12015 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
12016}
12017
12018
12019/** Opcode 0x95. */
12020FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
12021{
12022 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
12023 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
12024}
12025
12026
12027/** Opcode 0x96. */
12028FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
12029{
12030 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
12031 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
12032}
12033
12034
12035/** Opcode 0x97. */
12036FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
12037{
12038 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
12039 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
12040}
12041
12042
12043/** Opcode 0x98. */
12044FNIEMOP_DEF(iemOp_cbw)
12045{
12046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
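 /* Worked example: cbw with AL=80h yields AX=FF80h; cwde with AX=8000h
 yields EAX=FFFF8000h. The cases below do the sign extension with a
 sign-bit test plus OR/AND masks rather than an arithmetic shift. */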
12047 switch (pVCpu->iem.s.enmEffOpSize)
12048 {
12049 case IEMMODE_16BIT:
12050 IEMOP_MNEMONIC(cbw, "cbw");
12051 IEM_MC_BEGIN(0, 1);
12052 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
12053 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
12054 } IEM_MC_ELSE() {
12055 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
12056 } IEM_MC_ENDIF();
12057 IEM_MC_ADVANCE_RIP();
12058 IEM_MC_END();
12059 return VINF_SUCCESS;
12060
12061 case IEMMODE_32BIT:
12062 IEMOP_MNEMONIC(cwde, "cwde");
12063 IEM_MC_BEGIN(0, 1);
12064 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
12065 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
12066 } IEM_MC_ELSE() {
12067 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
12068 } IEM_MC_ENDIF();
12069 IEM_MC_ADVANCE_RIP();
12070 IEM_MC_END();
12071 return VINF_SUCCESS;
12072
12073 case IEMMODE_64BIT:
12074 IEMOP_MNEMONIC(cdqe, "cdqe");
12075 IEM_MC_BEGIN(0, 1);
12076 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
12077 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
12078 } IEM_MC_ELSE() {
12079 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
12080 } IEM_MC_ENDIF();
12081 IEM_MC_ADVANCE_RIP();
12082 IEM_MC_END();
12083 return VINF_SUCCESS;
12084
12085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12086 }
12087}
12088
12089
12090/** Opcode 0x99. */
12091FNIEMOP_DEF(iemOp_cwd)
12092{
12093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12094 switch (pVCpu->iem.s.enmEffOpSize)
12095 {
12096 case IEMMODE_16BIT:
12097 IEMOP_MNEMONIC(cwd, "cwd");
12098 IEM_MC_BEGIN(0, 1);
12099 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
12100 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
12101 } IEM_MC_ELSE() {
12102 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
12103 } IEM_MC_ENDIF();
12104 IEM_MC_ADVANCE_RIP();
12105 IEM_MC_END();
12106 return VINF_SUCCESS;
12107
12108 case IEMMODE_32BIT:
12109 IEMOP_MNEMONIC(cdq, "cdq");
12110 IEM_MC_BEGIN(0, 1);
12111 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
12112 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
12113 } IEM_MC_ELSE() {
12114 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
12115 } IEM_MC_ENDIF();
12116 IEM_MC_ADVANCE_RIP();
12117 IEM_MC_END();
12118 return VINF_SUCCESS;
12119
12120 case IEMMODE_64BIT:
12121 IEMOP_MNEMONIC(cqo, "cqo");
12122 IEM_MC_BEGIN(0, 1);
12123 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
12124 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
12125 } IEM_MC_ELSE() {
12126 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
12127 } IEM_MC_ENDIF();
12128 IEM_MC_ADVANCE_RIP();
12129 IEM_MC_END();
12130 return VINF_SUCCESS;
12131
12132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12133 }
12134}
12135
12136
12137/** Opcode 0x9a. */
12138FNIEMOP_DEF(iemOp_call_Ap)
12139{
12140 IEMOP_MNEMONIC(call_Ap, "call Ap");
12141 IEMOP_HLP_NO_64BIT();
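 /* E.g. with 16-bit operand size, 9A 78 56 34 12 decodes as
 call 1234h:5678h - the offset comes first, the selector last. */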
12142
12143 /* Decode the far pointer address and pass it on to the far call C implementation. */
12144 uint32_t offSeg;
12145 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12146 IEM_OPCODE_GET_NEXT_U32(&offSeg);
12147 else
12148 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
12149 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
12150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12151 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
12152}
12153
12154
12155/** Opcode 0x9b. (aka fwait) */
12156FNIEMOP_DEF(iemOp_wait)
12157{
12158 IEMOP_MNEMONIC(wait, "wait");
12159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12160
12161 IEM_MC_BEGIN(0, 0);
12162 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
12163 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12164 IEM_MC_ADVANCE_RIP();
12165 IEM_MC_END();
12166 return VINF_SUCCESS;
12167}
12168
12169
12170/** Opcode 0x9c. */
12171FNIEMOP_DEF(iemOp_pushf_Fv)
12172{
12173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12174 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12175 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
12176}
12177
12178
12179/** Opcode 0x9d. */
12180FNIEMOP_DEF(iemOp_popf_Fv)
12181{
12182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12183 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12184 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
12185}
12186
12187
12188/** Opcode 0x9e. */
12189FNIEMOP_DEF(iemOp_sahf)
12190{
12191 IEMOP_MNEMONIC(sahf, "sahf");
12192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12193 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12194 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12195 return IEMOP_RAISE_INVALID_OPCODE();
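 /* AH is copied into SF, ZF, AF, PF and CF (mask D5h) and the reserved
 bit 1 is forced to one, so the low EFLAGS byte becomes (AH & D5h) | 2
 while the rest of EFLAGS is left untouched. */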
12196 IEM_MC_BEGIN(0, 2);
12197 IEM_MC_LOCAL(uint32_t, u32Flags);
12198 IEM_MC_LOCAL(uint32_t, EFlags);
12199 IEM_MC_FETCH_EFLAGS(EFlags);
12200 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
12201 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
12202 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
12203 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
12204 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
12205 IEM_MC_COMMIT_EFLAGS(EFlags);
12206 IEM_MC_ADVANCE_RIP();
12207 IEM_MC_END();
12208 return VINF_SUCCESS;
12209}
12210
12211
12212/** Opcode 0x9f. */
12213FNIEMOP_DEF(iemOp_lahf)
12214{
12215 IEMOP_MNEMONIC(lahf, "lahf");
12216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12217 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12218 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12219 return IEMOP_RAISE_INVALID_OPCODE();
12220 IEM_MC_BEGIN(0, 1);
12221 IEM_MC_LOCAL(uint8_t, u8Flags);
12222 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
12223 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
12224 IEM_MC_ADVANCE_RIP();
12225 IEM_MC_END();
12226 return VINF_SUCCESS;
12227}
12228
12229
12230/**
12231 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
12232 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
12233 * prefixes. Will return on failures.
12234 * @param a_GCPtrMemOff The variable to store the offset in.
12235 */
12236#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
12237 do \
12238 { \
12239 switch (pVCpu->iem.s.enmEffAddrMode) \
12240 { \
12241 case IEMMODE_16BIT: \
12242 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
12243 break; \
12244 case IEMMODE_32BIT: \
12245 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
12246 break; \
12247 case IEMMODE_64BIT: \
12248 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
12249 break; \
12250 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12251 } \
12252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12253 } while (0)
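/* Note that the moffs width follows the address size, so in 64-bit mode
 A1 is followed by an 8-byte offset by default (mov eax,[moffs64]) and a
 67h prefix shrinks it to 4 bytes. */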
12254
12255/** Opcode 0xa0. */
12256FNIEMOP_DEF(iemOp_mov_Al_Ob)
12257{
12258 /*
12259 * Get the offset and fend off lock prefixes.
12260 */
12261 RTGCPTR GCPtrMemOff;
12262 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12263
12264 /*
12265 * Fetch AL.
12266 */
12267 IEM_MC_BEGIN(0,1);
12268 IEM_MC_LOCAL(uint8_t, u8Tmp);
12269 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12270 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12271 IEM_MC_ADVANCE_RIP();
12272 IEM_MC_END();
12273 return VINF_SUCCESS;
12274}
12275
12276
12277/** Opcode 0xa1. */
12278FNIEMOP_DEF(iemOp_mov_rAX_Ov)
12279{
12280 /*
12281 * Get the offset and fend off lock prefixes.
12282 */
12283 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
12284 RTGCPTR GCPtrMemOff;
12285 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12286
12287 /*
12288 * Fetch rAX.
12289 */
12290 switch (pVCpu->iem.s.enmEffOpSize)
12291 {
12292 case IEMMODE_16BIT:
12293 IEM_MC_BEGIN(0,1);
12294 IEM_MC_LOCAL(uint16_t, u16Tmp);
12295 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12296 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
12297 IEM_MC_ADVANCE_RIP();
12298 IEM_MC_END();
12299 return VINF_SUCCESS;
12300
12301 case IEMMODE_32BIT:
12302 IEM_MC_BEGIN(0,1);
12303 IEM_MC_LOCAL(uint32_t, u32Tmp);
12304 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12305 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
12306 IEM_MC_ADVANCE_RIP();
12307 IEM_MC_END();
12308 return VINF_SUCCESS;
12309
12310 case IEMMODE_64BIT:
12311 IEM_MC_BEGIN(0,1);
12312 IEM_MC_LOCAL(uint64_t, u64Tmp);
12313 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12314 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
12315 IEM_MC_ADVANCE_RIP();
12316 IEM_MC_END();
12317 return VINF_SUCCESS;
12318
12319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12320 }
12321}
12322
12323
12324/** Opcode 0xa2. */
12325FNIEMOP_DEF(iemOp_mov_Ob_AL)
12326{
12327 /*
12328 * Get the offset and fend off lock prefixes.
12329 */
12330 RTGCPTR GCPtrMemOff;
12331 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12332
12333 /*
12334 * Store AL.
12335 */
12336 IEM_MC_BEGIN(0,1);
12337 IEM_MC_LOCAL(uint8_t, u8Tmp);
12338 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12339 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12340 IEM_MC_ADVANCE_RIP();
12341 IEM_MC_END();
12342 return VINF_SUCCESS;
12343}
12344
12345
12346/** Opcode 0xa3. */
12347FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12348{
12349 /*
12350 * Get the offset and fend off lock prefixes.
12351 */
12352 RTGCPTR GCPtrMemOff;
12353 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12354
12355 /*
12356 * Store rAX.
12357 */
12358 switch (pVCpu->iem.s.enmEffOpSize)
12359 {
12360 case IEMMODE_16BIT:
12361 IEM_MC_BEGIN(0,1);
12362 IEM_MC_LOCAL(uint16_t, u16Tmp);
12363 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12364 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12365 IEM_MC_ADVANCE_RIP();
12366 IEM_MC_END();
12367 return VINF_SUCCESS;
12368
12369 case IEMMODE_32BIT:
12370 IEM_MC_BEGIN(0,1);
12371 IEM_MC_LOCAL(uint32_t, u32Tmp);
12372 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12373 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12374 IEM_MC_ADVANCE_RIP();
12375 IEM_MC_END();
12376 return VINF_SUCCESS;
12377
12378 case IEMMODE_64BIT:
12379 IEM_MC_BEGIN(0,1);
12380 IEM_MC_LOCAL(uint64_t, u64Tmp);
12381 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12382 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12383 IEM_MC_ADVANCE_RIP();
12384 IEM_MC_END();
12385 return VINF_SUCCESS;
12386
12387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12388 }
12389}
12390
12391/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
12392#define IEM_MOVS_CASE(ValBits, AddrBits) \
12393 IEM_MC_BEGIN(0, 2); \
12394 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12395 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12396 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12397 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12398 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12399 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12400 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12401 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12402 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12403 } IEM_MC_ELSE() { \
12404 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12405 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12406 } IEM_MC_ENDIF(); \
12407 IEM_MC_ADVANCE_RIP(); \
12408 IEM_MC_END();
12409
12410/** Opcode 0xa4. */
12411FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
12412{
12413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12414
12415 /*
12416 * Use the C implementation if a repeat prefix is encountered.
12417 */
12418 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12419 {
12420 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
12421 switch (pVCpu->iem.s.enmEffAddrMode)
12422 {
12423 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
12424 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
12425 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
12426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12427 }
12428 }
12429 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
12430
12431 /*
12432 * Sharing case implementation with movs[wdq] below.
12433 */
12434 switch (pVCpu->iem.s.enmEffAddrMode)
12435 {
12436 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
12437 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
12438 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
12439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12440 }
12441 return VINF_SUCCESS;
12442}
12443
12444
12445/** Opcode 0xa5. */
12446FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12447{
12448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12449
12450 /*
12451 * Use the C implementation if a repeat prefix is encountered.
12452 */
12453 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12454 {
12455 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12456 switch (pVCpu->iem.s.enmEffOpSize)
12457 {
12458 case IEMMODE_16BIT:
12459 switch (pVCpu->iem.s.enmEffAddrMode)
12460 {
12461 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12462 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12463 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12465 }
12466 break;
12467 case IEMMODE_32BIT:
12468 switch (pVCpu->iem.s.enmEffAddrMode)
12469 {
12470 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12471 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12472 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12473 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12474 }
12475 case IEMMODE_64BIT:
12476 switch (pVCpu->iem.s.enmEffAddrMode)
12477 {
12478 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12479 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12480 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12482 }
12483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12484 }
12485 }
12486 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12487
12488 /*
12489 * Annoying double switch here.
12490 * Using ugly macro for implementing the cases, sharing it with movsb.
12491 */
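 /* E.g. in 64-bit mode 67 48 A5 is movsq with 32-bit addressing, i.e.
 the operand size and address size cases really are independent. */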
12492 switch (pVCpu->iem.s.enmEffOpSize)
12493 {
12494 case IEMMODE_16BIT:
12495 switch (pVCpu->iem.s.enmEffAddrMode)
12496 {
12497 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12498 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12499 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12501 }
12502 break;
12503
12504 case IEMMODE_32BIT:
12505 switch (pVCpu->iem.s.enmEffAddrMode)
12506 {
12507 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12508 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12509 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12511 }
12512 break;
12513
12514 case IEMMODE_64BIT:
12515 switch (pVCpu->iem.s.enmEffAddrMode)
12516 {
12517 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12518 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12519 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12521 }
12522 break;
12523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12524 }
12525 return VINF_SUCCESS;
12526}
12527
12528#undef IEM_MOVS_CASE
12529
12530/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
12531#define IEM_CMPS_CASE(ValBits, AddrBits) \
12532 IEM_MC_BEGIN(3, 3); \
12533 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
12534 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
12535 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12536 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
12537 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12538 \
12539 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12540 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
12541 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12542 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
12543 IEM_MC_REF_LOCAL(puValue1, uValue1); \
12544 IEM_MC_REF_EFLAGS(pEFlags); \
12545 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
12546 \
12547 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12548 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12549 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12550 } IEM_MC_ELSE() { \
12551 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12552 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12553 } IEM_MC_ENDIF(); \
12554 IEM_MC_ADVANCE_RIP(); \
12555 IEM_MC_END();
12556
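/* Note: uValue1 is read into a local and passed by reference, presumably
 because the iemAImpl_cmp_uXX workers take a writable first operand;
 cmps itself never writes memory. */
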
12557/** Opcode 0xa6. */
12558FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
12559{
12560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12561
12562 /*
12563 * Use the C implementation if a repeat prefix is encountered.
12564 */
12565 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12566 {
12567 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
12568 switch (pVCpu->iem.s.enmEffAddrMode)
12569 {
12570 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12571 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12572 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12573 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12574 }
12575 }
12576 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12577 {
12578 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
12579 switch (pVCpu->iem.s.enmEffAddrMode)
12580 {
12581 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12582 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12583 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12585 }
12586 }
12587 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
12588
12589 /*
12590 * Sharing case implementation with cmps[wdq] below.
12591 */
12592 switch (pVCpu->iem.s.enmEffAddrMode)
12593 {
12594 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
12595 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
12596 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
12597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12598 }
12599 return VINF_SUCCESS;
12600
12601}
12602
12603
12604/** Opcode 0xa7. */
12605FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12606{
12607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12608
12609 /*
12610 * Use the C implementation if a repeat prefix is encountered.
12611 */
12612 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12613 {
12614 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12615 switch (pVCpu->iem.s.enmEffOpSize)
12616 {
12617 case IEMMODE_16BIT:
12618 switch (pVCpu->iem.s.enmEffAddrMode)
12619 {
12620 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12621 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12622 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12624 }
12625 break;
12626 case IEMMODE_32BIT:
12627 switch (pVCpu->iem.s.enmEffAddrMode)
12628 {
12629 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12630 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12631 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12633 }
12634 case IEMMODE_64BIT:
12635 switch (pVCpu->iem.s.enmEffAddrMode)
12636 {
12637 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12638 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12639 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12641 }
12642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12643 }
12644 }
12645
12646 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12647 {
12648 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12649 switch (pVCpu->iem.s.enmEffOpSize)
12650 {
12651 case IEMMODE_16BIT:
12652 switch (pVCpu->iem.s.enmEffAddrMode)
12653 {
12654 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12655 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12656 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12658 }
12659 break;
12660 case IEMMODE_32BIT:
12661 switch (pVCpu->iem.s.enmEffAddrMode)
12662 {
12663 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12664 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12665 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12667 }
12668 case IEMMODE_64BIT:
12669 switch (pVCpu->iem.s.enmEffAddrMode)
12670 {
12671 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12672 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12673 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12675 }
12676 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12677 }
12678 }
12679
12680 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12681
12682 /*
12683 * Annoying double switch here.
12684 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12685 */
12686 switch (pVCpu->iem.s.enmEffOpSize)
12687 {
12688 case IEMMODE_16BIT:
12689 switch (pVCpu->iem.s.enmEffAddrMode)
12690 {
12691 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12692 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12693 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12695 }
12696 break;
12697
12698 case IEMMODE_32BIT:
12699 switch (pVCpu->iem.s.enmEffAddrMode)
12700 {
12701 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12702 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12703 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12705 }
12706 break;
12707
12708 case IEMMODE_64BIT:
12709 switch (pVCpu->iem.s.enmEffAddrMode)
12710 {
12711 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12712 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12713 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12715 }
12716 break;
12717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12718 }
12719 return VINF_SUCCESS;
12720
12721}
12722
12723#undef IEM_CMPS_CASE
12724
12725/** Opcode 0xa8. */
12726FNIEMOP_DEF(iemOp_test_AL_Ib)
12727{
12728 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
12729 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12730 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
12731}
12732
12733
12734/** Opcode 0xa9. */
12735FNIEMOP_DEF(iemOp_test_eAX_Iz)
12736{
12737 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
12738 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12739 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
12740}
12741
12742
12743/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
12744#define IEM_STOS_CASE(ValBits, AddrBits) \
12745 IEM_MC_BEGIN(0, 2); \
12746 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12747 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12748 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
12749 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12750 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12751 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12752 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12753 } IEM_MC_ELSE() { \
12754 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12755 } IEM_MC_ENDIF(); \
12756 IEM_MC_ADVANCE_RIP(); \
12757 IEM_MC_END();
12758
12759/** Opcode 0xaa. */
12760FNIEMOP_DEF(iemOp_stosb_Yb_AL)
12761{
12762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12763
12764 /*
12765 * Use the C implementation if a repeat prefix is encountered.
12766 */
12767 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12768 {
12769 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
12770 switch (pVCpu->iem.s.enmEffAddrMode)
12771 {
12772 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
12773 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
12774 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
12775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12776 }
12777 }
12778 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
12779
12780 /*
12781 * Sharing case implementation with stos[wdq] below.
12782 */
12783 switch (pVCpu->iem.s.enmEffAddrMode)
12784 {
12785 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
12786 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
12787 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
12788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12789 }
12790 return VINF_SUCCESS;
12791}
12792
12793
12794/** Opcode 0xab. */
12795FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12796{
12797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12798
12799 /*
12800 * Use the C implementation if a repeat prefix is encountered.
12801 */
12802 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12803 {
12804 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12805 switch (pVCpu->iem.s.enmEffOpSize)
12806 {
12807 case IEMMODE_16BIT:
12808 switch (pVCpu->iem.s.enmEffAddrMode)
12809 {
12810 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12811 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12812 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12813 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12814 }
12815 break;
12816 case IEMMODE_32BIT:
12817 switch (pVCpu->iem.s.enmEffAddrMode)
12818 {
12819 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12820 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12821 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12822 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12823 }
12824 case IEMMODE_64BIT:
12825 switch (pVCpu->iem.s.enmEffAddrMode)
12826 {
12827 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12828 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12829 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12831 }
12832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12833 }
12834 }
12835 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12836
12837 /*
12838 * Annoying double switch here.
12839 * Using ugly macro for implementing the cases, sharing it with stosb.
12840 */
12841 switch (pVCpu->iem.s.enmEffOpSize)
12842 {
12843 case IEMMODE_16BIT:
12844 switch (pVCpu->iem.s.enmEffAddrMode)
12845 {
12846 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12847 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12848 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12849 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12850 }
12851 break;
12852
12853 case IEMMODE_32BIT:
12854 switch (pVCpu->iem.s.enmEffAddrMode)
12855 {
12856 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12857 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12858 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12860 }
12861 break;
12862
12863 case IEMMODE_64BIT:
12864 switch (pVCpu->iem.s.enmEffAddrMode)
12865 {
12866 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12867 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12868 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12870 }
12871 break;
12872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12873 }
12874 return VINF_SUCCESS;
12875}
12876
12877#undef IEM_STOS_CASE
12878
12879/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
12880#define IEM_LODS_CASE(ValBits, AddrBits) \
12881 IEM_MC_BEGIN(0, 2); \
12882 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12883 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12884 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12885 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12886 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
12887 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12888 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12889 } IEM_MC_ELSE() { \
12890 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12891 } IEM_MC_ENDIF(); \
12892 IEM_MC_ADVANCE_RIP(); \
12893 IEM_MC_END();
12894
12895/** Opcode 0xac. */
12896FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
12897{
12898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12899
12900 /*
12901 * Use the C implementation if a repeat prefix is encountered.
12902 */
12903 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12904 {
12905 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
12906 switch (pVCpu->iem.s.enmEffAddrMode)
12907 {
12908 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
12909 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
12910 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
12911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12912 }
12913 }
12914 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
12915
12916 /*
12917 * Sharing case implementation with lods[wdq] below.
12918 */
12919 switch (pVCpu->iem.s.enmEffAddrMode)
12920 {
12921 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
12922 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
12923 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
12924 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12925 }
12926 return VINF_SUCCESS;
12927}
12928
12929
12930/** Opcode 0xad. */
12931FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12932{
12933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12934
12935 /*
12936 * Use the C implementation if a repeat prefix is encountered.
12937 */
12938 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12939 {
12940 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12941 switch (pVCpu->iem.s.enmEffOpSize)
12942 {
12943 case IEMMODE_16BIT:
12944 switch (pVCpu->iem.s.enmEffAddrMode)
12945 {
12946 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12947 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12948 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12949 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12950 }
12951 break;
12952 case IEMMODE_32BIT:
12953 switch (pVCpu->iem.s.enmEffAddrMode)
12954 {
12955 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12956 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12957 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12958 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12959 }
12960 case IEMMODE_64BIT:
12961 switch (pVCpu->iem.s.enmEffAddrMode)
12962 {
12963 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12964 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12965 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12966 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12967 }
12968 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12969 }
12970 }
12971 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12972
12973 /*
12974 * Annoying double switch here.
12975 * Using ugly macro for implementing the cases, sharing it with lodsb.
12976 */
12977 switch (pVCpu->iem.s.enmEffOpSize)
12978 {
12979 case IEMMODE_16BIT:
12980 switch (pVCpu->iem.s.enmEffAddrMode)
12981 {
12982 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12983 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12984 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12985 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12986 }
12987 break;
12988
12989 case IEMMODE_32BIT:
12990 switch (pVCpu->iem.s.enmEffAddrMode)
12991 {
12992 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12993 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12994 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12995 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12996 }
12997 break;
12998
12999 case IEMMODE_64BIT:
13000 switch (pVCpu->iem.s.enmEffAddrMode)
13001 {
13002 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13003 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
13004 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
13005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13006 }
13007 break;
13008 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13009 }
13010 return VINF_SUCCESS;
13011}
13012
13013#undef IEM_LODS_CASE
13014
13015/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
13016#define IEM_SCAS_CASE(ValBits, AddrBits) \
13017 IEM_MC_BEGIN(3, 2); \
13018 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
13019 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
13020 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
13021 IEM_MC_LOCAL(RTGCPTR, uAddr); \
13022 \
13023 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
13024 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
13025 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
13026 IEM_MC_REF_EFLAGS(pEFlags); \
13027 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
13028 \
13029 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
13030 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
13031 } IEM_MC_ELSE() { \
13032 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
13033 } IEM_MC_ENDIF(); \
13034 IEM_MC_ADVANCE_RIP(); \
13035 IEM_MC_END();
13036
13037/** Opcode 0xae. */
13038FNIEMOP_DEF(iemOp_scasb_AL_Xb)
13039{
13040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13041
13042 /*
13043 * Use the C implementation if a repeat prefix is encountered.
13044 */
13045 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
13046 {
13047 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
13048 switch (pVCpu->iem.s.enmEffAddrMode)
13049 {
13050 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
13051 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
13052 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
13053 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13054 }
13055 }
13056 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13057 {
13058 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
13059 switch (pVCpu->iem.s.enmEffAddrMode)
13060 {
13061 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
13062 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
13063 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
13064 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13065 }
13066 }
13067 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
13068
13069 /*
13070 * Sharing case implementation with scas[wdq] below.
13071 */
13072 switch (pVCpu->iem.s.enmEffAddrMode)
13073 {
13074 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
13075 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
13076 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
13077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13078 }
13079 return VINF_SUCCESS;
13080}
13081
13082
13083/** Opcode 0xaf. */
13084FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
13085{
13086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13087
13088 /*
13089 * Use the C implementation if a repeat prefix is encountered.
13090 */
13091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
13092 {
13093 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
13094 switch (pVCpu->iem.s.enmEffOpSize)
13095 {
13096 case IEMMODE_16BIT:
13097 switch (pVCpu->iem.s.enmEffAddrMode)
13098 {
13099 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
13100 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
13101 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
13102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13103 }
13104 break;
13105 case IEMMODE_32BIT:
13106 switch (pVCpu->iem.s.enmEffAddrMode)
13107 {
13108 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
13109 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
13110 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
13111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13112 }
13113 case IEMMODE_64BIT:
13114 switch (pVCpu->iem.s.enmEffAddrMode)
13115 {
13116 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Isn't this wrong? We can do 32-bit addressing in 64-bit mode, but not 16-bit, right? */
13117 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
13118 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
13119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13120 }
13121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13122 }
13123 }
13124 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13125 {
13126 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
13127 switch (pVCpu->iem.s.enmEffOpSize)
13128 {
13129 case IEMMODE_16BIT:
13130 switch (pVCpu->iem.s.enmEffAddrMode)
13131 {
13132 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
13133 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
13134 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
13135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13136 }
13137 break;
13138 case IEMMODE_32BIT:
13139 switch (pVCpu->iem.s.enmEffAddrMode)
13140 {
13141 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
13142 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
13143 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
13144 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13145 }
13146 case IEMMODE_64BIT:
13147 switch (pVCpu->iem.s.enmEffAddrMode)
13148 {
13149 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
13150 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
13151 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
13152 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13153 }
13154 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13155 }
13156 }
13157 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
13158
13159 /*
13160 * Annoying double switch here.
13161 * Using ugly macro for implementing the cases, sharing it with scasb.
13162 */
13163 switch (pVCpu->iem.s.enmEffOpSize)
13164 {
13165 case IEMMODE_16BIT:
13166 switch (pVCpu->iem.s.enmEffAddrMode)
13167 {
13168 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
13169 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
13170 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
13171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13172 }
13173 break;
13174
13175 case IEMMODE_32BIT:
13176 switch (pVCpu->iem.s.enmEffAddrMode)
13177 {
13178 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
13179 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
13180 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
13181 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13182 }
13183 break;
13184
13185 case IEMMODE_64BIT:
13186 switch (pVCpu->iem.s.enmEffAddrMode)
13187 {
13188 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13189 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
13190 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
13191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13192 }
13193 break;
13194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13195 }
13196 return VINF_SUCCESS;
13197}
13198
13199#undef IEM_SCAS_CASE
13200
13201/**
13202 * Common 'mov r8, imm8' helper.
13203 */
13204FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
13205{
13206 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13208
13209 IEM_MC_BEGIN(0, 1);
13210 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
13211 IEM_MC_STORE_GREG_U8(iReg, u8Value);
13212 IEM_MC_ADVANCE_RIP();
13213 IEM_MC_END();
13214
13215 return VINF_SUCCESS;
13216}
13217
13218
13219/** Opcode 0xb0. */
13220FNIEMOP_DEF(iemOp_mov_AL_Ib)
13221{
13222 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
13223 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13224}
13225
13226
13227/** Opcode 0xb1. */
13228FNIEMOP_DEF(iemOp_CL_Ib)
13229{
13230 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
13231 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13232}
13233
13234
13235/** Opcode 0xb2. */
13236FNIEMOP_DEF(iemOp_DL_Ib)
13237{
13238 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
13239 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13240}
13241
13242
13243/** Opcode 0xb3. */
13244FNIEMOP_DEF(iemOp_BL_Ib)
13245{
13246 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
13247 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13248}
13249
13250
13251/** Opcode 0xb4. */
13252FNIEMOP_DEF(iemOp_mov_AH_Ib)
13253{
13254 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
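 /* With any REX prefix B4 encodes SPL (or R12B with REX.B) instead of AH;
 the U8 register accessors are expected to pick high byte vs. low byte
 based on whether a REX prefix is present. */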
13255 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13256}
13257
13258
13259/** Opcode 0xb5. */
13260FNIEMOP_DEF(iemOp_CH_Ib)
13261{
13262 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
13263 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13264}
13265
13266
13267/** Opcode 0xb6. */
13268FNIEMOP_DEF(iemOp_DH_Ib)
13269{
13270 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
13271 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13272}
13273
13274
13275/** Opcode 0xb7. */
13276FNIEMOP_DEF(iemOp_BH_Ib)
13277{
13278 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
13279 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13280}
13281
13282
13283/**
13284 * Common 'mov regX,immX' helper.
13285 */
13286FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13287{
13288 switch (pVCpu->iem.s.enmEffOpSize)
13289 {
13290 case IEMMODE_16BIT:
13291 {
13292 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13294
13295 IEM_MC_BEGIN(0, 1);
13296 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13297 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13298 IEM_MC_ADVANCE_RIP();
13299 IEM_MC_END();
13300 break;
13301 }
13302
13303 case IEMMODE_32BIT:
13304 {
13305 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13307
13308 IEM_MC_BEGIN(0, 1);
13309 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13310 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13311 IEM_MC_ADVANCE_RIP();
13312 IEM_MC_END();
13313 break;
13314 }
13315 case IEMMODE_64BIT:
13316 {
13317 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
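 /* E.g. 48 B8 88 77 66 55 44 33 22 11 is mov rax,1122334455667788h;
 B8+r with REX.W is the only encoding carrying a full 8-byte immediate. */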
13318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13319
13320 IEM_MC_BEGIN(0, 1);
13321 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13322 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13323 IEM_MC_ADVANCE_RIP();
13324 IEM_MC_END();
13325 break;
13326 }
13327 }
13328
13329 return VINF_SUCCESS;
13330}
13331
13332
13333/** Opcode 0xb8. */
13334FNIEMOP_DEF(iemOp_eAX_Iv)
13335{
13336 IEMOP_MNEMONIC(mov_rAX_Iv, "mov rAX,Iv");
13337 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13338}
13339
13340
13341/** Opcode 0xb9. */
13342FNIEMOP_DEF(iemOp_eCX_Iv)
13343{
13344 IEMOP_MNEMONIC(mov_rCX_Iv, "mov rCX,Iv");
13345 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13346}
13347
13348
13349/** Opcode 0xba. */
13350FNIEMOP_DEF(iemOp_eDX_Iv)
13351{
13352 IEMOP_MNEMONIC(mov_rDX_Iv, "mov rDX,Iv");
13353 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13354}
13355
13356
13357/** Opcode 0xbb. */
13358FNIEMOP_DEF(iemOp_eBX_Iv)
13359{
13360 IEMOP_MNEMONIC(mov_rBX_Iv, "mov rBX,Iv");
13361 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13362}
13363
13364
13365/** Opcode 0xbc. */
13366FNIEMOP_DEF(iemOp_eSP_Iv)
13367{
13368 IEMOP_MNEMONIC(mov_rSP_Iv, "mov rSP,Iv");
13369 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13370}
13371
13372
13373/** Opcode 0xbd. */
13374FNIEMOP_DEF(iemOp_eBP_Iv)
13375{
13376 IEMOP_MNEMONIC(mov_rBP_Iv, "mov rBP,Iv");
13377 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13378}
13379
13380
13381/** Opcode 0xbe. */
13382FNIEMOP_DEF(iemOp_eSI_Iv)
13383{
13384 IEMOP_MNEMONIC(mov_rSI_Iv, "mov rSI,Iv");
13385 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13386}
13387
13388
13389/** Opcode 0xbf. */
13390FNIEMOP_DEF(iemOp_eDI_Iv)
13391{
13392 IEMOP_MNEMONIC(mov_rDI_Iv, "mov rDI,Iv");
13393 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13394}
13395
13396
13397/** Opcode 0xc0. */
13398FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
13399{
13400 IEMOP_HLP_MIN_186();
13401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13402 PCIEMOPSHIFTSIZES pImpl;
13403 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13404 {
13405 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
13406 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
13407 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
13408 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
13409 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
13410 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
13411 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
13412 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13413 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
13414 }
13415 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
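 /* E.g. C0 E0 04 is shl al,4 (mod=3, reg=4, rm=AL); the assembly helpers
 are presumed to mask the count to 5 bits like the hardware does. */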
13416
13417 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13418 {
13419 /* register */
13420 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13422 IEM_MC_BEGIN(3, 0);
13423 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13424 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13425 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13426 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13427 IEM_MC_REF_EFLAGS(pEFlags);
13428 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13429 IEM_MC_ADVANCE_RIP();
13430 IEM_MC_END();
13431 }
13432 else
13433 {
13434 /* memory */
13435 IEM_MC_BEGIN(3, 2);
13436 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13437 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13438 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13440
13441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13442 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13443 IEM_MC_ASSIGN(cShiftArg, cShift);
13444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13445 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13446 IEM_MC_FETCH_EFLAGS(EFlags);
13447 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13448
13449 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13450 IEM_MC_COMMIT_EFLAGS(EFlags);
13451 IEM_MC_ADVANCE_RIP();
13452 IEM_MC_END();
13453 }
13454 return VINF_SUCCESS;
13455}
13456
13457
13458/** Opcode 0xc1. */
13459FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
13460{
13461 IEMOP_HLP_MIN_186();
13462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13463 PCIEMOPSHIFTSIZES pImpl;
13464 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13465 {
13466 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
13467 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
13468 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
13469 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
13470 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
13471 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
13472 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
13473 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13474 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
13475 }
13476 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13477
13478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13479 {
13480 /* register */
13481 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13483 switch (pVCpu->iem.s.enmEffOpSize)
13484 {
13485 case IEMMODE_16BIT:
13486 IEM_MC_BEGIN(3, 0);
13487 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13488 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13489 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13490 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13491 IEM_MC_REF_EFLAGS(pEFlags);
13492 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13493 IEM_MC_ADVANCE_RIP();
13494 IEM_MC_END();
13495 return VINF_SUCCESS;
13496
13497 case IEMMODE_32BIT:
13498 IEM_MC_BEGIN(3, 0);
13499 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13500 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13501 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13502 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13503 IEM_MC_REF_EFLAGS(pEFlags);
13504 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13505 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13506 IEM_MC_ADVANCE_RIP();
13507 IEM_MC_END();
13508 return VINF_SUCCESS;
13509
13510 case IEMMODE_64BIT:
13511 IEM_MC_BEGIN(3, 0);
13512 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13513 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13514 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13515 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13516 IEM_MC_REF_EFLAGS(pEFlags);
13517 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13518 IEM_MC_ADVANCE_RIP();
13519 IEM_MC_END();
13520 return VINF_SUCCESS;
13521
13522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13523 }
13524 }
13525 else
13526 {
13527 /* memory */
13528 switch (pVCpu->iem.s.enmEffOpSize)
13529 {
13530 case IEMMODE_16BIT:
13531 IEM_MC_BEGIN(3, 2);
13532 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13533 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13534 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13536
13537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13538 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13539 IEM_MC_ASSIGN(cShiftArg, cShift);
13540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13541 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13542 IEM_MC_FETCH_EFLAGS(EFlags);
13543 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13544
13545 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13546 IEM_MC_COMMIT_EFLAGS(EFlags);
13547 IEM_MC_ADVANCE_RIP();
13548 IEM_MC_END();
13549 return VINF_SUCCESS;
13550
13551 case IEMMODE_32BIT:
13552 IEM_MC_BEGIN(3, 2);
13553 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13554 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13555 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13557
13558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13559 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13560 IEM_MC_ASSIGN(cShiftArg, cShift);
13561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13562 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13563 IEM_MC_FETCH_EFLAGS(EFlags);
13564 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13565
13566 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13567 IEM_MC_COMMIT_EFLAGS(EFlags);
13568 IEM_MC_ADVANCE_RIP();
13569 IEM_MC_END();
13570 return VINF_SUCCESS;
13571
13572 case IEMMODE_64BIT:
13573 IEM_MC_BEGIN(3, 2);
13574 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13575 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13576 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13578
13579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13580 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13581 IEM_MC_ASSIGN(cShiftArg, cShift);
13582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13583 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13584 IEM_MC_FETCH_EFLAGS(EFlags);
13585 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13586
13587 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13588 IEM_MC_COMMIT_EFLAGS(EFlags);
13589 IEM_MC_ADVANCE_RIP();
13590 IEM_MC_END();
13591 return VINF_SUCCESS;
13592
13593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13594 }
13595 }
13596}
13597
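/*
 * Illustrative sketch of the ModRM dissection the group 2 decoders above
 * perform with the X86_MODRM_* masks (example only, not used by IEM; the
 * helper name is made up):
 */
#if 0
static void iemExampleSplitModRm(uint8_t bRm, uint8_t *pMod, uint8_t *pReg, uint8_t *pRm)
{
    *pMod = (bRm >> 6) & 3;  /* 3 = register operand, 0..2 = memory operand (displacement size varies) */
    *pReg = (bRm >> 3) & 7;  /* group 2 opcode extension: 0=rol 1=ror 2=rcl 3=rcr 4=shl 5=shr 7=sar, 6=#UD */
    *pRm  = bRm & 7;         /* register/base encoding, extended by REX.B in 64-bit mode */
}
#endif
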
13598
13599/** Opcode 0xc2. */
13600FNIEMOP_DEF(iemOp_retn_Iw)
13601{
13602 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
13603 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13605 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13606 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
13607}
13608
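/*
 * Conceptual model of iemCImpl_retn for the Iw form (sketch only, assuming a
 * flat 64-bit stack; the real code also covers 16/32-bit operand sizes and
 * the canonical/limit checks, and pfnPop64 is a made-up stand-in for the
 * stack accessor):
 */
#if 0
static void iemExampleRetnIw(uint64_t *pRip, uint64_t *pRsp, uint16_t cbToPop,
                             uint64_t (*pfnPop64)(uint64_t *pRsp))
{
    *pRip  = pfnPop64(pRsp); /* the return address comes off the stack first, */
    *pRsp += cbToPop;        /* then the immediate releases the callee-cleaned parameter bytes. */
}
#endif
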
13609
13610/** Opcode 0xc3. */
13611FNIEMOP_DEF(iemOp_retn)
13612{
13613 IEMOP_MNEMONIC(retn, "retn");
13614 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13616 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
13617}
13618
13619
13620/** Opcode 0xc4. */
13621FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
13622{
13623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13624 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
13625 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13626 {
13627 IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
13628        /* The LES instruction is invalid in 64-bit mode. In legacy and
13629           compatibility mode it is invalid with MOD=3.
13630           The use as a VEX prefix is made possible by assigning the inverted
13631           REX.R to the top MOD bit, and the top bit in the inverted register
13632           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
13633           code to accessing registers 0..7 in this VEX form. */
13634 /** @todo VEX: Just use new tables for it. */
13635 return IEMOP_RAISE_INVALID_OPCODE();
13636 }
13637 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
13638 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
13639}
13640
13641
13642/** Opcode 0xc5. */
13643FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
13644{
13645    /* The LDS instruction is invalid in 64-bit mode. In legacy and
13646       compatibility mode it is invalid with MOD=3.
13647       The use as a VEX prefix is made possible by assigning the inverted
13648       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
13649       outside of 64-bit mode. VEX is not available in real or v86 mode. */
13650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13651 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
13652 {
13653 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
13654 {
13655 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
13656 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
13657 }
13658 IEMOP_HLP_NO_REAL_OR_V86_MODE();
13659 }
13660
13661 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
13662    /** @todo Test when exactly the VEX conformance checks kick in during
13663     * instruction decoding and fetching (using \#PF). */
13664 uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
13665 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
13666 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
13667#if 0 /* will make sense of this next week... */
13668    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
13669 &&
13670 )
13671 {
13672
13673 }
13674#endif
13675
13676 /** @todo VEX: Just use new tables for it. */
13677 return IEMOP_RAISE_INVALID_OPCODE();
13678}
13679
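/*
 * For orientation, the payload layouts that make the LES/LDS reuse described
 * above possible (field extraction sketch only; names are ad hoc):
 *
 *   2-byte VEX (0xc5): payload = R' vvvv L pp         (R' = inverted REX.R)
 *   3-byte VEX (0xc4): byte1   = R' X' B' mmmmm, byte2 = W vvvv L pp
 */
#if 0
static void iemExampleDecodeVex2(uint8_t bVex, uint8_t *pfRexR, uint8_t *piVvvv, uint8_t *pfL, uint8_t *piPp)
{
    *pfRexR = !(bVex & 0x80);      /* stored inverted; overlaps the top ModRM.MOD bit in the LES encoding */
    *piVvvv = (~bVex >> 3) & 0xf;  /* second operand register, also inverted */
    *pfL    = (bVex >> 2) & 1;     /* vector length: 0 = 128-bit, 1 = 256-bit */
    *piPp   = bVex & 3;            /* implied prefix: 0=none, 1=0x66, 2=0xf3, 3=0xf2 */
}
#endif
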
13680
13681/** Opcode 0xc6. */
13682FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
13683{
13684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13685 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
13686 return IEMOP_RAISE_INVALID_OPCODE();
13687 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
13688
13689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13690 {
13691 /* register access */
13692 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13694 IEM_MC_BEGIN(0, 0);
13695 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
13696 IEM_MC_ADVANCE_RIP();
13697 IEM_MC_END();
13698 }
13699 else
13700 {
13701 /* memory access. */
13702 IEM_MC_BEGIN(0, 1);
13703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13705 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13707 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
13708 IEM_MC_ADVANCE_RIP();
13709 IEM_MC_END();
13710 }
13711 return VINF_SUCCESS;
13712}
13713
13714
13715/** Opcode 0xc7. */
13716FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
13717{
13718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13719    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
13720 return IEMOP_RAISE_INVALID_OPCODE();
13721 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
13722
13723 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13724 {
13725 /* register access */
13726 switch (pVCpu->iem.s.enmEffOpSize)
13727 {
13728 case IEMMODE_16BIT:
13729 IEM_MC_BEGIN(0, 0);
13730 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13732 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
13733 IEM_MC_ADVANCE_RIP();
13734 IEM_MC_END();
13735 return VINF_SUCCESS;
13736
13737 case IEMMODE_32BIT:
13738 IEM_MC_BEGIN(0, 0);
13739 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13741 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
13742 IEM_MC_ADVANCE_RIP();
13743 IEM_MC_END();
13744 return VINF_SUCCESS;
13745
13746 case IEMMODE_64BIT:
13747 IEM_MC_BEGIN(0, 0);
13748 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13750 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
13751 IEM_MC_ADVANCE_RIP();
13752 IEM_MC_END();
13753 return VINF_SUCCESS;
13754
13755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13756 }
13757 }
13758 else
13759 {
13760 /* memory access. */
13761 switch (pVCpu->iem.s.enmEffOpSize)
13762 {
13763 case IEMMODE_16BIT:
13764 IEM_MC_BEGIN(0, 1);
13765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13767 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13769 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
13770 IEM_MC_ADVANCE_RIP();
13771 IEM_MC_END();
13772 return VINF_SUCCESS;
13773
13774 case IEMMODE_32BIT:
13775 IEM_MC_BEGIN(0, 1);
13776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13778 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13780 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
13781 IEM_MC_ADVANCE_RIP();
13782 IEM_MC_END();
13783 return VINF_SUCCESS;
13784
13785 case IEMMODE_64BIT:
13786 IEM_MC_BEGIN(0, 1);
13787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13789 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13791 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
13792 IEM_MC_ADVANCE_RIP();
13793 IEM_MC_END();
13794 return VINF_SUCCESS;
13795
13796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13797 }
13798 }
13799}
13800
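/*
 * Why the 64-bit cases above use IEM_OPCODE_GET_NEXT_S32_SX_U64: an Iz
 * immediate never exceeds 32 bits, so for 64-bit operands it is fetched as
 * 32 bits and sign-extended.  Plain C equivalent (example only):
 */
#if 0
static uint64_t iemExampleSignExtendIz(uint32_t u32Imm)
{
    return (uint64_t)(int64_t)(int32_t)u32Imm; /* e.g. 0x80000000 -> 0xffffffff80000000 */
}
#endif
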
13801
13802
13803
13804/** Opcode 0xc8. */
13805FNIEMOP_DEF(iemOp_enter_Iw_Ib)
13806{
13807 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
13808 IEMOP_HLP_MIN_186();
13809 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13810 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
13811 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
13812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13813 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
13814}
13815
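/*
 * Rough model of the frame setup iemCImpl_enter performs, following the
 * architectural pseudo code (64-bit flat-stack sketch only; pfnPush64 and
 * pfnRead64 are made-up stand-ins for the real stack accessors):
 */
#if 0
static void iemExampleEnter(uint64_t *pRbp, uint64_t *pRsp, uint16_t cbFrame, uint8_t u8NestingLevel,
                            void (*pfnPush64)(uint64_t *pRsp, uint64_t uValue),
                            uint64_t (*pfnRead64)(uint64_t GCPtr))
{
    pfnPush64(pRsp, *pRbp);                      /* save the caller's frame pointer */
    uint64_t const uFrameTemp = *pRsp;
    u8NestingLevel &= 0x1f;                      /* the nesting level is taken modulo 32 */
    for (uint8_t i = 1; i < u8NestingLevel; i++)
    {
        *pRbp -= 8;                              /* walk and re-push the outer frame pointers */
        pfnPush64(pRsp, pfnRead64(*pRbp));
    }
    if (u8NestingLevel)
        pfnPush64(pRsp, uFrameTemp);             /* push the new frame pointer itself */
    *pRbp  = uFrameTemp;
    *pRsp -= cbFrame;                            /* reserve the local variable area */
}
#endif
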
13816
13817/** Opcode 0xc9. */
13818FNIEMOP_DEF(iemOp_leave)
13819{
13820 IEMOP_MNEMONIC(leave, "leave");
13821 IEMOP_HLP_MIN_186();
13822 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13824 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
13825}
13826
13827
13828/** Opcode 0xca. */
13829FNIEMOP_DEF(iemOp_retf_Iw)
13830{
13831 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
13832 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13834 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13835 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
13836}
13837
13838
13839/** Opcode 0xcb. */
13840FNIEMOP_DEF(iemOp_retf)
13841{
13842 IEMOP_MNEMONIC(retf, "retf");
13843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13844 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13845 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
13846}
13847
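/*
 * Conceptual same-privilege path of iemCImpl_retf used by the two opcodes
 * above (sketch; the real code also validates the new CS descriptor, handles
 * privilege-level changes and all three operand sizes; pfnPop64 is again a
 * made-up stand-in):
 */
#if 0
static void iemExampleRetfSamePriv(uint64_t *pRip, uint16_t *pCsSel, uint64_t *pRsp, uint16_t cbToPop,
                                   uint64_t (*pfnPop64)(uint64_t *pRsp))
{
    *pRip   = pfnPop64(pRsp);            /* offset first, */
    *pCsSel = (uint16_t)pfnPop64(pRsp);  /* then the selector (a whole stack slot is consumed), */
    *pRsp  += cbToPop;                   /* then retf Iw releases the caller's parameter bytes. */
}
#endif
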
13848
13849/** Opcode 0xcc. */
13850FNIEMOP_DEF(iemOp_int_3)
13851{
13852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13853 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13854}
13855
13856
13857/** Opcode 0xcd. */
13858FNIEMOP_DEF(iemOp_int_Ib)
13859{
13860 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13862 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13863}
13864
13865
13866/** Opcode 0xce. */
13867FNIEMOP_DEF(iemOp_into)
13868{
13869 IEMOP_MNEMONIC(into, "into");
13870 IEMOP_HLP_NO_64BIT();
13871
13872 IEM_MC_BEGIN(2, 0);
13873 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13874 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13875 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13876 IEM_MC_END();
13877 return VINF_SUCCESS;
13878}
13879
13880
13881/** Opcode 0xcf. */
13882FNIEMOP_DEF(iemOp_iret)
13883{
13884 IEMOP_MNEMONIC(iret, "iret");
13885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13886 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13887}
13888
13889
13890/** Opcode 0xd0. */
13891FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13892{
13893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13894 PCIEMOPSHIFTSIZES pImpl;
13895 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13896 {
13897 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13898 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13899 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13900 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13901 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13902 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13903 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13904 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13905 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13906 }
13907 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13908
13909 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13910 {
13911 /* register */
13912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13913 IEM_MC_BEGIN(3, 0);
13914 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13915 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13916 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13917 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13918 IEM_MC_REF_EFLAGS(pEFlags);
13919 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13920 IEM_MC_ADVANCE_RIP();
13921 IEM_MC_END();
13922 }
13923 else
13924 {
13925 /* memory */
13926 IEM_MC_BEGIN(3, 2);
13927 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13928 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13929 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13931
13932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13934 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13935 IEM_MC_FETCH_EFLAGS(EFlags);
13936 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13937
13938 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13939 IEM_MC_COMMIT_EFLAGS(EFlags);
13940 IEM_MC_ADVANCE_RIP();
13941 IEM_MC_END();
13942 }
13943 return VINF_SUCCESS;
13944}
13945
13946
13947
13948/** Opcode 0xd1. */
13949FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13950{
13951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13952 PCIEMOPSHIFTSIZES pImpl;
13953 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13954 {
13955 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13956 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13957 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13958 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13959 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13960 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13961 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13962 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13963 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13964 }
13965 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13966
13967 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13968 {
13969 /* register */
13970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13971 switch (pVCpu->iem.s.enmEffOpSize)
13972 {
13973 case IEMMODE_16BIT:
13974 IEM_MC_BEGIN(3, 0);
13975 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13976 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13977 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13978 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13979 IEM_MC_REF_EFLAGS(pEFlags);
13980 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13981 IEM_MC_ADVANCE_RIP();
13982 IEM_MC_END();
13983 return VINF_SUCCESS;
13984
13985 case IEMMODE_32BIT:
13986 IEM_MC_BEGIN(3, 0);
13987 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13988 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13989 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13990 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13991 IEM_MC_REF_EFLAGS(pEFlags);
13992 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13993 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13994 IEM_MC_ADVANCE_RIP();
13995 IEM_MC_END();
13996 return VINF_SUCCESS;
13997
13998 case IEMMODE_64BIT:
13999 IEM_MC_BEGIN(3, 0);
14000 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14001 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14002 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14003 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14004 IEM_MC_REF_EFLAGS(pEFlags);
14005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14006 IEM_MC_ADVANCE_RIP();
14007 IEM_MC_END();
14008 return VINF_SUCCESS;
14009
14010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14011 }
14012 }
14013 else
14014 {
14015 /* memory */
14016 switch (pVCpu->iem.s.enmEffOpSize)
14017 {
14018 case IEMMODE_16BIT:
14019 IEM_MC_BEGIN(3, 2);
14020 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14021 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14022 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14024
14025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14027 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14028 IEM_MC_FETCH_EFLAGS(EFlags);
14029 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14030
14031 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14032 IEM_MC_COMMIT_EFLAGS(EFlags);
14033 IEM_MC_ADVANCE_RIP();
14034 IEM_MC_END();
14035 return VINF_SUCCESS;
14036
14037 case IEMMODE_32BIT:
14038 IEM_MC_BEGIN(3, 2);
14039 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14040 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14041 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14043
14044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14046 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14047 IEM_MC_FETCH_EFLAGS(EFlags);
14048 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14049
14050 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14051 IEM_MC_COMMIT_EFLAGS(EFlags);
14052 IEM_MC_ADVANCE_RIP();
14053 IEM_MC_END();
14054 return VINF_SUCCESS;
14055
14056 case IEMMODE_64BIT:
14057 IEM_MC_BEGIN(3, 2);
14058 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14059 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14060 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14062
14063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14065 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14066 IEM_MC_FETCH_EFLAGS(EFlags);
14067 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14068
14069 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14070 IEM_MC_COMMIT_EFLAGS(EFlags);
14071 IEM_MC_ADVANCE_RIP();
14072 IEM_MC_END();
14073 return VINF_SUCCESS;
14074
14075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14076 }
14077 }
14078}
14079
14080
14081/** Opcode 0xd2. */
14082FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
14083{
14084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14085 PCIEMOPSHIFTSIZES pImpl;
14086 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14087 {
14088 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
14089 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
14090 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
14091 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
14092 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
14093 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
14094 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
14095 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14096 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
14097 }
14098 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14099
14100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14101 {
14102 /* register */
14103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14104 IEM_MC_BEGIN(3, 0);
14105 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14106 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14107 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14108 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14109 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14110 IEM_MC_REF_EFLAGS(pEFlags);
14111 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14112 IEM_MC_ADVANCE_RIP();
14113 IEM_MC_END();
14114 }
14115 else
14116 {
14117 /* memory */
14118 IEM_MC_BEGIN(3, 2);
14119 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14120 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14121 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14123
14124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14126 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14127 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14128 IEM_MC_FETCH_EFLAGS(EFlags);
14129 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14130
14131 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
14132 IEM_MC_COMMIT_EFLAGS(EFlags);
14133 IEM_MC_ADVANCE_RIP();
14134 IEM_MC_END();
14135 }
14136 return VINF_SUCCESS;
14137}
14138
14139
14140/** Opcode 0xd3. */
14141FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
14142{
14143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14144 PCIEMOPSHIFTSIZES pImpl;
14145 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14146 {
14147 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
14148 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
14149 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
14150 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
14151 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
14152 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
14153 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
14154 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14155 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
14156 }
14157 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14158
14159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14160 {
14161 /* register */
14162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14163 switch (pVCpu->iem.s.enmEffOpSize)
14164 {
14165 case IEMMODE_16BIT:
14166 IEM_MC_BEGIN(3, 0);
14167 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14168 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14169 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14170 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14171 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14172 IEM_MC_REF_EFLAGS(pEFlags);
14173 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14174 IEM_MC_ADVANCE_RIP();
14175 IEM_MC_END();
14176 return VINF_SUCCESS;
14177
14178 case IEMMODE_32BIT:
14179 IEM_MC_BEGIN(3, 0);
14180 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14181 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14182 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14183 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14184 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14185 IEM_MC_REF_EFLAGS(pEFlags);
14186 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14187 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
14188 IEM_MC_ADVANCE_RIP();
14189 IEM_MC_END();
14190 return VINF_SUCCESS;
14191
14192 case IEMMODE_64BIT:
14193 IEM_MC_BEGIN(3, 0);
14194 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14195 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14196 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14197 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14198 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14199 IEM_MC_REF_EFLAGS(pEFlags);
14200 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14201 IEM_MC_ADVANCE_RIP();
14202 IEM_MC_END();
14203 return VINF_SUCCESS;
14204
14205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14206 }
14207 }
14208 else
14209 {
14210 /* memory */
14211 switch (pVCpu->iem.s.enmEffOpSize)
14212 {
14213 case IEMMODE_16BIT:
14214 IEM_MC_BEGIN(3, 2);
14215 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14216 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14217 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14219
14220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14222 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14223 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14224 IEM_MC_FETCH_EFLAGS(EFlags);
14225 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14226
14227 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14228 IEM_MC_COMMIT_EFLAGS(EFlags);
14229 IEM_MC_ADVANCE_RIP();
14230 IEM_MC_END();
14231 return VINF_SUCCESS;
14232
14233 case IEMMODE_32BIT:
14234 IEM_MC_BEGIN(3, 2);
14235 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14236 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14237 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14239
14240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14242 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14243 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14244 IEM_MC_FETCH_EFLAGS(EFlags);
14245 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14246
14247 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14248 IEM_MC_COMMIT_EFLAGS(EFlags);
14249 IEM_MC_ADVANCE_RIP();
14250 IEM_MC_END();
14251 return VINF_SUCCESS;
14252
14253 case IEMMODE_64BIT:
14254 IEM_MC_BEGIN(3, 2);
14255 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14256 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14257 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14259
14260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14262 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14263 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14264 IEM_MC_FETCH_EFLAGS(EFlags);
14265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14266
14267 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14268 IEM_MC_COMMIT_EFLAGS(EFlags);
14269 IEM_MC_ADVANCE_RIP();
14270 IEM_MC_END();
14271 return VINF_SUCCESS;
14272
14273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14274 }
14275 }
14276}
14277
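/*
 * Note on the CL forms above: the count is masked before use, to 5 bits for
 * 8/16/32-bit operands and 6 bits for 64-bit ones (RCL/RCR on 8/16-bit
 * operands further reduce it modulo 9/17).  Example-only helper:
 */
#if 0
static uint8_t iemExampleMaskShiftCount(uint8_t cl, bool f64BitOperand)
{
    return cl & (f64BitOperand ? 0x3f : 0x1f);
}
#endif
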
14278/** Opcode 0xd4. */
14279FNIEMOP_DEF(iemOp_aam_Ib)
14280{
14281 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
14282 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14284 IEMOP_HLP_NO_64BIT();
14285 if (!bImm)
14286 return IEMOP_RAISE_DIVIDE_ERROR();
14287 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
14288}
14289
14290
14291/** Opcode 0xd5. */
14292FNIEMOP_DEF(iemOp_aad_Ib)
14293{
14294 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
14295 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14297 IEMOP_HLP_NO_64BIT();
14298 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
14299}
14300
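/*
 * Reference semantics of the two ASCII-adjust instructions above, minus the
 * flag updates which live in the assembly helpers (example only):
 */
#if 0
static void iemExampleAam(uint8_t *pAl, uint8_t *pAh, uint8_t bImm) /* bImm == 0 raises #DE instead */
{
    *pAh = (uint8_t)(*pAl / bImm);
    *pAl = (uint8_t)(*pAl % bImm);
}

static void iemExampleAad(uint8_t *pAl, uint8_t *pAh, uint8_t bImm)
{
    *pAl = (uint8_t)(*pAl + *pAh * bImm);
    *pAh = 0;
}
#endif
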
14301
14302/** Opcode 0xd6. */
14303FNIEMOP_DEF(iemOp_salc)
14304{
14305 IEMOP_MNEMONIC(salc, "salc");
14306    IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
14308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14309 IEMOP_HLP_NO_64BIT();
14310
14311 IEM_MC_BEGIN(0, 0);
14312 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14313 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14314 } IEM_MC_ELSE() {
14315 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14316 } IEM_MC_ENDIF();
14317 IEM_MC_ADVANCE_RIP();
14318 IEM_MC_END();
14319 return VINF_SUCCESS;
14320}
14321
14322
14323/** Opcode 0xd7. */
14324FNIEMOP_DEF(iemOp_xlat)
14325{
14326 IEMOP_MNEMONIC(xlat, "xlat");
14327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14328 switch (pVCpu->iem.s.enmEffAddrMode)
14329 {
14330 case IEMMODE_16BIT:
14331 IEM_MC_BEGIN(2, 0);
14332 IEM_MC_LOCAL(uint8_t, u8Tmp);
14333 IEM_MC_LOCAL(uint16_t, u16Addr);
14334 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
14335 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
14336 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
14337 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14338 IEM_MC_ADVANCE_RIP();
14339 IEM_MC_END();
14340 return VINF_SUCCESS;
14341
14342 case IEMMODE_32BIT:
14343 IEM_MC_BEGIN(2, 0);
14344 IEM_MC_LOCAL(uint8_t, u8Tmp);
14345 IEM_MC_LOCAL(uint32_t, u32Addr);
14346 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
14347 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
14348 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
14349 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14350 IEM_MC_ADVANCE_RIP();
14351 IEM_MC_END();
14352 return VINF_SUCCESS;
14353
14354 case IEMMODE_64BIT:
14355 IEM_MC_BEGIN(2, 0);
14356 IEM_MC_LOCAL(uint8_t, u8Tmp);
14357 IEM_MC_LOCAL(uint64_t, u64Addr);
14358 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
14359 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
14360 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
14361 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14362 IEM_MC_ADVANCE_RIP();
14363 IEM_MC_END();
14364 return VINF_SUCCESS;
14365
14366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14367 }
14368}
14369
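/*
 * All three XLAT variants above compute the same thing at different address
 * widths: AL = [seg:rBX + zero-extended AL].  64-bit example (pfnReadByte is
 * a made-up stand-in for the segmented memory fetch):
 */
#if 0
static uint8_t iemExampleXlat64(uint64_t uRbx, uint8_t uAl, uint8_t (*pfnReadByte)(uint64_t GCPtr))
{
    return pfnReadByte(uRbx + uAl); /* the result replaces AL */
}
#endif
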
14370
14371/**
14372 * Common worker for FPU instructions working on ST0 and STn, and storing the
14373 * result in ST0.
14374 *
14375 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14376 */
14377FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14378{
14379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14380
14381 IEM_MC_BEGIN(3, 1);
14382 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14383 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14384 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14385 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14386
14387 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14388 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14389 IEM_MC_PREPARE_FPU_USAGE();
14390 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14391 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14392 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14393 IEM_MC_ELSE()
14394 IEM_MC_FPU_STACK_UNDERFLOW(0);
14395 IEM_MC_ENDIF();
14396 IEM_MC_ADVANCE_RIP();
14397
14398 IEM_MC_END();
14399 return VINF_SUCCESS;
14400}
14401
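/*
 * The 'bRm & X86_MODRM_RM_MASK' index passed around above is TOP-relative:
 * ST(i) refers to physical x87 register (TOP + i) & 7, where TOP sits in
 * FSW bits 11..13.  Illustrative mapping (example only, not how IEM spells
 * it internally):
 */
#if 0
static unsigned iemExampleStRegToPhysReg(uint16_t u16Fsw, uint8_t iStReg)
{
    unsigned const iTop = (u16Fsw >> 11) & 7;
    return (iTop + iStReg) & 7;
}
#endif
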
14402
14403/**
14404 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14405 * flags.
14406 *
14407 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14408 */
14409FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14410{
14411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14412
14413 IEM_MC_BEGIN(3, 1);
14414 IEM_MC_LOCAL(uint16_t, u16Fsw);
14415 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14416 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14417 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14418
14419 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14420 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14421 IEM_MC_PREPARE_FPU_USAGE();
14422 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14423 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14424 IEM_MC_UPDATE_FSW(u16Fsw);
14425 IEM_MC_ELSE()
14426 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14427 IEM_MC_ENDIF();
14428 IEM_MC_ADVANCE_RIP();
14429
14430 IEM_MC_END();
14431 return VINF_SUCCESS;
14432}
14433
14434
14435/**
14436 * Common worker for FPU instructions working on ST0 and STn, only affecting
14437 * flags, and popping when done.
14438 *
14439 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14440 */
14441FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14442{
14443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14444
14445 IEM_MC_BEGIN(3, 1);
14446 IEM_MC_LOCAL(uint16_t, u16Fsw);
14447 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14448 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14449 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14450
14451 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14452 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14453 IEM_MC_PREPARE_FPU_USAGE();
14454 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14455 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14456 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14457 IEM_MC_ELSE()
14458 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14459 IEM_MC_ENDIF();
14460 IEM_MC_ADVANCE_RIP();
14461
14462 IEM_MC_END();
14463 return VINF_SUCCESS;
14464}
14465
14466
14467/** Opcode 0xd8 11/0. */
14468FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
14469{
14470 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
14471 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
14472}
14473
14474
14475/** Opcode 0xd8 11/1. */
14476FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
14477{
14478 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
14479 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
14480}
14481
14482
14483/** Opcode 0xd8 11/2. */
14484FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
14485{
14486 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
14487 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
14488}
14489
14490
14491/** Opcode 0xd8 11/3. */
14492FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
14493{
14494 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
14495 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
14496}
14497
14498
14499/** Opcode 0xd8 11/4. */
14500FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
14501{
14502 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
14503 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
14504}
14505
14506
14507/** Opcode 0xd8 11/5. */
14508FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
14509{
14510 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
14511 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
14512}
14513
14514
14515/** Opcode 0xd8 11/6. */
14516FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
14517{
14518 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
14519 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
14520}
14521
14522
14523/** Opcode 0xd8 11/7. */
14524FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
14525{
14526 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
14527 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
14528}
14529
14530
14531/**
14532 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14533 * the result in ST0.
14534 *
14535 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14536 */
14537FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14538{
14539 IEM_MC_BEGIN(3, 3);
14540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14541 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14542 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14543 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14544 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14545 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14546
14547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14549
14550 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14551 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14552 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14553
14554 IEM_MC_PREPARE_FPU_USAGE();
14555 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14556 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14557 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14558 IEM_MC_ELSE()
14559 IEM_MC_FPU_STACK_UNDERFLOW(0);
14560 IEM_MC_ENDIF();
14561 IEM_MC_ADVANCE_RIP();
14562
14563 IEM_MC_END();
14564 return VINF_SUCCESS;
14565}
14566
14567
14568/** Opcode 0xd8 !11/0. */
14569FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
14570{
14571 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
14572 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
14573}
14574
14575
14576/** Opcode 0xd8 !11/1. */
14577FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
14578{
14579 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
14580 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
14581}
14582
14583
14584/** Opcode 0xd8 !11/2. */
14585FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
14586{
14587 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
14588
14589 IEM_MC_BEGIN(3, 3);
14590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14591 IEM_MC_LOCAL(uint16_t, u16Fsw);
14592 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14593 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14594 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14595 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14596
14597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14599
14600 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14601 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14602 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14603
14604 IEM_MC_PREPARE_FPU_USAGE();
14605 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14606 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14607 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14608 IEM_MC_ELSE()
14609 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14610 IEM_MC_ENDIF();
14611 IEM_MC_ADVANCE_RIP();
14612
14613 IEM_MC_END();
14614 return VINF_SUCCESS;
14615}
14616
14617
14618/** Opcode 0xd8 !11/3. */
14619FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
14620{
14621 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
14622
14623 IEM_MC_BEGIN(3, 3);
14624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14625 IEM_MC_LOCAL(uint16_t, u16Fsw);
14626 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14627 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14628 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14629 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14630
14631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14633
14634 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14635 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14636 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14637
14638 IEM_MC_PREPARE_FPU_USAGE();
14639 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14640 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14641 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14642 IEM_MC_ELSE()
14643 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14644 IEM_MC_ENDIF();
14645 IEM_MC_ADVANCE_RIP();
14646
14647 IEM_MC_END();
14648 return VINF_SUCCESS;
14649}
14650
14651
14652/** Opcode 0xd8 !11/4. */
14653FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
14654{
14655 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
14656 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
14657}
14658
14659
14660/** Opcode 0xd8 !11/5. */
14661FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
14662{
14663 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
14664 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
14665}
14666
14667
14668/** Opcode 0xd8 !11/6. */
14669FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
14670{
14671 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
14672 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
14673}
14674
14675
14676/** Opcode 0xd8 !11/7. */
14677FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
14678{
14679 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
14680 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
14681}
14682
14683
14684/** Opcode 0xd8. */
14685FNIEMOP_DEF(iemOp_EscF0)
14686{
14687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14688 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
14689
14690 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14691 {
14692 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14693 {
14694 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
14695 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
14696 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
14697 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
14698 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
14699 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
14700 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
14701 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
14702 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14703 }
14704 }
14705 else
14706 {
14707 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14708 {
14709 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
14710 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
14711 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
14712 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
14713 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
14714 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
14715 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
14716 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
14717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14718 }
14719 }
14720}
14721
14722
14723/** Opcode 0xd9 /0 mem32real
14724 * @sa iemOp_fld_m64r */
14725FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
14726{
14727 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
14728
14729 IEM_MC_BEGIN(2, 3);
14730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14731 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14732 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
14733 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14734 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
14735
14736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14738
14739 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14740 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14741 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14742
14743 IEM_MC_PREPARE_FPU_USAGE();
14744 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14745 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
14746 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14747 IEM_MC_ELSE()
14748 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14749 IEM_MC_ENDIF();
14750 IEM_MC_ADVANCE_RIP();
14751
14752 IEM_MC_END();
14753 return VINF_SUCCESS;
14754}
14755
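/*
 * Side note on fld m32r: widening float32 to the 80-bit format is exact, so
 * the load itself cannot round or raise #P (a denormal source can still set
 * DE).  Conceptually (host 'long double' only approximates the 80-bit type
 * on some compilers):
 */
#if 0
static long double iemExampleFldR32(float r32Val)
{
    return (long double)r32Val; /* exact widening; IEM then pushes the result as ST(0) */
}
#endif
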
14756
14757/** Opcode 0xd9 !11/2 mem32real */
14758FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
14759{
14760 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
14761 IEM_MC_BEGIN(3, 2);
14762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14763 IEM_MC_LOCAL(uint16_t, u16Fsw);
14764 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14765 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14766 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14767
14768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14770 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14771 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14772
14773 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14774 IEM_MC_PREPARE_FPU_USAGE();
14775 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14776 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14777 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14778 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14779 IEM_MC_ELSE()
14780 IEM_MC_IF_FCW_IM()
14781 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14782 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14783 IEM_MC_ENDIF();
14784 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14785 IEM_MC_ENDIF();
14786 IEM_MC_ADVANCE_RIP();
14787
14788 IEM_MC_END();
14789 return VINF_SUCCESS;
14790}
14791
14792
14793/** Opcode 0xd9 !11/3 */
14794FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
14795{
14796 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
14797 IEM_MC_BEGIN(3, 2);
14798 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14799 IEM_MC_LOCAL(uint16_t, u16Fsw);
14800 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14801 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14802 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14803
14804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14806 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14807 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14808
14809 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14810 IEM_MC_PREPARE_FPU_USAGE();
14811 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14812 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14813 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14814 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14815 IEM_MC_ELSE()
14816 IEM_MC_IF_FCW_IM()
14817 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14818 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14819 IEM_MC_ENDIF();
14820 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14821 IEM_MC_ENDIF();
14822 IEM_MC_ADVANCE_RIP();
14823
14824 IEM_MC_END();
14825 return VINF_SUCCESS;
14826}
14827
14828
14829/** Opcode 0xd9 !11/4 */
14830FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
14831{
14832    IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
14833 IEM_MC_BEGIN(3, 0);
14834 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14835 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14836 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
14837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14839 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14840 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14841 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14842 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
14843 IEM_MC_END();
14844 return VINF_SUCCESS;
14845}
14846
14847
14848/** Opcode 0xd9 !11/5 */
14849FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14850{
14851 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14852 IEM_MC_BEGIN(1, 1);
14853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14854    IEM_MC_ARG(uint16_t, u16Fcw, 0);
14855    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14856    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14857    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14858    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14859    IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14860    IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14861 IEM_MC_END();
14862 return VINF_SUCCESS;
14863}
14864
14865
14866/** Opcode 0xd9 !11/6 */
14867FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14868{
14869    IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14870 IEM_MC_BEGIN(3, 0);
14871 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14872 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14873 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14876 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14877 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14878 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14879 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14880 IEM_MC_END();
14881 return VINF_SUCCESS;
14882}
14883
14884
14885/** Opcode 0xd9 !11/7 */
14886FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14887{
14888 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14889 IEM_MC_BEGIN(2, 0);
14890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14891 IEM_MC_LOCAL(uint16_t, u16Fcw);
14892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14894 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14895 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14896 IEM_MC_FETCH_FCW(u16Fcw);
14897 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14898 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14899 IEM_MC_END();
14900 return VINF_SUCCESS;
14901}
14902
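/*
 * Layout of the 16-bit control word that fldcw/fnstcw above move around
 * (the proper X86_FCW_* constants exist elsewhere; this is for orientation):
 *
 *   bits  0..5 : exception masks IM DM ZM OM UM PM
 *   bits  8..9 : precision control (00=24, 10=53, 11=64 significand bits)
 *   bits 10..11: rounding control
 */
#if 0
static unsigned iemExampleFcwRoundingControl(uint16_t u16Fcw)
{
    return (u16Fcw >> 10) & 3; /* 0=to nearest, 1=toward -inf, 2=toward +inf, 3=toward zero */
}
#endif
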
14903
14904/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
14905FNIEMOP_DEF(iemOp_fnop)
14906{
14907 IEMOP_MNEMONIC(fnop, "fnop");
14908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14909
14910 IEM_MC_BEGIN(0, 0);
14911 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14912 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14913 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14914    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could
14915     * be an Intel optimization. Investigate. */
14916 IEM_MC_UPDATE_FPU_OPCODE_IP();
14917 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14918 IEM_MC_END();
14919 return VINF_SUCCESS;
14920}
14921
14922
14923/** Opcode 0xd9 11/0 stN */
14924FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14925{
14926 IEMOP_MNEMONIC(fld_stN, "fld stN");
14927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14928
14929    /** @todo Testcase: Check if this raises \#MF? Intel says it does not, while
14930     * AMD indicates that it does. */
14931 IEM_MC_BEGIN(0, 2);
14932 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14933 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14934 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14935 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14936
14937 IEM_MC_PREPARE_FPU_USAGE();
14938 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14939 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14940 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14941 IEM_MC_ELSE()
14942 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14943 IEM_MC_ENDIF();
14944
14945 IEM_MC_ADVANCE_RIP();
14946 IEM_MC_END();
14947
14948 return VINF_SUCCESS;
14949}
14950
14951
14952/** Opcode 0xd9 11/3 stN */
14953FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14954{
14955 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14957
14958    /** @todo Testcase: Check if this raises \#MF? Intel says it does not, while
14959     * AMD indicates that it does. */
14960 IEM_MC_BEGIN(1, 3);
14961 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14962 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14963 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14964 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14965 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14966 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14967
14968 IEM_MC_PREPARE_FPU_USAGE();
14969 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14970 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14971 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14972 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14973 IEM_MC_ELSE()
14974 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14975 IEM_MC_ENDIF();
14976
14977 IEM_MC_ADVANCE_RIP();
14978 IEM_MC_END();
14979
14980 return VINF_SUCCESS;
14981}
14982
14983
14984/** Opcode 0xd9 11/4, 0xdd 11/2. */
14985FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14986{
14987 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14989
14990 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
14991 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14992 if (!iDstReg)
14993 {
14994 IEM_MC_BEGIN(0, 1);
14995 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14996 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14997 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14998
14999 IEM_MC_PREPARE_FPU_USAGE();
15000 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
15001 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
15002 IEM_MC_ELSE()
15003 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
15004 IEM_MC_ENDIF();
15005
15006 IEM_MC_ADVANCE_RIP();
15007 IEM_MC_END();
15008 }
15009 else
15010 {
15011 IEM_MC_BEGIN(0, 2);
15012 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
15013 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15014 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15015 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15016
15017 IEM_MC_PREPARE_FPU_USAGE();
15018 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15019 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
15020 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
15021 IEM_MC_ELSE()
15022 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
15023 IEM_MC_ENDIF();
15024
15025 IEM_MC_ADVANCE_RIP();
15026 IEM_MC_END();
15027 }
15028 return VINF_SUCCESS;
15029}
15030
15031
15032/**
15033 * Common worker for FPU instructions working on ST0 and replaces it with the
15034 * result, i.e. unary operators.
15035 *
15036 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15037 */
15038FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
15039{
15040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15041
15042 IEM_MC_BEGIN(2, 1);
15043 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15044 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15045 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15046
15047 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15048 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15049 IEM_MC_PREPARE_FPU_USAGE();
15050 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15051 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
15052 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15053 IEM_MC_ELSE()
15054 IEM_MC_FPU_STACK_UNDERFLOW(0);
15055 IEM_MC_ENDIF();
15056 IEM_MC_ADVANCE_RIP();
15057
15058 IEM_MC_END();
15059 return VINF_SUCCESS;
15060}
15061
15062
15063/** Opcode 0xd9 0xe0. */
15064FNIEMOP_DEF(iemOp_fchs)
15065{
15066 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
15067 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
15068}
15069
15070
15071/** Opcode 0xd9 0xe1. */
15072FNIEMOP_DEF(iemOp_fabs)
15073{
15074 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
15075 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
15076}
15077
15078
15079/**
15080 * Common worker for FPU instructions working on ST0 and only returns FSW.
15081 *
15082 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15083 */
15084FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
15085{
15086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15087
15088 IEM_MC_BEGIN(2, 1);
15089 IEM_MC_LOCAL(uint16_t, u16Fsw);
15090 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15091 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15092
15093 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15094 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15095 IEM_MC_PREPARE_FPU_USAGE();
15096 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15097 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
15098 IEM_MC_UPDATE_FSW(u16Fsw);
15099 IEM_MC_ELSE()
15100 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
15101 IEM_MC_ENDIF();
15102 IEM_MC_ADVANCE_RIP();
15103
15104 IEM_MC_END();
15105 return VINF_SUCCESS;
15106}
15107
15108
15109/** Opcode 0xd9 0xe4. */
15110FNIEMOP_DEF(iemOp_ftst)
15111{
15112 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
15113 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
15114}
15115
15116
15117/** Opcode 0xd9 0xe5. */
15118FNIEMOP_DEF(iemOp_fxam)
15119{
15120 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
15121 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
15122}
15123
15124
15125/**
15126 * Common worker for FPU instructions pushing a constant onto the FPU stack.
15127 *
15128 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15129 */
15130FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
15131{
15132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15133
15134 IEM_MC_BEGIN(1, 1);
15135 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15136 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15137
15138 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15139 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15140 IEM_MC_PREPARE_FPU_USAGE();
15141 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15142 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
15143 IEM_MC_PUSH_FPU_RESULT(FpuRes);
15144 IEM_MC_ELSE()
15145 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
15146 IEM_MC_ENDIF();
15147 IEM_MC_ADVANCE_RIP();
15148
15149 IEM_MC_END();
15150 return VINF_SUCCESS;
15151}
15152
15153
15154/** Opcode 0xd9 0xe8. */
15155FNIEMOP_DEF(iemOp_fld1)
15156{
15157 IEMOP_MNEMONIC(fld1, "fld1");
15158 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
15159}
15160
15161
15162/** Opcode 0xd9 0xe9. */
15163FNIEMOP_DEF(iemOp_fldl2t)
15164{
15165 IEMOP_MNEMONIC(fldl2t, "fldl2t");
15166 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
15167}
15168
15169
15170/** Opcode 0xd9 0xea. */
15171FNIEMOP_DEF(iemOp_fldl2e)
15172{
15173 IEMOP_MNEMONIC(fldl2e, "fldl2e");
15174 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
15175}
15176
15177/** Opcode 0xd9 0xeb. */
15178FNIEMOP_DEF(iemOp_fldpi)
15179{
15180 IEMOP_MNEMONIC(fldpi, "fldpi");
15181 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
15182}
15183
15184
15185/** Opcode 0xd9 0xec. */
15186FNIEMOP_DEF(iemOp_fldlg2)
15187{
15188 IEMOP_MNEMONIC(fldlg2, "fldlg2");
15189 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
15190}
15191
15192/** Opcode 0xd9 0xed. */
15193FNIEMOP_DEF(iemOp_fldln2)
15194{
15195 IEMOP_MNEMONIC(fldln2, "fldln2");
15196 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
15197}
15198
15199
15200/** Opcode 0xd9 0xee. */
15201FNIEMOP_DEF(iemOp_fldz)
15202{
15203 IEMOP_MNEMONIC(fldz, "fldz");
15204 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
15205}
15206
15207
15208/** Opcode 0xd9 0xf0. */
15209FNIEMOP_DEF(iemOp_f2xm1)
15210{
15211 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
15212 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
15213}
15214
15215
15216/**
15217 * Common worker for FPU instructions working on STn and ST0, storing the result
15218 * in STn, and popping the stack unless IE, DE or ZE was raised.
15219 *
15220 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15221 */
15222FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15223{
15224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15225
15226 IEM_MC_BEGIN(3, 1);
15227 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15228 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15229 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15230 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15231
15232 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15233 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15234
15235 IEM_MC_PREPARE_FPU_USAGE();
15236 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15237 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15238 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15239 IEM_MC_ELSE()
15240 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15241 IEM_MC_ENDIF();
15242 IEM_MC_ADVANCE_RIP();
15243
15244 IEM_MC_END();
15245 return VINF_SUCCESS;
15246}
15247
15248
15249/** Opcode 0xd9 0xf1. */
15250FNIEMOP_DEF(iemOp_fyl2x)
15251{
15252 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
15253 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15254}
15255
15256
15257/**
15258 * Common worker for FPU instructions working on ST0 and having two outputs, one
15259 * replacing ST0 and one pushed onto the stack.
15260 *
15261 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15262 */
15263FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15264{
15265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15266
15267 IEM_MC_BEGIN(2, 1);
15268 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15269 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15270 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15271
15272 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15273 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15274 IEM_MC_PREPARE_FPU_USAGE();
15275 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15276 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15277 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15278 IEM_MC_ELSE()
15279 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15280 IEM_MC_ENDIF();
15281 IEM_MC_ADVANCE_RIP();
15282
15283 IEM_MC_END();
15284 return VINF_SUCCESS;
15285}
15286
15287
15288/** Opcode 0xd9 0xf2. */
15289FNIEMOP_DEF(iemOp_fptan)
15290{
15291 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
15292 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
15293}
15294
15295
15296/** Opcode 0xd9 0xf3. */
15297FNIEMOP_DEF(iemOp_fpatan)
15298{
15299 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
15300 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
15301}
15302
15303
15304/** Opcode 0xd9 0xf4. */
15305FNIEMOP_DEF(iemOp_fxtract)
15306{
15307 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
15308 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
15309}
15310
15311
15312/** Opcode 0xd9 0xf5. */
15313FNIEMOP_DEF(iemOp_fprem1)
15314{
15315 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
15316 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
15317}
15318
15319
15320/** Opcode 0xd9 0xf6. */
15321FNIEMOP_DEF(iemOp_fdecstp)
15322{
15323 IEMOP_MNEMONIC(fdecstp, "fdecstp");
15324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15325 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
15326 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15327 * FINCSTP and FDECSTP. */
15328
15329 IEM_MC_BEGIN(0,0);
15330
15331 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15332 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15333
15334 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15335 IEM_MC_FPU_STACK_DEC_TOP();
15336 IEM_MC_UPDATE_FSW_CONST(0);
15337
15338 IEM_MC_ADVANCE_RIP();
15339 IEM_MC_END();
15340 return VINF_SUCCESS;
15341}
15342
15343
15344/** Opcode 0xd9 0xf7. */
15345FNIEMOP_DEF(iemOp_fincstp)
15346{
15347 IEMOP_MNEMONIC(fincstp, "fincstp");
15348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15349 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
15350 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15351 * FINCSTP and FDECSTP. */
15352
15353 IEM_MC_BEGIN(0,0);
15354
15355 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15356 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15357
15358 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15359 IEM_MC_FPU_STACK_INC_TOP();
15360 IEM_MC_UPDATE_FSW_CONST(0);
15361
15362 IEM_MC_ADVANCE_RIP();
15363 IEM_MC_END();
15364 return VINF_SUCCESS;
15365}
15366
15367
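/*
 * FINCSTP and FDECSTP only rotate the 3-bit TOP field in FSW (bits 11-13); no
 * register is loaded, stored or re-tagged.  A small standalone illustration
 * (helper names invented):
 */
#if 0 /* illustrative sketch, not built */
# include <stdint.h>

static uint16_t FswIncTop(uint16_t fsw)     /* FINCSTP */
{
    unsigned const iTop = ((fsw >> 11) + 1) & 7;
    return (uint16_t)((fsw & ~(UINT16_C(7) << 11)) | (iTop << 11));
}

static uint16_t FswDecTop(uint16_t fsw)     /* FDECSTP */
{
    unsigned const iTop = ((fsw >> 11) + 7) & 7;    /* -1 modulo 8 */
    return (uint16_t)((fsw & ~(UINT16_C(7) << 11)) | (iTop << 11));
}
#endif
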
15368/** Opcode 0xd9 0xf8. */
15369FNIEMOP_DEF(iemOp_fprem)
15370{
15371 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
15372 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
15373}
15374
15375
15376/** Opcode 0xd9 0xf9. */
15377FNIEMOP_DEF(iemOp_fyl2xp1)
15378{
15379 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
15380 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
15381}
15382
15383
15384/** Opcode 0xd9 0xfa. */
15385FNIEMOP_DEF(iemOp_fsqrt)
15386{
15387 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
15388 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
15389}
15390
15391
15392/** Opcode 0xd9 0xfb. */
15393FNIEMOP_DEF(iemOp_fsincos)
15394{
15395 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
15396 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
15397}
15398
15399
15400/** Opcode 0xd9 0xfc. */
15401FNIEMOP_DEF(iemOp_frndint)
15402{
15403 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
15404 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
15405}
15406
15407
15408/** Opcode 0xd9 0xfd. */
15409FNIEMOP_DEF(iemOp_fscale)
15410{
15411 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
15412 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
15413}
15414
15415
15416/** Opcode 0xd9 0xfe. */
15417FNIEMOP_DEF(iemOp_fsin)
15418{
15419 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
15420 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
15421}
15422
15423
15424/** Opcode 0xd9 0xff. */
15425FNIEMOP_DEF(iemOp_fcos)
15426{
15427 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
15428 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
15429}
15430
15431
15432/** Used by iemOp_EscF1. */
15433IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
15434{
15435 /* 0xe0 */ iemOp_fchs,
15436 /* 0xe1 */ iemOp_fabs,
15437 /* 0xe2 */ iemOp_Invalid,
15438 /* 0xe3 */ iemOp_Invalid,
15439 /* 0xe4 */ iemOp_ftst,
15440 /* 0xe5 */ iemOp_fxam,
15441 /* 0xe6 */ iemOp_Invalid,
15442 /* 0xe7 */ iemOp_Invalid,
15443 /* 0xe8 */ iemOp_fld1,
15444 /* 0xe9 */ iemOp_fldl2t,
15445 /* 0xea */ iemOp_fldl2e,
15446 /* 0xeb */ iemOp_fldpi,
15447 /* 0xec */ iemOp_fldlg2,
15448 /* 0xed */ iemOp_fldln2,
15449 /* 0xee */ iemOp_fldz,
15450 /* 0xef */ iemOp_Invalid,
15451 /* 0xf0 */ iemOp_f2xm1,
15452 /* 0xf1 */ iemOp_fyl2x,
15453 /* 0xf2 */ iemOp_fptan,
15454 /* 0xf3 */ iemOp_fpatan,
15455 /* 0xf4 */ iemOp_fxtract,
15456 /* 0xf5 */ iemOp_fprem1,
15457 /* 0xf6 */ iemOp_fdecstp,
15458 /* 0xf7 */ iemOp_fincstp,
15459 /* 0xf8 */ iemOp_fprem,
15460 /* 0xf9 */ iemOp_fyl2xp1,
15461 /* 0xfa */ iemOp_fsqrt,
15462 /* 0xfb */ iemOp_fsincos,
15463 /* 0xfc */ iemOp_frndint,
15464 /* 0xfd */ iemOp_fscale,
15465 /* 0xfe */ iemOp_fsin,
15466 /* 0xff */ iemOp_fcos
15467};
15468
15469
15470/** Opcode 0xd9. */
15471FNIEMOP_DEF(iemOp_EscF1)
15472{
15473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15474 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
15475
15476 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15477 {
15478 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15479 {
15480 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
15481 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
15482 case 2:
15483 if (bRm == 0xd0)
15484 return FNIEMOP_CALL(iemOp_fnop);
15485 return IEMOP_RAISE_INVALID_OPCODE();
15486 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
15487 case 4:
15488 case 5:
15489 case 6:
15490 case 7:
15491 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
15492 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
15493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15494 }
15495 }
15496 else
15497 {
15498 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15499 {
15500 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
15501 case 1: return IEMOP_RAISE_INVALID_OPCODE();
15502 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
15503 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
15504 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
15505 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
15506 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
15507 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
15508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15509 }
15510 }
15511}
15512
15513
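/*
 * A standalone illustration of the ModRM tests used by the escape decoders
 * like iemOp_EscF1 above: mod lives in bits 6-7, reg in bits 3-5 and rm in
 * bits 0-2; mod == 3 selects the register forms (bytes 0xc0..0xff), all other
 * mod values are memory forms.  The FOP value kept in uFpuOpcode combines the
 * low three opcode bits with the ModRM byte.  (Example code only, not IEM.)
 */
#if 0 /* illustrative sketch, not built */
# include <stdint.h>
# include <stdio.h>

int main(void)
{
    uint8_t const bRm = 0xe8;               /* 0xd9 0xe8 = FLD1 */
    printf("mod=%d reg=%d rm=%d register-form=%d\n",
           (bRm >> 6) & 3, (bRm >> 3) & 7, bRm & 7, ((bRm >> 6) & 3) == 3);
    printf("FOP=%#x\n", (unsigned)(((0xd9 & 7) << 8) | bRm));
    return 0;
}
#endif
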
15514/** Opcode 0xda 11/0. */
15515FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
15516{
15517 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
15518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15519
15520 IEM_MC_BEGIN(0, 1);
15521 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15522
15523 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15524 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15525
15526 IEM_MC_PREPARE_FPU_USAGE();
15527 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15528 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
15529 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15530 IEM_MC_ENDIF();
15531 IEM_MC_UPDATE_FPU_OPCODE_IP();
15532 IEM_MC_ELSE()
15533 IEM_MC_FPU_STACK_UNDERFLOW(0);
15534 IEM_MC_ENDIF();
15535 IEM_MC_ADVANCE_RIP();
15536
15537 IEM_MC_END();
15538 return VINF_SUCCESS;
15539}
15540
15541
15542/** Opcode 0xda 11/1. */
15543FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
15544{
15545 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
15546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15547
15548 IEM_MC_BEGIN(0, 1);
15549 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15550
15551 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15552 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15553
15554 IEM_MC_PREPARE_FPU_USAGE();
15555 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15556 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
15557 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15558 IEM_MC_ENDIF();
15559 IEM_MC_UPDATE_FPU_OPCODE_IP();
15560 IEM_MC_ELSE()
15561 IEM_MC_FPU_STACK_UNDERFLOW(0);
15562 IEM_MC_ENDIF();
15563 IEM_MC_ADVANCE_RIP();
15564
15565 IEM_MC_END();
15566 return VINF_SUCCESS;
15567}
15568
15569
15570/** Opcode 0xda 11/2. */
15571FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
15572{
15573 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
15574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15575
15576 IEM_MC_BEGIN(0, 1);
15577 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15578
15579 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15580 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15581
15582 IEM_MC_PREPARE_FPU_USAGE();
15583 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15584 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15585 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15586 IEM_MC_ENDIF();
15587 IEM_MC_UPDATE_FPU_OPCODE_IP();
15588 IEM_MC_ELSE()
15589 IEM_MC_FPU_STACK_UNDERFLOW(0);
15590 IEM_MC_ENDIF();
15591 IEM_MC_ADVANCE_RIP();
15592
15593 IEM_MC_END();
15594 return VINF_SUCCESS;
15595}
15596
15597
15598/** Opcode 0xda 11/3. */
15599FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
15600{
15601 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
15602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15603
15604 IEM_MC_BEGIN(0, 1);
15605 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15606
15607 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15608 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15609
15610 IEM_MC_PREPARE_FPU_USAGE();
15611 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15612 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
15613 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15614 IEM_MC_ENDIF();
15615 IEM_MC_UPDATE_FPU_OPCODE_IP();
15616 IEM_MC_ELSE()
15617 IEM_MC_FPU_STACK_UNDERFLOW(0);
15618 IEM_MC_ENDIF();
15619 IEM_MC_ADVANCE_RIP();
15620
15621 IEM_MC_END();
15622 return VINF_SUCCESS;
15623}
15624
15625
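/*
 * Summary of the EFLAGS tests behind the four 0xda FCMOV forms above (the
 * 0xdb forms test the inverted conditions).  These are the same condition
 * codes CMOVcc uses, applied to an FPU register move; the helpers and
 * constants below are invented for illustration.
 */
#if 0 /* illustrative sketch, not built */
# include <stdbool.h>
# include <stdint.h>

# define SKETCH_EFL_CF UINT32_C(0x0001)
# define SKETCH_EFL_PF UINT32_C(0x0004)
# define SKETCH_EFL_ZF UINT32_C(0x0040)

static bool FCmovBTaken(uint32_t fEfl)  { return (fEfl & SKETCH_EFL_CF) != 0; }                     /* FCMOVB:  below */
static bool FCmovETaken(uint32_t fEfl)  { return (fEfl & SKETCH_EFL_ZF) != 0; }                     /* FCMOVE:  equal */
static bool FCmovBeTaken(uint32_t fEfl) { return (fEfl & (SKETCH_EFL_CF | SKETCH_EFL_ZF)) != 0; }   /* FCMOVBE */
static bool FCmovUTaken(uint32_t fEfl)  { return (fEfl & SKETCH_EFL_PF) != 0; }                     /* FCMOVU:  unordered */
#endif
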
15626/**
15627 * Common worker for FPU instructions working on ST0 and STn, only affecting
15628 * flags, and popping twice when done.
15629 *
15630 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15631 */
15632FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15633{
15634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15635
15636 IEM_MC_BEGIN(3, 1);
15637 IEM_MC_LOCAL(uint16_t, u16Fsw);
15638 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15639 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15640 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15641
15642 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15643 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15644
15645 IEM_MC_PREPARE_FPU_USAGE();
15646 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15647 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15648 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15649 IEM_MC_ELSE()
15650 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15651 IEM_MC_ENDIF();
15652 IEM_MC_ADVANCE_RIP();
15653
15654 IEM_MC_END();
15655 return VINF_SUCCESS;
15656}
15657
15658
15659/** Opcode 0xda 0xe9. */
15660FNIEMOP_DEF(iemOp_fucompp)
15661{
15662 IEMOP_MNEMONIC(fucompp_st0_st1, "fucompp st0,st1");
15663 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
15664}
15665
15666
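/*
 * For reference, the FSW condition codes an (F)UCOM-style helper reports:
 * C3 is FSW bit 14, C2 bit 10 and C0 bit 8; all three set means the operands
 * were unordered (at least one NaN).  Standalone illustration only; the
 * helper name is invented.
 */
#if 0 /* illustrative sketch, not built */
# include <math.h>
# include <stdint.h>

static uint16_t FucomCcSketch(long double lrd1, long double lrd2)
{
    if (isunordered(lrd1, lrd2))
        return UINT16_C(0x4500);    /* C3=1 C2=1 C0=1: unordered */
    if (lrd1 > lrd2)
        return UINT16_C(0x0000);    /* C3=0 C2=0 C0=0: ST(0) greater */
    if (lrd1 < lrd2)
        return UINT16_C(0x0100);    /* C0=1: ST(0) less */
    return UINT16_C(0x4000);        /* C3=1: equal */
}
#endif
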
15667/**
15668 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15669 * the result in ST0.
15670 *
15671 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15672 */
15673FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15674{
15675 IEM_MC_BEGIN(3, 3);
15676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15677 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15678 IEM_MC_LOCAL(int32_t, i32Val2);
15679 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15680 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15681 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15682
15683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15685
15686 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15687 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15688 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15689
15690 IEM_MC_PREPARE_FPU_USAGE();
15691 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15692 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15693 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15694 IEM_MC_ELSE()
15695 IEM_MC_FPU_STACK_UNDERFLOW(0);
15696 IEM_MC_ENDIF();
15697 IEM_MC_ADVANCE_RIP();
15698
15699 IEM_MC_END();
15700 return VINF_SUCCESS;
15701}
15702
15703
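/*
 * What the m32i helpers boil down to arithmetically: the 32-bit integer is
 * widened to the 80-bit format (always exactly, since 32 bits fit the 64-bit
 * significand) before the operation.  Sketch (invented name) of roughly what
 * iemAImpl_fiadd_r80_by_i32 computes:
 */
#if 0 /* illustrative sketch, not built */
# include <stdint.h>

static long double FiAddSketch(long double lrdSt0, int32_t i32Val)
{
    return lrdSt0 + (long double)i32Val;    /* int32 -> r80 conversion is exact */
}
#endif
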
15704/** Opcode 0xda !11/0. */
15705FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
15706{
15707 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
15708 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
15709}
15710
15711
15712/** Opcode 0xda !11/1. */
15713FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
15714{
15715 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
15716 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
15717}
15718
15719
15720/** Opcode 0xda !11/2. */
15721FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
15722{
15723 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
15724
15725 IEM_MC_BEGIN(3, 3);
15726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15727 IEM_MC_LOCAL(uint16_t, u16Fsw);
15728 IEM_MC_LOCAL(int32_t, i32Val2);
15729 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15730 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15731 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15732
15733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15735
15736 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15737 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15738 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15739
15740 IEM_MC_PREPARE_FPU_USAGE();
15741 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15742 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15743 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15744 IEM_MC_ELSE()
15745 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15746 IEM_MC_ENDIF();
15747 IEM_MC_ADVANCE_RIP();
15748
15749 IEM_MC_END();
15750 return VINF_SUCCESS;
15751}
15752
15753
15754/** Opcode 0xda !11/3. */
15755FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
15756{
15757 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
15758
15759 IEM_MC_BEGIN(3, 3);
15760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15761 IEM_MC_LOCAL(uint16_t, u16Fsw);
15762 IEM_MC_LOCAL(int32_t, i32Val2);
15763 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15764 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15765 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15766
15767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15769
15770 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15771 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15772 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15773
15774 IEM_MC_PREPARE_FPU_USAGE();
15775 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15776 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15777 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15778 IEM_MC_ELSE()
15779 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15780 IEM_MC_ENDIF();
15781 IEM_MC_ADVANCE_RIP();
15782
15783 IEM_MC_END();
15784 return VINF_SUCCESS;
15785}
15786
15787
15788/** Opcode 0xda !11/4. */
15789FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
15790{
15791 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
15792 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
15793}
15794
15795
15796/** Opcode 0xda !11/5. */
15797FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
15798{
15799 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
15800 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
15801}
15802
15803
15804/** Opcode 0xda !11/6. */
15805FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
15806{
15807 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
15808 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
15809}
15810
15811
15812/** Opcode 0xda !11/7. */
15813FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
15814{
15815 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
15816 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
15817}
15818
15819
15820/** Opcode 0xda. */
15821FNIEMOP_DEF(iemOp_EscF2)
15822{
15823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15824 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
15825 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15826 {
15827 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15828 {
15829 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
15830 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
15831 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
15832 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
15833 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15834 case 5:
15835 if (bRm == 0xe9)
15836 return FNIEMOP_CALL(iemOp_fucompp);
15837 return IEMOP_RAISE_INVALID_OPCODE();
15838 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15839 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15841 }
15842 }
15843 else
15844 {
15845 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15846 {
15847 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
15848 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
15849 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
15850 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
15851 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
15852 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
15853 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
15854 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
15855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15856 }
15857 }
15858}
15859
15860
15861/** Opcode 0xdb !11/0. */
15862FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
15863{
15864 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
15865
15866 IEM_MC_BEGIN(2, 3);
15867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15868 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15869 IEM_MC_LOCAL(int32_t, i32Val);
15870 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15871 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
15872
15873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15875
15876 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15877 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15878 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15879
15880 IEM_MC_PREPARE_FPU_USAGE();
15881 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15882 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
15883 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15884 IEM_MC_ELSE()
15885 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15886 IEM_MC_ENDIF();
15887 IEM_MC_ADVANCE_RIP();
15888
15889 IEM_MC_END();
15890 return VINF_SUCCESS;
15891}
15892
15893
15894/** Opcode 0xdb !11/1. */
15895FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
15896{
15897 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
15898 IEM_MC_BEGIN(3, 2);
15899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15900 IEM_MC_LOCAL(uint16_t, u16Fsw);
15901 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15902 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15903 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15904
15905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15907 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15908 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15909
15910 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15911 IEM_MC_PREPARE_FPU_USAGE();
15912 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15913 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15914 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15915 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15916 IEM_MC_ELSE()
15917 IEM_MC_IF_FCW_IM()
15918 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15919 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15920 IEM_MC_ENDIF();
15921 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15922 IEM_MC_ENDIF();
15923 IEM_MC_ADVANCE_RIP();
15924
15925 IEM_MC_END();
15926 return VINF_SUCCESS;
15927}
15928
15929
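/*
 * The difference between the two store helpers used above and below: FISTTP
 * (iemAImpl_fistt_r80_to_i32) always truncates toward zero, ignoring FCW.RC,
 * while FIST/FISTP honour the current rounding mode.  Values outside the
 * destination range produce the "integer indefinite" INT32_MIN when the
 * invalid-operation exception is masked.  Rough standalone sketch (invented
 * names, range tests simplified):
 */
#if 0 /* illustrative sketch, not built */
# include <math.h>
# include <stdint.h>

static int32_t FisttSketch(long double lrd)     /* FISTTP */
{
    if (lrd > -2147483649.0L && lrd < 2147483648.0L)
        return (int32_t)truncl(lrd);            /* chop toward zero */
    return INT32_MIN;                           /* integer indefinite */
}

static int32_t FistSketch(long double lrd)      /* FIST/FISTP */
{
    long double const lrdRounded = rintl(lrd);  /* honours rounding mode */
    if (lrdRounded >= -2147483648.0L && lrdRounded <= 2147483647.0L)
        return (int32_t)lrdRounded;
    return INT32_MIN;
}
#endif
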
15930/** Opcode 0xdb !11/2. */
15931FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15932{
15933 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15934 IEM_MC_BEGIN(3, 2);
15935 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15936 IEM_MC_LOCAL(uint16_t, u16Fsw);
15937 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15938 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15939 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15940
15941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15943 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15944 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15945
15946 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15947 IEM_MC_PREPARE_FPU_USAGE();
15948 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15949 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15950 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15951 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15952 IEM_MC_ELSE()
15953 IEM_MC_IF_FCW_IM()
15954 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15955 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15956 IEM_MC_ENDIF();
15957 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15958 IEM_MC_ENDIF();
15959 IEM_MC_ADVANCE_RIP();
15960
15961 IEM_MC_END();
15962 return VINF_SUCCESS;
15963}
15964
15965
15966/** Opcode 0xdb !11/3. */
15967FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
15968{
15969 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
15970 IEM_MC_BEGIN(3, 2);
15971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15972 IEM_MC_LOCAL(uint16_t, u16Fsw);
15973 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15974 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15975 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15976
15977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15979 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15980 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15981
15982 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15983 IEM_MC_PREPARE_FPU_USAGE();
15984 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15985 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15986 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15987 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15988 IEM_MC_ELSE()
15989 IEM_MC_IF_FCW_IM()
15990 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15991 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15992 IEM_MC_ENDIF();
15993 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15994 IEM_MC_ENDIF();
15995 IEM_MC_ADVANCE_RIP();
15996
15997 IEM_MC_END();
15998 return VINF_SUCCESS;
15999}
16000
16001
16002/** Opcode 0xdb !11/5. */
16003FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
16004{
16005 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
16006
16007 IEM_MC_BEGIN(2, 3);
16008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16009 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16010 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
16011 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16012 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
16013
16014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16016
16017 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16018 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16019 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16020
16021 IEM_MC_PREPARE_FPU_USAGE();
16022 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16023 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
16024 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16025 IEM_MC_ELSE()
16026 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16027 IEM_MC_ENDIF();
16028 IEM_MC_ADVANCE_RIP();
16029
16030 IEM_MC_END();
16031 return VINF_SUCCESS;
16032}
16033
16034
16035/** Opcode 0xdb !11/7. */
16036FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
16037{
16038 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
16039 IEM_MC_BEGIN(3, 2);
16040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16041 IEM_MC_LOCAL(uint16_t, u16Fsw);
16042 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16043 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
16044 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16045
16046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16048 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16049 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16050
16051 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16052 IEM_MC_PREPARE_FPU_USAGE();
16053 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16054 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
16055 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
16056 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16057 IEM_MC_ELSE()
16058 IEM_MC_IF_FCW_IM()
16059 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
16060 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
16061 IEM_MC_ENDIF();
16062 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16063 IEM_MC_ENDIF();
16064 IEM_MC_ADVANCE_RIP();
16065
16066 IEM_MC_END();
16067 return VINF_SUCCESS;
16068}
16069
16070
16071/** Opcode 0xdb 11/0. */
16072FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
16073{
16074 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
16075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16076
16077 IEM_MC_BEGIN(0, 1);
16078 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16079
16080 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16081 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16082
16083 IEM_MC_PREPARE_FPU_USAGE();
16084 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16085 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
16086 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16087 IEM_MC_ENDIF();
16088 IEM_MC_UPDATE_FPU_OPCODE_IP();
16089 IEM_MC_ELSE()
16090 IEM_MC_FPU_STACK_UNDERFLOW(0);
16091 IEM_MC_ENDIF();
16092 IEM_MC_ADVANCE_RIP();
16093
16094 IEM_MC_END();
16095 return VINF_SUCCESS;
16096}
16097
16098
16099/** Opcode 0xdb 11/1. */
16100FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
16101{
16102 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
16103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16104
16105 IEM_MC_BEGIN(0, 1);
16106 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16107
16108 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16109 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16110
16111 IEM_MC_PREPARE_FPU_USAGE();
16112 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16113 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
16114 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16115 IEM_MC_ENDIF();
16116 IEM_MC_UPDATE_FPU_OPCODE_IP();
16117 IEM_MC_ELSE()
16118 IEM_MC_FPU_STACK_UNDERFLOW(0);
16119 IEM_MC_ENDIF();
16120 IEM_MC_ADVANCE_RIP();
16121
16122 IEM_MC_END();
16123 return VINF_SUCCESS;
16124}
16125
16126
16127/** Opcode 0xdb 11/2. */
16128FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
16129{
16130 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
16131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16132
16133 IEM_MC_BEGIN(0, 1);
16134 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16135
16136 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16137 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16138
16139 IEM_MC_PREPARE_FPU_USAGE();
16140 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16141 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
16142 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16143 IEM_MC_ENDIF();
16144 IEM_MC_UPDATE_FPU_OPCODE_IP();
16145 IEM_MC_ELSE()
16146 IEM_MC_FPU_STACK_UNDERFLOW(0);
16147 IEM_MC_ENDIF();
16148 IEM_MC_ADVANCE_RIP();
16149
16150 IEM_MC_END();
16151 return VINF_SUCCESS;
16152}
16153
16154
16155/** Opcode 0xdb 11/3. */
16156FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
16157{
16158 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
16159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16160
16161 IEM_MC_BEGIN(0, 1);
16162 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16163
16164 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16165 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16166
16167 IEM_MC_PREPARE_FPU_USAGE();
16168 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16169 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
16170 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16171 IEM_MC_ENDIF();
16172 IEM_MC_UPDATE_FPU_OPCODE_IP();
16173 IEM_MC_ELSE()
16174 IEM_MC_FPU_STACK_UNDERFLOW(0);
16175 IEM_MC_ENDIF();
16176 IEM_MC_ADVANCE_RIP();
16177
16178 IEM_MC_END();
16179 return VINF_SUCCESS;
16180}
16181
16182
16183/** Opcode 0xdb 0xe0. */
16184FNIEMOP_DEF(iemOp_fneni)
16185{
16186 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
16187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16188 IEM_MC_BEGIN(0,0);
16189 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16190 IEM_MC_ADVANCE_RIP();
16191 IEM_MC_END();
16192 return VINF_SUCCESS;
16193}
16194
16195
16196/** Opcode 0xdb 0xe1. */
16197FNIEMOP_DEF(iemOp_fndisi)
16198{
16199 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
16200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16201 IEM_MC_BEGIN(0,0);
16202 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16203 IEM_MC_ADVANCE_RIP();
16204 IEM_MC_END();
16205 return VINF_SUCCESS;
16206}
16207
16208
16209/** Opcode 0xdb 0xe2. */
16210FNIEMOP_DEF(iemOp_fnclex)
16211{
16212 IEMOP_MNEMONIC(fnclex, "fnclex");
16213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16214
16215 IEM_MC_BEGIN(0,0);
16216 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16217 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16218 IEM_MC_CLEAR_FSW_EX();
16219 IEM_MC_ADVANCE_RIP();
16220 IEM_MC_END();
16221 return VINF_SUCCESS;
16222}
16223
16224
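/*
 * What IEM_MC_CLEAR_FSW_EX clears for FNCLEX: the busy flag (FSW bit 15), the
 * exception summary ES (bit 7), the stack-fault flag SF (bit 6) and the six
 * exception flags IE/DE/ZE/OE/UE/PE (bits 0-5).  TOP and the condition codes
 * are untouched.  Equivalent standalone expression (invented helper name):
 */
#if 0 /* illustrative sketch, not built */
# include <stdint.h>

static uint16_t FnClexSketch(uint16_t fsw)
{
    return (uint16_t)(fsw & ~UINT16_C(0x80ff)); /* keep C3..C0 and TOP; clear B, ES, SF, IE..PE */
}
#endif
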
16225/** Opcode 0xdb 0xe3. */
16226FNIEMOP_DEF(iemOp_fninit)
16227{
16228 IEMOP_MNEMONIC(fninit, "fninit");
16229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16230 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
16231}
16232
16233
16234/** Opcode 0xdb 0xe4. */
16235FNIEMOP_DEF(iemOp_fnsetpm)
16236{
16237 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
16238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16239 IEM_MC_BEGIN(0,0);
16240 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16241 IEM_MC_ADVANCE_RIP();
16242 IEM_MC_END();
16243 return VINF_SUCCESS;
16244}
16245
16246
16247/** Opcode 0xdb 0xe5. */
16248FNIEMOP_DEF(iemOp_frstpm)
16249{
16250 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
16251#if 0 /* #UDs on newer CPUs */
16252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16253 IEM_MC_BEGIN(0,0);
16254 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16255 IEM_MC_ADVANCE_RIP();
16256 IEM_MC_END();
16257 return VINF_SUCCESS;
16258#else
16259 return IEMOP_RAISE_INVALID_OPCODE();
16260#endif
16261}
16262
16263
16264/** Opcode 0xdb 11/5. */
16265FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
16266{
16267 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
16268 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
16269}
16270
16271
16272/** Opcode 0xdb 11/6. */
16273FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
16274{
16275 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
16276 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
16277}
16278
16279
16280/** Opcode 0xdb. */
16281FNIEMOP_DEF(iemOp_EscF3)
16282{
16283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16284 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
16285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16286 {
16287 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16288 {
16289 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
16290 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
16291 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
16292 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
16293 case 4:
16294 switch (bRm)
16295 {
16296 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
16297 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
16298 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
16299 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
16300 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
16301 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
16302 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
16303 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
16304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16305 }
16306 break;
16307 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
16308 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
16309 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16311 }
16312 }
16313 else
16314 {
16315 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16316 {
16317 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
16318 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
16319 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
16320 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
16321 case 4: return IEMOP_RAISE_INVALID_OPCODE();
16322 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
16323 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16324 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
16325 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16326 }
16327 }
16328}
16329
16330
16331/**
16332 * Common worker for FPU instructions working on STn and ST0, and storing the
16333 * result in STn unless IE, DE or ZE was raised.
16334 *
16335 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16336 */
16337FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
16338{
16339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16340
16341 IEM_MC_BEGIN(3, 1);
16342 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16343 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16344 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16345 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
16346
16347 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16348 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16349
16350 IEM_MC_PREPARE_FPU_USAGE();
16351 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
16352 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
16353 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16354 IEM_MC_ELSE()
16355 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16356 IEM_MC_ENDIF();
16357 IEM_MC_ADVANCE_RIP();
16358
16359 IEM_MC_END();
16360 return VINF_SUCCESS;
16361}
16362
16363
16364/** Opcode 0xdc 11/0. */
16365FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
16366{
16367 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
16368 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
16369}
16370
16371
16372/** Opcode 0xdc 11/1. */
16373FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
16374{
16375 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
16376 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
16377}
16378
16379
16380/** Opcode 0xdc 11/4. */
16381FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
16382{
16383 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
16384 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
16385}
16386
16387
16388/** Opcode 0xdc 11/5. */
16389FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
16390{
16391 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
16392 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
16393}
16394
16395
16396/** Opcode 0xdc 11/6. */
16397FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
16398{
16399 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
16400 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
16401}
16402
16403
16404/** Opcode 0xdc 11/7. */
16405FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
16406{
16407 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
16408 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
16409}
16410
16411
16412/**
16413 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16414 * memory operand, and storing the result in ST0.
16415 *
16416 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16417 */
16418FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
16419{
16420 IEM_MC_BEGIN(3, 3);
16421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16422 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16423 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16424 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16425 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16426 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16427
16428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16430 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16431 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16432
16433 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16434 IEM_MC_PREPARE_FPU_USAGE();
16435 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16436 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
16437 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16438 IEM_MC_ELSE()
16439 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16440 IEM_MC_ENDIF();
16441 IEM_MC_ADVANCE_RIP();
16442
16443 IEM_MC_END();
16444 return VINF_SUCCESS;
16445}
16446
16447
16448/** Opcode 0xdc !11/0. */
16449FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
16450{
16451 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
16452 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
16453}
16454
16455
16456/** Opcode 0xdc !11/1. */
16457FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
16458{
16459 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
16460 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
16461}
16462
16463
16464/** Opcode 0xdc !11/2. */
16465FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
16466{
16467 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
16468
16469 IEM_MC_BEGIN(3, 3);
16470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16471 IEM_MC_LOCAL(uint16_t, u16Fsw);
16472 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16473 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16474 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16475 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16476
16477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16479
16480 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16481 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16482 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16483
16484 IEM_MC_PREPARE_FPU_USAGE();
16485 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16486 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16487 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16488 IEM_MC_ELSE()
16489 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16490 IEM_MC_ENDIF();
16491 IEM_MC_ADVANCE_RIP();
16492
16493 IEM_MC_END();
16494 return VINF_SUCCESS;
16495}
16496
16497
16498/** Opcode 0xdc !11/3. */
16499FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
16500{
16501 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
16502
16503 IEM_MC_BEGIN(3, 3);
16504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16505 IEM_MC_LOCAL(uint16_t, u16Fsw);
16506 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16507 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16508 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16509 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16510
16511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16513
16514 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16515 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16516 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16517
16518 IEM_MC_PREPARE_FPU_USAGE();
16519 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16520 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16521 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16522 IEM_MC_ELSE()
16523 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16524 IEM_MC_ENDIF();
16525 IEM_MC_ADVANCE_RIP();
16526
16527 IEM_MC_END();
16528 return VINF_SUCCESS;
16529}
16530
16531
16532/** Opcode 0xdc !11/4. */
16533FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
16534{
16535 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
16536 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
16537}
16538
16539
16540/** Opcode 0xdc !11/5. */
16541FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
16542{
16543 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
16544 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
16545}
16546
16547
16548/** Opcode 0xdc !11/6. */
16549FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
16550{
16551 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
16552 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
16553}
16554
16555
16556/** Opcode 0xdc !11/7. */
16557FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
16558{
16559 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
16560 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
16561}
16562
16563
16564/** Opcode 0xdc. */
16565FNIEMOP_DEF(iemOp_EscF4)
16566{
16567 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16568 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
16569 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16570 {
16571 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16572 {
16573 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
16574 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
16575 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, Intel behavior is that of FCOM ST(i). */
16576 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, Intel behavior is that of FCOMP ST(i). */
16577 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
16578 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
16579 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
16580 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
16581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16582 }
16583 }
16584 else
16585 {
16586 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16587 {
16588 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
16589 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
16590 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
16591 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
16592 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
16593 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
16594 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
16595 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
16596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16597 }
16598 }
16599}
16600
16601
16602/** Opcode 0xdd !11/0.
16603 * @sa iemOp_fld_m32r */
16604FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
16605{
16606 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
16607
16608 IEM_MC_BEGIN(2, 3);
16609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16610 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16611 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
16612 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16613 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
16614
16615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16617 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16618 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16619
16620 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16621 IEM_MC_PREPARE_FPU_USAGE();
16622 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16623 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
16624 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16625 IEM_MC_ELSE()
16626 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16627 IEM_MC_ENDIF();
16628 IEM_MC_ADVANCE_RIP();
16629
16630 IEM_MC_END();
16631 return VINF_SUCCESS;
16632}
16633
16634
16635/** Opcode 0xdd !11/1. */
16636FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
16637{
16638 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
16639 IEM_MC_BEGIN(3, 2);
16640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16641 IEM_MC_LOCAL(uint16_t, u16Fsw);
16642 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16643 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16644 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16645
16646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16648 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16649 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16650
16651 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16652 IEM_MC_PREPARE_FPU_USAGE();
16653 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16654 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16655 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16656 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16657 IEM_MC_ELSE()
16658 IEM_MC_IF_FCW_IM()
16659 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16660 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16661 IEM_MC_ENDIF();
16662 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16663 IEM_MC_ENDIF();
16664 IEM_MC_ADVANCE_RIP();
16665
16666 IEM_MC_END();
16667 return VINF_SUCCESS;
16668}
16669
16670
16671/** Opcode 0xdd !11/2. */
16672FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
16673{
16674 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
16675 IEM_MC_BEGIN(3, 2);
16676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16677 IEM_MC_LOCAL(uint16_t, u16Fsw);
16678 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16679 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16680 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16681
16682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16684 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16685 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16686
16687 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16688 IEM_MC_PREPARE_FPU_USAGE();
16689 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16690 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16691 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16692 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16693 IEM_MC_ELSE()
16694 IEM_MC_IF_FCW_IM()
16695 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16696 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16697 IEM_MC_ENDIF();
16698 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16699 IEM_MC_ENDIF();
16700 IEM_MC_ADVANCE_RIP();
16701
16702 IEM_MC_END();
16703 return VINF_SUCCESS;
16704}
16705
16706
16709/** Opcode 0xdd !11/3. */
16710FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
16711{
16712 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
16713 IEM_MC_BEGIN(3, 2);
16714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16715 IEM_MC_LOCAL(uint16_t, u16Fsw);
16716 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16717 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16718 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16719
16720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16722 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16723 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16724
16725 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16726 IEM_MC_PREPARE_FPU_USAGE();
16727 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16728 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16729 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16730 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16731 IEM_MC_ELSE()
16732 IEM_MC_IF_FCW_IM()
16733 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16734 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16735 IEM_MC_ENDIF();
16736 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16737 IEM_MC_ENDIF();
16738 IEM_MC_ADVANCE_RIP();
16739
16740 IEM_MC_END();
16741 return VINF_SUCCESS;
16742}
16743
16744
16745/** Opcode 0xdd !11/4. */
16746FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
16747{
16748 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
16749 IEM_MC_BEGIN(3, 0);
16750 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16751 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16752 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
16753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16755 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16756 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16757 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16758 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
16759 IEM_MC_END();
16760 return VINF_SUCCESS;
16761}
16762
16763
16764/** Opcode 0xdd !11/6. */
16765FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
16766{
16767 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
16768 IEM_MC_BEGIN(3, 0);
16769 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16770 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16771 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
16772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16774 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16775 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16776 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16777 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
16778 IEM_MC_END();
16779 return VINF_SUCCESS;
16780}
16781
16782
16783/** Opcode 0xdd !11/7. */
16784FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
16785{
16786 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
16787
16788 IEM_MC_BEGIN(0, 2);
16789 IEM_MC_LOCAL(uint16_t, u16Tmp);
16790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16791
16792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16794 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16795
16796 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16797 IEM_MC_FETCH_FSW(u16Tmp);
16798 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
16799 IEM_MC_ADVANCE_RIP();
16800
16801/** @todo Debug / drop a hint to the verifier that things may differ
16802 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
16803 * NT4SP1. (X86_FSW_PE) */
16804 IEM_MC_END();
16805 return VINF_SUCCESS;
16806}
16807
16808
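/*
 * Decoding the FSW value FNSTSW stores, matching the debug note above
 * (0x4020 = C3 + PE, 0x4000 = C3 only).  Standalone example:
 */
#if 0 /* illustrative sketch, not built */
# include <stdint.h>
# include <stdio.h>

int main(void)
{
    uint16_t const fsw = 0x4020;
    printf("B=%d C3=%d TOP=%d C2=%d C1=%d C0=%d ES=%d SF=%d PE=%d UE=%d OE=%d ZE=%d DE=%d IE=%d\n",
           (fsw >> 15) & 1, (fsw >> 14) & 1, (fsw >> 11) & 7, (fsw >> 10) & 1,
           (fsw >>  9) & 1, (fsw >>  8) & 1, (fsw >>  7) & 1, (fsw >>  6) & 1,
           (fsw >>  5) & 1, (fsw >>  4) & 1, (fsw >>  3) & 1, (fsw >>  2) & 1,
           (fsw >>  1) & 1,  fsw        & 1);
    return 0;
}
#endif
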
16809/** Opcode 0xdd 11/0. */
16810FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
16811{
16812 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
16813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16814 /* Note! C0, C1, C2 and C3 are documented as undefined; we leave them
16815 unmodified. */
16816
16817 IEM_MC_BEGIN(0, 0);
16818
16819 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16820 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16821
16822 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16823 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16824 IEM_MC_UPDATE_FPU_OPCODE_IP();
16825
16826 IEM_MC_ADVANCE_RIP();
16827 IEM_MC_END();
16828 return VINF_SUCCESS;
16829}
16830
16831
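/*
 * What IEM_MC_FPU_STACK_FREE amounts to: FFREE ST(i) sets the two tag-word
 * bits of the corresponding physical register to 11b (empty); the register
 * contents and TOP are not modified.  Standalone sketch (invented name); note
 * the stack-relative index is biased by TOP to get the physical register:
 */
#if 0 /* illustrative sketch, not built */
# include <stdint.h>

static uint16_t FfreeSketch(uint16_t ftw, unsigned iTop, unsigned iStReg)
{
    unsigned const iPhysReg = (iTop + iStReg) & 7;
    return (uint16_t)(ftw | (UINT16_C(3) << (iPhysReg * 2)));
}
#endif
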
16832/** Opcode 0xdd 11/2. */
16833FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
16834{
16835 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
16836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16837
16838 IEM_MC_BEGIN(0, 2);
16839 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
16840 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16841 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16842 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16843
16844 IEM_MC_PREPARE_FPU_USAGE();
16845 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16846 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
16847 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16848 IEM_MC_ELSE()
16849 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16850 IEM_MC_ENDIF();
16851
16852 IEM_MC_ADVANCE_RIP();
16853 IEM_MC_END();
16854 return VINF_SUCCESS;
16855}
16856
16857
16858/** Opcode 0xdd 11/4. */
16859FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
16860{
16861 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
16862 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
16863}
16864
16865
16866/** Opcode 0xdd 11/5. */
16867FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
16868{
16869 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
16870 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
16871}
16872
16873
16874/** Opcode 0xdd. */
16875FNIEMOP_DEF(iemOp_EscF5)
16876{
16877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16878 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16879 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16880 {
16881 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16882 {
16883 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16884 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of FXCH ST(i). */
16885 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16886 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16887 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
16888 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16889 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16890 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16892 }
16893 }
16894 else
16895 {
16896 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16897 {
16898 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16899 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16900 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16901 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16902 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16903 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16904 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16905 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16906 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16907 }
16908 }
16909}
16910
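/*
 * Aside: all the FPU escape dispatchers above and below split the ModRM byte
 * the same way.  A minimal stand-alone sketch of that split (not part of the
 * build; the helper name is made up, the shift/mask values mirror the
 * X86_MODRM_* constants):
 */
#if 0
# include <stdint.h>
static void ExampleSplitModRm(uint8_t bRm, unsigned *piMod, unsigned *piReg, unsigned *piRm)
{
    *piMod = bRm >> 6;        /* 3 means register operand, anything else is a memory form. */
    *piReg = (bRm >> 3) & 7;  /* Selects the switch case (the /digit in the opcode comments). */
    *piRm  = bRm & 7;         /* Register number, or addressing mode for the memory forms. */
}
#endif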
16911
16912/** Opcode 0xde 11/0. */
16913FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
16914{
16915 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
16916 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
16917}
16918
16919
16920/** Opcode 0xde 11/1. */
16921FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
16922{
16923 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
16924 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
16925}
16926
16927
16928/** Opcode 0xde 0xd9. */
16929FNIEMOP_DEF(iemOp_fcompp)
16930{
16931 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
16932 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
16933}
16934
16935
16936/** Opcode 0xde 11/4. */
16937FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16938{
16939 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16940 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16941}
16942
16943
16944/** Opcode 0xde 11/5. */
16945FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16946{
16947 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16948 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16949}
16950
16951
16952/** Opcode 0xde 11/6. */
16953FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16954{
16955 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16956 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16957}
16958
16959
16960/** Opcode 0xde 11/7. */
16961FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16962{
16963 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16964 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16965}
16966
16967
16968/**
16969 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16970 * the result in ST0.
16971 *
16972 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16973 */
16974FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16975{
16976 IEM_MC_BEGIN(3, 3);
16977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16978 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16979 IEM_MC_LOCAL(int16_t, i16Val2);
16980 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16981 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16982 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16983
16984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16986
16987 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16988 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16989 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16990
16991 IEM_MC_PREPARE_FPU_USAGE();
16992 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16993 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16994 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16995 IEM_MC_ELSE()
16996 IEM_MC_FPU_STACK_UNDERFLOW(0);
16997 IEM_MC_ENDIF();
16998 IEM_MC_ADVANCE_RIP();
16999
17000 IEM_MC_END();
17001 return VINF_SUCCESS;
17002}
17003
17004
17005/** Opcode 0xde !11/0. */
17006FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
17007{
17008 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
17009 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
17010}
17011
17012
17013/** Opcode 0xde !11/1. */
17014FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
17015{
17016 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
17017 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
17018}
17019
17020
17021/** Opcode 0xde !11/2. */
17022FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
17023{
17024 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
17025
17026 IEM_MC_BEGIN(3, 3);
17027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17028 IEM_MC_LOCAL(uint16_t, u16Fsw);
17029 IEM_MC_LOCAL(int16_t, i16Val2);
17030 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17031 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
17032 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
17033
17034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17036
17037 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17038 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17039 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17040
17041 IEM_MC_PREPARE_FPU_USAGE();
17042 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
17043 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
17044 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17045 IEM_MC_ELSE()
17046 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17047 IEM_MC_ENDIF();
17048 IEM_MC_ADVANCE_RIP();
17049
17050 IEM_MC_END();
17051 return VINF_SUCCESS;
17052}
17053
17054
17055/** Opcode 0xde !11/3. */
17056FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
17057{
17058 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
17059
17060 IEM_MC_BEGIN(3, 3);
17061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17062 IEM_MC_LOCAL(uint16_t, u16Fsw);
17063 IEM_MC_LOCAL(int16_t, i16Val2);
17064 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17065 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
17066 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
17067
17068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17070
17071 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17072 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17073 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17074
17075 IEM_MC_PREPARE_FPU_USAGE();
17076 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
17077 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
17078 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17079 IEM_MC_ELSE()
17080 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17081 IEM_MC_ENDIF();
17082 IEM_MC_ADVANCE_RIP();
17083
17084 IEM_MC_END();
17085 return VINF_SUCCESS;
17086}
17087
17088
17089/** Opcode 0xde !11/4. */
17090FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
17091{
17092 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
17093 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
17094}
17095
17096
17097/** Opcode 0xde !11/5. */
17098FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
17099{
17100 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
17101 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
17102}
17103
17104
17105/** Opcode 0xde !11/6. */
17106FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
17107{
17108 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
17109 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
17110}
17111
17112
17113/** Opcode 0xde !11/7. */
17114FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
17115{
17116 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
17117 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
17118}
17119
17120
17121/** Opcode 0xde. */
17122FNIEMOP_DEF(iemOp_EscF6)
17123{
17124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17125 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
17126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17127 {
17128 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17129 {
17130 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
17131 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
17132 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
17133 case 3: if (bRm == 0xd9)
17134 return FNIEMOP_CALL(iemOp_fcompp);
17135 return IEMOP_RAISE_INVALID_OPCODE();
17136 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
17137 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
17138 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
17139 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
17140 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17141 }
17142 }
17143 else
17144 {
17145 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17146 {
17147 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
17148 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
17149 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
17150 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
17151 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
17152 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
17153 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
17154 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
17155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17156 }
17157 }
17158}
17159
17160
17161/** Opcode 0xdf 11/0.
17162 * Undocumented instruction, assumed to work like ffree + fincstp. */
17163FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
17164{
17165 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
17166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17167
17168 IEM_MC_BEGIN(0, 0);
17169
17170 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17171 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17172
17173 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
17174 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
17175 IEM_MC_FPU_STACK_INC_TOP();
17176 IEM_MC_UPDATE_FPU_OPCODE_IP();
17177
17178 IEM_MC_ADVANCE_RIP();
17179 IEM_MC_END();
17180 return VINF_SUCCESS;
17181}
17182
17183
17184/** Opcode 0xdf 0xe0. */
17185FNIEMOP_DEF(iemOp_fnstsw_ax)
17186{
17187 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
17188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17189
17190 IEM_MC_BEGIN(0, 1);
17191 IEM_MC_LOCAL(uint16_t, u16Tmp);
17192 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17193 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
17194 IEM_MC_FETCH_FSW(u16Tmp);
17195 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
17196 IEM_MC_ADVANCE_RIP();
17197 IEM_MC_END();
17198 return VINF_SUCCESS;
17199}
17200
17201
17202/** Opcode 0xdf 11/5. */
17203FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
17204{
17205 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
17206 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17207}
17208
17209
17210/** Opcode 0xdf 11/6. */
17211FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
17212{
17213 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
17214 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17215}
17216
17217
17218/** Opcode 0xdf !11/0. */
17219FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
17220{
17221 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
17222
17223 IEM_MC_BEGIN(2, 3);
17224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17225 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17226 IEM_MC_LOCAL(int16_t, i16Val);
17227 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17228 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
17229
17230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17232
17233 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17234 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17235 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17236
17237 IEM_MC_PREPARE_FPU_USAGE();
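    /* A load pushes the stack: the slot that becomes the new ST(0) is the
       current ST(7), so checking that ST(7) is empty is the push overflow
       check.  (Same pattern in the other FILD/FLD style instructions.) */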
17238 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17239 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
17240 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17241 IEM_MC_ELSE()
17242 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17243 IEM_MC_ENDIF();
17244 IEM_MC_ADVANCE_RIP();
17245
17246 IEM_MC_END();
17247 return VINF_SUCCESS;
17248}
17249
17250
17251/** Opcode 0xdf !11/1. */
17252FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
17253{
17254 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
17255 IEM_MC_BEGIN(3, 2);
17256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17257 IEM_MC_LOCAL(uint16_t, u16Fsw);
17258 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17259 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17260 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17261
17262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17264 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17265 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17266
17267 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17268 IEM_MC_PREPARE_FPU_USAGE();
17269 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17270 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17271 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17272 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17273 IEM_MC_ELSE()
17274 IEM_MC_IF_FCW_IM()
17275 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17276 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17277 IEM_MC_ENDIF();
17278 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17279 IEM_MC_ENDIF();
17280 IEM_MC_ADVANCE_RIP();
17281
17282 IEM_MC_END();
17283 return VINF_SUCCESS;
17284}
17285
17286
17287/** Opcode 0xdf !11/2. */
17288FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
17289{
17290 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
17291 IEM_MC_BEGIN(3, 2);
17292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17293 IEM_MC_LOCAL(uint16_t, u16Fsw);
17294 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17295 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17296 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17297
17298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17300 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17301 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17302
17303 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17304 IEM_MC_PREPARE_FPU_USAGE();
17305 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17306 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17307 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17308 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17309 IEM_MC_ELSE()
17310 IEM_MC_IF_FCW_IM()
17311 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17312 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17313 IEM_MC_ENDIF();
17314 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17315 IEM_MC_ENDIF();
17316 IEM_MC_ADVANCE_RIP();
17317
17318 IEM_MC_END();
17319 return VINF_SUCCESS;
17320}
17321
17322
17323/** Opcode 0xdf !11/3. */
17324FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
17325{
17326 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
17327 IEM_MC_BEGIN(3, 2);
17328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17329 IEM_MC_LOCAL(uint16_t, u16Fsw);
17330 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17331 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17332 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17333
17334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17336 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17337 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17338
17339 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17340 IEM_MC_PREPARE_FPU_USAGE();
17341 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17342 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17343 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17344 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17345 IEM_MC_ELSE()
17346 IEM_MC_IF_FCW_IM()
17347 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17348 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17349 IEM_MC_ENDIF();
17350 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17351 IEM_MC_ENDIF();
17352 IEM_MC_ADVANCE_RIP();
17353
17354 IEM_MC_END();
17355 return VINF_SUCCESS;
17356}
17357
17358
17359/** Opcode 0xdf !11/4. */
17360FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17361
17362
17363/** Opcode 0xdf !11/5. */
17364FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
17365{
17366 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
17367
17368 IEM_MC_BEGIN(2, 3);
17369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17370 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17371 IEM_MC_LOCAL(int64_t, i64Val);
17372 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17373 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
17374
17375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17377
17378 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17379 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17380 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17381
17382 IEM_MC_PREPARE_FPU_USAGE();
17383 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17384 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
17385 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17386 IEM_MC_ELSE()
17387 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17388 IEM_MC_ENDIF();
17389 IEM_MC_ADVANCE_RIP();
17390
17391 IEM_MC_END();
17392 return VINF_SUCCESS;
17393}
17394
17395
17396/** Opcode 0xdf !11/6. */
17397FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17398
17399
17400/** Opcode 0xdf !11/7. */
17401FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
17402{
17403 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
17404 IEM_MC_BEGIN(3, 2);
17405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17406 IEM_MC_LOCAL(uint16_t, u16Fsw);
17407 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17408 IEM_MC_ARG(int64_t *, pi64Dst, 1);
17409 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17410
17411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17413 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17414 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17415
17416 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17417 IEM_MC_PREPARE_FPU_USAGE();
17418 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17419 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
17420 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
17421 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17422 IEM_MC_ELSE()
17423 IEM_MC_IF_FCW_IM()
17424 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
17425 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
17426 IEM_MC_ENDIF();
17427 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17428 IEM_MC_ENDIF();
17429 IEM_MC_ADVANCE_RIP();
17430
17431 IEM_MC_END();
17432 return VINF_SUCCESS;
17433}
17434
17435
17436/** Opcode 0xdf. */
17437FNIEMOP_DEF(iemOp_EscF7)
17438{
17439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7); /* Keep FOP updated like the other escape dispatchers above. */
17440 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17441 {
17442 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17443 {
17444 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
17445 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
17446 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17447 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17448 case 4: if (bRm == 0xe0)
17449 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17450 return IEMOP_RAISE_INVALID_OPCODE();
17451 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17452 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17453 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17455 }
17456 }
17457 else
17458 {
17459 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17460 {
17461 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17462 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17463 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17464 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17465 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17466 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17467 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17468 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17470 }
17471 }
17472}
17473
17474
17475/** Opcode 0xe0. */
17476FNIEMOP_DEF(iemOp_loopne_Jb)
17477{
17478 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
17479 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17481 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17482
17483 switch (pVCpu->iem.s.enmEffAddrMode)
17484 {
17485 case IEMMODE_16BIT:
17486 IEM_MC_BEGIN(0,0);
17487 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17488 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17489 IEM_MC_REL_JMP_S8(i8Imm);
17490 } IEM_MC_ELSE() {
17491 IEM_MC_ADVANCE_RIP();
17492 } IEM_MC_ENDIF();
17493 IEM_MC_END();
17494 return VINF_SUCCESS;
17495
17496 case IEMMODE_32BIT:
17497 IEM_MC_BEGIN(0,0);
17498 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17499 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17500 IEM_MC_REL_JMP_S8(i8Imm);
17501 } IEM_MC_ELSE() {
17502 IEM_MC_ADVANCE_RIP();
17503 } IEM_MC_ENDIF();
17504 IEM_MC_END();
17505 return VINF_SUCCESS;
17506
17507 case IEMMODE_64BIT:
17508 IEM_MC_BEGIN(0,0);
17509 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17510 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17511 IEM_MC_REL_JMP_S8(i8Imm);
17512 } IEM_MC_ELSE() {
17513 IEM_MC_ADVANCE_RIP();
17514 } IEM_MC_ENDIF();
17515 IEM_MC_END();
17516 return VINF_SUCCESS;
17517
17518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17519 }
17520}
17521
17522
17523/** Opcode 0xe1. */
17524FNIEMOP_DEF(iemOp_loope_Jb)
17525{
17526 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
17527 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17529 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17530
17531 switch (pVCpu->iem.s.enmEffAddrMode)
17532 {
17533 case IEMMODE_16BIT:
17534 IEM_MC_BEGIN(0,0);
17535 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17536 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17537 IEM_MC_REL_JMP_S8(i8Imm);
17538 } IEM_MC_ELSE() {
17539 IEM_MC_ADVANCE_RIP();
17540 } IEM_MC_ENDIF();
17541 IEM_MC_END();
17542 return VINF_SUCCESS;
17543
17544 case IEMMODE_32BIT:
17545 IEM_MC_BEGIN(0,0);
17546 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17547 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17548 IEM_MC_REL_JMP_S8(i8Imm);
17549 } IEM_MC_ELSE() {
17550 IEM_MC_ADVANCE_RIP();
17551 } IEM_MC_ENDIF();
17552 IEM_MC_END();
17553 return VINF_SUCCESS;
17554
17555 case IEMMODE_64BIT:
17556 IEM_MC_BEGIN(0,0);
17557 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17558 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17559 IEM_MC_REL_JMP_S8(i8Imm);
17560 } IEM_MC_ELSE() {
17561 IEM_MC_ADVANCE_RIP();
17562 } IEM_MC_ENDIF();
17563 IEM_MC_END();
17564 return VINF_SUCCESS;
17565
17566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17567 }
17568}
17569
17570
17571/** Opcode 0xe2. */
17572FNIEMOP_DEF(iemOp_loop_Jb)
17573{
17574 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
17575 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17577 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17578
17579 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
17580 * using the 32-bit operand size override. How can that be restarted? See
17581 * weird pseudo code in intel manual. */
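    /* Note: in all three cases below, a displacement equal to minus the
       instruction length means the branch targets the LOOP instruction
       itself ('loop $').  Instead of spinning CX/ECX/RCX times through the
       emulator, the counter is cleared and execution falls through. */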
17582 switch (pVCpu->iem.s.enmEffAddrMode)
17583 {
17584 case IEMMODE_16BIT:
17585 IEM_MC_BEGIN(0,0);
17586 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17587 {
17588 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17589 IEM_MC_IF_CX_IS_NZ() {
17590 IEM_MC_REL_JMP_S8(i8Imm);
17591 } IEM_MC_ELSE() {
17592 IEM_MC_ADVANCE_RIP();
17593 } IEM_MC_ENDIF();
17594 }
17595 else
17596 {
17597 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
17598 IEM_MC_ADVANCE_RIP();
17599 }
17600 IEM_MC_END();
17601 return VINF_SUCCESS;
17602
17603 case IEMMODE_32BIT:
17604 IEM_MC_BEGIN(0,0);
17605 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17606 {
17607 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17608 IEM_MC_IF_ECX_IS_NZ() {
17609 IEM_MC_REL_JMP_S8(i8Imm);
17610 } IEM_MC_ELSE() {
17611 IEM_MC_ADVANCE_RIP();
17612 } IEM_MC_ENDIF();
17613 }
17614 else
17615 {
17616 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
17617 IEM_MC_ADVANCE_RIP();
17618 }
17619 IEM_MC_END();
17620 return VINF_SUCCESS;
17621
17622 case IEMMODE_64BIT:
17623 IEM_MC_BEGIN(0,0);
17624 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17625 {
17626 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17627 IEM_MC_IF_RCX_IS_NZ() {
17628 IEM_MC_REL_JMP_S8(i8Imm);
17629 } IEM_MC_ELSE() {
17630 IEM_MC_ADVANCE_RIP();
17631 } IEM_MC_ENDIF();
17632 }
17633 else
17634 {
17635 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
17636 IEM_MC_ADVANCE_RIP();
17637 }
17638 IEM_MC_END();
17639 return VINF_SUCCESS;
17640
17641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17642 }
17643}
17644
17645
17646/** Opcode 0xe3. */
17647FNIEMOP_DEF(iemOp_jecxz_Jb)
17648{
17649 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
17650 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17652 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17653
17654 switch (pVCpu->iem.s.enmEffAddrMode)
17655 {
17656 case IEMMODE_16BIT:
17657 IEM_MC_BEGIN(0,0);
17658 IEM_MC_IF_CX_IS_NZ() {
17659 IEM_MC_ADVANCE_RIP();
17660 } IEM_MC_ELSE() {
17661 IEM_MC_REL_JMP_S8(i8Imm);
17662 } IEM_MC_ENDIF();
17663 IEM_MC_END();
17664 return VINF_SUCCESS;
17665
17666 case IEMMODE_32BIT:
17667 IEM_MC_BEGIN(0,0);
17668 IEM_MC_IF_ECX_IS_NZ() {
17669 IEM_MC_ADVANCE_RIP();
17670 } IEM_MC_ELSE() {
17671 IEM_MC_REL_JMP_S8(i8Imm);
17672 } IEM_MC_ENDIF();
17673 IEM_MC_END();
17674 return VINF_SUCCESS;
17675
17676 case IEMMODE_64BIT:
17677 IEM_MC_BEGIN(0,0);
17678 IEM_MC_IF_RCX_IS_NZ() {
17679 IEM_MC_ADVANCE_RIP();
17680 } IEM_MC_ELSE() {
17681 IEM_MC_REL_JMP_S8(i8Imm);
17682 } IEM_MC_ENDIF();
17683 IEM_MC_END();
17684 return VINF_SUCCESS;
17685
17686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17687 }
17688}
17689
17690
17691/** Opcode 0xe4 */
17692FNIEMOP_DEF(iemOp_in_AL_Ib)
17693{
17694 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
17695 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17697 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
17698}
17699
17700
17701/** Opcode 0xe5 */
17702FNIEMOP_DEF(iemOp_in_eAX_Ib)
17703{
17704 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
17705 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17707 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17708}
17709
17710
17711/** Opcode 0xe6 */
17712FNIEMOP_DEF(iemOp_out_Ib_AL)
17713{
17714 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
17715 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17717 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
17718}
17719
17720
17721/** Opcode 0xe7 */
17722FNIEMOP_DEF(iemOp_out_Ib_eAX)
17723{
17724 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
17725 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17727 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17728}
17729
17730
17731/** Opcode 0xe8. */
17732FNIEMOP_DEF(iemOp_call_Jv)
17733{
17734 IEMOP_MNEMONIC(call_Jv, "call Jv");
17735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17736 switch (pVCpu->iem.s.enmEffOpSize)
17737 {
17738 case IEMMODE_16BIT:
17739 {
17740 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17741 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
17742 }
17743
17744 case IEMMODE_32BIT:
17745 {
17746 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17747 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
17748 }
17749
17750 case IEMMODE_64BIT:
17751 {
17752 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17753 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
17754 }
17755
17756 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17757 }
17758}
17759
17760
17761/** Opcode 0xe9. */
17762FNIEMOP_DEF(iemOp_jmp_Jv)
17763{
17764 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
17765 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17766 switch (pVCpu->iem.s.enmEffOpSize)
17767 {
17768 case IEMMODE_16BIT:
17769 {
17770 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
17771 IEM_MC_BEGIN(0, 0);
17772 IEM_MC_REL_JMP_S16(i16Imm);
17773 IEM_MC_END();
17774 return VINF_SUCCESS;
17775 }
17776
17777 case IEMMODE_64BIT:
17778 case IEMMODE_32BIT:
17779 {
17780 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
17781 IEM_MC_BEGIN(0, 0);
17782 IEM_MC_REL_JMP_S32(i32Imm);
17783 IEM_MC_END();
17784 return VINF_SUCCESS;
17785 }
17786
17787 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17788 }
17789}
17790
17791
17792/** Opcode 0xea. */
17793FNIEMOP_DEF(iemOp_jmp_Ap)
17794{
17795 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
17796 IEMOP_HLP_NO_64BIT();
17797
17798 /* Decode the far pointer address and pass it on to the far jump C implementation. */
17799 uint32_t offSeg;
17800 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
17801 IEM_OPCODE_GET_NEXT_U32(&offSeg);
17802 else
17803 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
17804 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
17805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17806 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
17807}
17808
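/*
 * Aside: the 'Ap' operand decoded above is an immediate far pointer - the
 * offset comes first (16 or 32 bits by operand size), the 16-bit selector
 * last.  Minimal sketch (not part of the build; helper name made up,
 * little-endian host assumed):
 */
#if 0
# include <stdint.h>
# include <string.h>
static void ExampleDecodeFarPtr1632(const uint8_t *pbInstr, uint32_t *poffSeg, uint16_t *puSel)
{
    memcpy(poffSeg, pbInstr, sizeof(*poffSeg));   /* 32-bit offset comes first in the instruction stream. */
    memcpy(puSel, pbInstr + 4, sizeof(*puSel));   /* The 16-bit selector follows the offset. */
}
#endif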
17809
17810/** Opcode 0xeb. */
17811FNIEMOP_DEF(iemOp_jmp_Jb)
17812{
17813 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17814 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17816 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17817
17818 IEM_MC_BEGIN(0, 0);
17819 IEM_MC_REL_JMP_S8(i8Imm);
17820 IEM_MC_END();
17821 return VINF_SUCCESS;
17822}
17823
17824
17825/** Opcode 0xec */
17826FNIEMOP_DEF(iemOp_in_AL_DX)
17827{
17828 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17830 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17831}
17832
17833
17834/** Opcode 0xed */
17835FNIEMOP_DEF(iemOp_in_eAX_DX)
17836{
17837 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17839 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17840}
17841
17842
17843/** Opcode 0xee */
17844FNIEMOP_DEF(iemOp_out_DX_AL)
17845{
17846 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17848 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17849}
17850
17851
17852/** Opcode 0xef */
17853FNIEMOP_DEF(iemOp_out_DX_eAX)
17854{
17855 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17857 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17858}
17859
17860
17861/** Opcode 0xf0. */
17862FNIEMOP_DEF(iemOp_lock)
17863{
17864 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17865 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17866
17867 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17868 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17869}
17870
17871
17872/** Opcode 0xf1. */
17873FNIEMOP_DEF(iemOp_int_1)
17874{
17875 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17876 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17877 /** @todo testcase! */
17878 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17879}
17880
17881
17882/** Opcode 0xf2. */
17883FNIEMOP_DEF(iemOp_repne)
17884{
17885 /* This overrides any previous REPE prefix. */
17886 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17887 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17888 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17889
17890 /* For the 4 entry opcode tables, REPNZ overrides any previous
17891 REPZ and operand size prefixes. */
17892 pVCpu->iem.s.idxPrefix = 3;
17893
17894 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17895 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17896}
17897
17898
17899/** Opcode 0xf3. */
17900FNIEMOP_DEF(iemOp_repe)
17901{
17902 /* This overrides any previous REPNE prefix. */
17903 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17904 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17905 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17906
17907 /* For the 4 entry opcode tables, REPZ overrides any previous
17908 REPNZ and operand size prefixes. */
17909 pVCpu->iem.s.idxPrefix = 2;
17910
17911 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17912 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17913}
17914
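/* Note: idxPrefix selects the column in the 4-entry (two-byte escape) opcode
 * tables.  Judging from the values used here and by the REPNZ handler above,
 * the mapping is 0 = no prefix, 1 = 0x66, 2 = 0xf3 (REPZ), 3 = 0xf2 (REPNZ),
 * with the last prefix decoded winning. */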
17915
17916/** Opcode 0xf4. */
17917FNIEMOP_DEF(iemOp_hlt)
17918{
    IEMOP_MNEMONIC(hlt, "hlt");
17919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17920 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17921}
17922
17923
17924/** Opcode 0xf5. */
17925FNIEMOP_DEF(iemOp_cmc)
17926{
17927 IEMOP_MNEMONIC(cmc, "cmc");
17928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17929 IEM_MC_BEGIN(0, 0);
17930 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
17931 IEM_MC_ADVANCE_RIP();
17932 IEM_MC_END();
17933 return VINF_SUCCESS;
17934}
17935
17936
17937/**
17938 * Common implementation of 'inc/dec/not/neg Eb'.
17939 *
17940 * @param bRm The RM byte.
17941 * @param pImpl The instruction implementation.
17942 */
17943FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17944{
17945 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17946 {
17947 /* register access */
17948 IEM_MC_BEGIN(2, 0);
17949 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17950 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17951 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17952 IEM_MC_REF_EFLAGS(pEFlags);
17953 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17954 IEM_MC_ADVANCE_RIP();
17955 IEM_MC_END();
17956 }
17957 else
17958 {
17959 /* memory access. */
17960 IEM_MC_BEGIN(2, 2);
17961 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17962 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17964
17965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17966 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17967 IEM_MC_FETCH_EFLAGS(EFlags);
17968 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17969 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17970 else
17971 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17972
17973 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17974 IEM_MC_COMMIT_EFLAGS(EFlags);
17975 IEM_MC_ADVANCE_RIP();
17976 IEM_MC_END();
17977 }
17978 return VINF_SUCCESS;
17979}
17980
17981
17982/**
17983 * Common implementation of 'inc/dec/not/neg Ev'.
17984 *
17985 * @param bRm The RM byte.
17986 * @param pImpl The instruction implementation.
17987 */
17988FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17989{
17990 /* Registers are handled by a common worker. */
17991 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17992 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17993
17994 /* Memory we do here. */
17995 switch (pVCpu->iem.s.enmEffOpSize)
17996 {
17997 case IEMMODE_16BIT:
17998 IEM_MC_BEGIN(2, 2);
17999 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18000 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18002
18003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18004 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18005 IEM_MC_FETCH_EFLAGS(EFlags);
18006 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18007 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
18008 else
18009 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
18010
18011 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
18012 IEM_MC_COMMIT_EFLAGS(EFlags);
18013 IEM_MC_ADVANCE_RIP();
18014 IEM_MC_END();
18015 return VINF_SUCCESS;
18016
18017 case IEMMODE_32BIT:
18018 IEM_MC_BEGIN(2, 2);
18019 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18020 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18022
18023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18024 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18025 IEM_MC_FETCH_EFLAGS(EFlags);
18026 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18027 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
18028 else
18029 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
18030
18031 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
18032 IEM_MC_COMMIT_EFLAGS(EFlags);
18033 IEM_MC_ADVANCE_RIP();
18034 IEM_MC_END();
18035 return VINF_SUCCESS;
18036
18037 case IEMMODE_64BIT:
18038 IEM_MC_BEGIN(2, 2);
18039 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18040 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18042
18043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18044 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18045 IEM_MC_FETCH_EFLAGS(EFlags);
18046 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18047 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
18048 else
18049 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
18050
18051 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
18052 IEM_MC_COMMIT_EFLAGS(EFlags);
18053 IEM_MC_ADVANCE_RIP();
18054 IEM_MC_END();
18055 return VINF_SUCCESS;
18056
18057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18058 }
18059}
18060
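/*
 * Aside: the width dispatch used by the unary workers above.  A miniature
 * stand-in for the pImpl table (not part of the build) with only the fields
 * the workers actually touch; all names here are made up for the example.
 */
#if 0
# include <stdint.h>
typedef struct EXAMPLEUNARY
{
    void (*pfnNormalU16)(uint16_t *puDst, uint32_t *pfEFlags);
    void (*pfnLockedU16)(uint16_t *puDst, uint32_t *pfEFlags);
    /* ...U8/U32/U64 variants omitted... */
} EXAMPLEUNARY;

static void ExampleDispatchU16(EXAMPLEUNARY const *pImpl, int fLock, uint16_t *puDst, uint32_t *pfEFlags)
{
    if (!fLock)
        pImpl->pfnNormalU16(puDst, pfEFlags);   /* Plain variant. */
    else
        pImpl->pfnLockedU16(puDst, pfEFlags);   /* Interlocked variant for LOCK-prefixed memory forms. */
}
#endif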
18061
18062/** Opcode 0xf6 /0. */
18063FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
18064{
18065 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
18066 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18067
18068 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18069 {
18070 /* register access */
18071 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
18072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18073
18074 IEM_MC_BEGIN(3, 0);
18075 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18076 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
18077 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18078 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18079 IEM_MC_REF_EFLAGS(pEFlags);
18080 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
18081 IEM_MC_ADVANCE_RIP();
18082 IEM_MC_END();
18083 }
18084 else
18085 {
18086 /* memory access. */
18087 IEM_MC_BEGIN(3, 2);
18088 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18089 IEM_MC_ARG(uint8_t, u8Src, 1);
18090 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18092
18093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
18094 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
18095 IEM_MC_ASSIGN(u8Src, u8Imm);
18096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18097 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18098 IEM_MC_FETCH_EFLAGS(EFlags);
18099 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
18100
18101 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
18102 IEM_MC_COMMIT_EFLAGS(EFlags);
18103 IEM_MC_ADVANCE_RIP();
18104 IEM_MC_END();
18105 }
18106 return VINF_SUCCESS;
18107}
18108
18109
18110/** Opcode 0xf7 /0. */
18111FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
18112{
18113 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
18114 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18115
18116 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18117 {
18118 /* register access */
18119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18120 switch (pVCpu->iem.s.enmEffOpSize)
18121 {
18122 case IEMMODE_16BIT:
18123 {
18124 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18125 IEM_MC_BEGIN(3, 0);
18126 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18127 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
18128 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18129 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18130 IEM_MC_REF_EFLAGS(pEFlags);
18131 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18132 IEM_MC_ADVANCE_RIP();
18133 IEM_MC_END();
18134 return VINF_SUCCESS;
18135 }
18136
18137 case IEMMODE_32BIT:
18138 {
18139 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18140 IEM_MC_BEGIN(3, 0);
18141 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18142 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
18143 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18144 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18145 IEM_MC_REF_EFLAGS(pEFlags);
18146 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18147 /* No clearing the high dword here - test doesn't write back the result. */
18148 IEM_MC_ADVANCE_RIP();
18149 IEM_MC_END();
18150 return VINF_SUCCESS;
18151 }
18152
18153 case IEMMODE_64BIT:
18154 {
18155 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18156 IEM_MC_BEGIN(3, 0);
18157 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18158 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
18159 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18160 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18161 IEM_MC_REF_EFLAGS(pEFlags);
18162 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18163 IEM_MC_ADVANCE_RIP();
18164 IEM_MC_END();
18165 return VINF_SUCCESS;
18166 }
18167
18168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18169 }
18170 }
18171 else
18172 {
18173 /* memory access. */
18174 switch (pVCpu->iem.s.enmEffOpSize)
18175 {
18176 case IEMMODE_16BIT:
18177 {
18178 IEM_MC_BEGIN(3, 2);
18179 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18180 IEM_MC_ARG(uint16_t, u16Src, 1);
18181 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18183
18184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
18185 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18186 IEM_MC_ASSIGN(u16Src, u16Imm);
18187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18188 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18189 IEM_MC_FETCH_EFLAGS(EFlags);
18190 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18191
18192 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
18193 IEM_MC_COMMIT_EFLAGS(EFlags);
18194 IEM_MC_ADVANCE_RIP();
18195 IEM_MC_END();
18196 return VINF_SUCCESS;
18197 }
18198
18199 case IEMMODE_32BIT:
18200 {
18201 IEM_MC_BEGIN(3, 2);
18202 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18203 IEM_MC_ARG(uint32_t, u32Src, 1);
18204 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18206
18207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18208 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18209 IEM_MC_ASSIGN(u32Src, u32Imm);
18210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18211 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18212 IEM_MC_FETCH_EFLAGS(EFlags);
18213 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18214
18215 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
18216 IEM_MC_COMMIT_EFLAGS(EFlags);
18217 IEM_MC_ADVANCE_RIP();
18218 IEM_MC_END();
18219 return VINF_SUCCESS;
18220 }
18221
18222 case IEMMODE_64BIT:
18223 {
18224 IEM_MC_BEGIN(3, 2);
18225 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18226 IEM_MC_ARG(uint64_t, u64Src, 1);
18227 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18229
18230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18231 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18232 IEM_MC_ASSIGN(u64Src, u64Imm);
18233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18234 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18235 IEM_MC_FETCH_EFLAGS(EFlags);
18236 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18237
18238 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18239 IEM_MC_COMMIT_EFLAGS(EFlags);
18240 IEM_MC_ADVANCE_RIP();
18241 IEM_MC_END();
18242 return VINF_SUCCESS;
18243 }
18244
18245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18246 }
18247 }
18248}
18249
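/*
 * Aside: what the iemAImpl_test_u* workers compute - an AND without
 * writeback, flags only.  Minimal sketch of the 16-bit flag calculation
 * (not part of the build; helper name made up): SF/ZF/PF come from the
 * result, CF and OF are cleared, AF is left undefined as the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS lines above note.
 */
#if 0
# include <stdint.h>
static uint32_t ExampleTestU16Flags(uint16_t uDst, uint16_t uSrc)
{
    uint16_t const uResult = uDst & uSrc;
    uint32_t fEfl = 0;                      /* CF (bit 0) and OF (bit 11) cleared. */
    if (!uResult)
        fEfl |= 1u << 6;                    /* ZF. */
    if (uResult & 0x8000)
        fEfl |= 1u << 7;                    /* SF. */
    unsigned cBits = 0;                     /* PF: even parity of the low byte. */
    for (unsigned i = 0; i < 8; i++)
        cBits += (uResult >> i) & 1;
    if (!(cBits & 1))
        fEfl |= 1u << 2;                    /* PF. */
    return fEfl;
}
#endif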
18250
18251/** Opcode 0xf6 /4, /5, /6 and /7. */
18252FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
18253{
18254 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18255 {
18256 /* register access */
18257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18258 IEM_MC_BEGIN(3, 1);
18259 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18260 IEM_MC_ARG(uint8_t, u8Value, 1);
18261 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18262 IEM_MC_LOCAL(int32_t, rc);
18263
18264 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18265 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18266 IEM_MC_REF_EFLAGS(pEFlags);
18267 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18268 IEM_MC_IF_LOCAL_IS_Z(rc) {
18269 IEM_MC_ADVANCE_RIP();
18270 } IEM_MC_ELSE() {
18271 IEM_MC_RAISE_DIVIDE_ERROR();
18272 } IEM_MC_ENDIF();
18273
18274 IEM_MC_END();
18275 }
18276 else
18277 {
18278 /* memory access. */
18279 IEM_MC_BEGIN(3, 2);
18280 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18281 IEM_MC_ARG(uint8_t, u8Value, 1);
18282 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18284 IEM_MC_LOCAL(int32_t, rc);
18285
18286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18288 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18289 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18290 IEM_MC_REF_EFLAGS(pEFlags);
18291 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18292 IEM_MC_IF_LOCAL_IS_Z(rc) {
18293 IEM_MC_ADVANCE_RIP();
18294 } IEM_MC_ELSE() {
18295 IEM_MC_RAISE_DIVIDE_ERROR();
18296 } IEM_MC_ENDIF();
18297
18298 IEM_MC_END();
18299 }
18300 return VINF_SUCCESS;
18301}
18302
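/*
 * Aside: the register convention behind the u8 worker above - an implicit AX
 * operand, with a non-zero return signalling #DE.  Minimal sketch of the
 * unsigned divide (not part of the build; helper name made up, EFLAGS
 * computation omitted):
 */
#if 0
# include <stdint.h>
static int ExampleDivU8(uint16_t *puAX, uint8_t u8Divisor)
{
    if (!u8Divisor)
        return -1;                          /* #DE: divide by zero. */
    uint16_t const uQuotient = *puAX / u8Divisor;
    if (uQuotient > 0xff)
        return -1;                          /* #DE: quotient does not fit in AL. */
    *puAX = (uint16_t)((*puAX % u8Divisor) << 8) | (uint8_t)uQuotient; /* AH = remainder, AL = quotient. */
    return 0;
}
#endif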
18303
18304/** Opcode 0xf7 /4, /5, /6 and /7. */
18305FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18306{
18307 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18308
18309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18310 {
18311 /* register access */
18313 switch (pVCpu->iem.s.enmEffOpSize)
18314 {
18315 case IEMMODE_16BIT:
18316 {
18317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18318 IEM_MC_BEGIN(4, 1);
18319 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18320 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18321 IEM_MC_ARG(uint16_t, u16Value, 2);
18322 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18323 IEM_MC_LOCAL(int32_t, rc);
18324
18325 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18326 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18327 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18328 IEM_MC_REF_EFLAGS(pEFlags);
18329 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18330 IEM_MC_IF_LOCAL_IS_Z(rc) {
18331 IEM_MC_ADVANCE_RIP();
18332 } IEM_MC_ELSE() {
18333 IEM_MC_RAISE_DIVIDE_ERROR();
18334 } IEM_MC_ENDIF();
18335
18336 IEM_MC_END();
18337 return VINF_SUCCESS;
18338 }
18339
18340 case IEMMODE_32BIT:
18341 {
18342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18343 IEM_MC_BEGIN(4, 1);
18344 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18345 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18346 IEM_MC_ARG(uint32_t, u32Value, 2);
18347 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18348 IEM_MC_LOCAL(int32_t, rc);
18349
18350 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18351 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18352 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18353 IEM_MC_REF_EFLAGS(pEFlags);
18354 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18355 IEM_MC_IF_LOCAL_IS_Z(rc) {
18356 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18357 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18358 IEM_MC_ADVANCE_RIP();
18359 } IEM_MC_ELSE() {
18360 IEM_MC_RAISE_DIVIDE_ERROR();
18361 } IEM_MC_ENDIF();
18362
18363 IEM_MC_END();
18364 return VINF_SUCCESS;
18365 }
18366
18367 case IEMMODE_64BIT:
18368 {
18369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18370 IEM_MC_BEGIN(4, 1);
18371 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18372 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18373 IEM_MC_ARG(uint64_t, u64Value, 2);
18374 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18375 IEM_MC_LOCAL(int32_t, rc);
18376
18377 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18378 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18379 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18380 IEM_MC_REF_EFLAGS(pEFlags);
18381 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18382 IEM_MC_IF_LOCAL_IS_Z(rc) {
18383 IEM_MC_ADVANCE_RIP();
18384 } IEM_MC_ELSE() {
18385 IEM_MC_RAISE_DIVIDE_ERROR();
18386 } IEM_MC_ENDIF();
18387
18388 IEM_MC_END();
18389 return VINF_SUCCESS;
18390 }
18391
18392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18393 }
18394 }
18395 else
18396 {
18397 /* memory access. */
18398 switch (pVCpu->iem.s.enmEffOpSize)
18399 {
18400 case IEMMODE_16BIT:
18401 {
18402 IEM_MC_BEGIN(4, 2);
18403 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18404 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18405 IEM_MC_ARG(uint16_t, u16Value, 2);
18406 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18408 IEM_MC_LOCAL(int32_t, rc);
18409
18410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18412 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18413 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18414 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18415 IEM_MC_REF_EFLAGS(pEFlags);
18416 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18417 IEM_MC_IF_LOCAL_IS_Z(rc) {
18418 IEM_MC_ADVANCE_RIP();
18419 } IEM_MC_ELSE() {
18420 IEM_MC_RAISE_DIVIDE_ERROR();
18421 } IEM_MC_ENDIF();
18422
18423 IEM_MC_END();
18424 return VINF_SUCCESS;
18425 }
18426
18427 case IEMMODE_32BIT:
18428 {
18429 IEM_MC_BEGIN(4, 2);
18430 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18431 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18432 IEM_MC_ARG(uint32_t, u32Value, 2);
18433 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18435 IEM_MC_LOCAL(int32_t, rc);
18436
18437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18439 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18440 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18441 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18442 IEM_MC_REF_EFLAGS(pEFlags);
18443 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18444 IEM_MC_IF_LOCAL_IS_Z(rc) {
18445 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18446 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18447 IEM_MC_ADVANCE_RIP();
18448 } IEM_MC_ELSE() {
18449 IEM_MC_RAISE_DIVIDE_ERROR();
18450 } IEM_MC_ENDIF();
18451
18452 IEM_MC_END();
18453 return VINF_SUCCESS;
18454 }
18455
18456 case IEMMODE_64BIT:
18457 {
18458 IEM_MC_BEGIN(4, 2);
18459 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18460 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18461 IEM_MC_ARG(uint64_t, u64Value, 2);
18462 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18464 IEM_MC_LOCAL(int32_t, rc);
18465
18466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18468 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18469 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18470 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18471 IEM_MC_REF_EFLAGS(pEFlags);
18472 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18473 IEM_MC_IF_LOCAL_IS_Z(rc) {
18474 IEM_MC_ADVANCE_RIP();
18475 } IEM_MC_ELSE() {
18476 IEM_MC_RAISE_DIVIDE_ERROR();
18477 } IEM_MC_ENDIF();
18478
18479 IEM_MC_END();
18480 return VINF_SUCCESS;
18481 }
18482
18483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18484 }
18485 }
18486}
18487
18488/** Opcode 0xf6. */
18489FNIEMOP_DEF(iemOp_Grp3_Eb)
18490{
18491 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18492 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18493 {
18494 case 0:
18495 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
18496 case 1:
18497/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18498 return IEMOP_RAISE_INVALID_OPCODE();
18499 case 2:
18500 IEMOP_MNEMONIC(not_Eb, "not Eb");
18501 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
18502 case 3:
18503 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
18504 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
18505 case 4:
18506 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
18507 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18508 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
18509 case 5:
18510 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
18511 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18512 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
18513 case 6:
18514 IEMOP_MNEMONIC(div_Eb, "div Eb");
18515 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18516 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
18517 case 7:
18518 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
18519 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18520 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
18521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18522 }
18523}
18524
18525
18526/** Opcode 0xf7. */
18527FNIEMOP_DEF(iemOp_Grp3_Ev)
18528{
18529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18530 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18531 {
18532 case 0:
18533 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
18534 case 1:
18535/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18536 return IEMOP_RAISE_INVALID_OPCODE();
18537 case 2:
18538 IEMOP_MNEMONIC(not_Ev, "not Ev");
18539 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
18540 case 3:
18541 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
18542 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
18543 case 4:
18544 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
18545 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18546 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
18547 case 5:
18548 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
18549 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18550 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
18551 case 6:
18552 IEMOP_MNEMONIC(div_Ev, "div Ev");
18553 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18554 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
18555 case 7:
18556 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
18557 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18558 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
18559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18560 }
18561}
18562
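/* Worked example (illustrative): 0xf7 0xf1 is 'div ecx' (reg=6, rm=1) at a
   32-bit operand size, and with REX.W (0x48 0xf7 0xf1) it becomes 'div rcx';
   the effective operand size selects the U16/U32/U64 worker out of the
   g_iemAImpl_div function table passed above. */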
18563
18564/** Opcode 0xf8. */
18565FNIEMOP_DEF(iemOp_clc)
18566{
18567 IEMOP_MNEMONIC(clc, "clc");
18568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18569 IEM_MC_BEGIN(0, 0);
18570 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
18571 IEM_MC_ADVANCE_RIP();
18572 IEM_MC_END();
18573 return VINF_SUCCESS;
18574}
18575
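/* clc/stc/cld/std in this stretch are pure flag updates: the IEM_MC_BEGIN/END
   block just flips a single EFLAGS bit and advances RIP, so no deferral to a
   C implementation (IEM_MC_DEFER_TO_CIMPL_0) is required. */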
18576
18577/** Opcode 0xf9. */
18578FNIEMOP_DEF(iemOp_stc)
18579{
18580 IEMOP_MNEMONIC(stc, "stc");
18581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18582 IEM_MC_BEGIN(0, 0);
18583 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
18584 IEM_MC_ADVANCE_RIP();
18585 IEM_MC_END();
18586 return VINF_SUCCESS;
18587}
18588
18589
18590/** Opcode 0xfa. */
18591FNIEMOP_DEF(iemOp_cli)
18592{
18593 IEMOP_MNEMONIC(cli, "cli");
18594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18595 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
18596}
18597
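/* Unlike the simple flag instructions, cli and sti defer to C
   implementations: they must perform IOPL/CPL (and VME/PVI) checks, and sti
   additionally has to set up the one-instruction interrupt shadow. */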
18598
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
18600{
18601 IEMOP_MNEMONIC(sti, "sti");
18602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18603 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
18604}
18605
18606
18607/** Opcode 0xfc. */
18608FNIEMOP_DEF(iemOp_cld)
18609{
18610 IEMOP_MNEMONIC(cld, "cld");
18611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18612 IEM_MC_BEGIN(0, 0);
18613 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
18614 IEM_MC_ADVANCE_RIP();
18615 IEM_MC_END();
18616 return VINF_SUCCESS;
18617}
18618
18619
18620/** Opcode 0xfd. */
18621FNIEMOP_DEF(iemOp_std)
18622{
18623 IEMOP_MNEMONIC(std, "std");
18624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18625 IEM_MC_BEGIN(0, 0);
18626 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
18627 IEM_MC_ADVANCE_RIP();
18628 IEM_MC_END();
18629 return VINF_SUCCESS;
18630}
18631
18632
18633/** Opcode 0xfe. */
18634FNIEMOP_DEF(iemOp_Grp4)
18635{
18636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18637 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18638 {
18639 case 0:
18640 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
18641 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
18642 case 1:
18643 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
18644 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
18645 default:
18646 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
18647 return IEMOP_RAISE_INVALID_OPCODE();
18648 }
18649}
18650
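/* Worked example (illustrative): 0xfe 0xc0 is 'inc al' (reg=0, rm=0); any
   reg field of 2..7 on opcode 0xfe decodes to #UD via the default case. */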
18651
18652/**
18653 * Opcode 0xff /2.
18654 * @param bRm The RM byte.
18655 */
18656FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
18657{
18658 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
18659 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18660
18661 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18662 {
18663 /* The new RIP is taken from a register. */
18664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18665 switch (pVCpu->iem.s.enmEffOpSize)
18666 {
18667 case IEMMODE_16BIT:
18668 IEM_MC_BEGIN(1, 0);
18669 IEM_MC_ARG(uint16_t, u16Target, 0);
18670 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18671 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END();
18673 return VINF_SUCCESS;
18674
18675 case IEMMODE_32BIT:
18676 IEM_MC_BEGIN(1, 0);
18677 IEM_MC_ARG(uint32_t, u32Target, 0);
18678 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18679 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END();
18681 return VINF_SUCCESS;
18682
18683 case IEMMODE_64BIT:
18684 IEM_MC_BEGIN(1, 0);
18685 IEM_MC_ARG(uint64_t, u64Target, 0);
18686 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18687 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END();
18689 return VINF_SUCCESS;
18690
18691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18692 }
18693 }
18694 else
18695 {
        /* The new RIP is taken from a memory location. */
18697 switch (pVCpu->iem.s.enmEffOpSize)
18698 {
18699 case IEMMODE_16BIT:
18700 IEM_MC_BEGIN(1, 1);
18701 IEM_MC_ARG(uint16_t, u16Target, 0);
18702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18705 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18706 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END();
18708 return VINF_SUCCESS;
18709
18710 case IEMMODE_32BIT:
18711 IEM_MC_BEGIN(1, 1);
18712 IEM_MC_ARG(uint32_t, u32Target, 0);
18713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18716 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18717 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END();
18719 return VINF_SUCCESS;
18720
18721 case IEMMODE_64BIT:
18722 IEM_MC_BEGIN(1, 1);
18723 IEM_MC_ARG(uint64_t, u64Target, 0);
18724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18727 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18728 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END();
18730 return VINF_SUCCESS;
18731
18732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18733 }
18734 }
18735}
18736
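/* Worked example (illustrative): 0xff 0xd0 is 'call rax' in 64-bit mode
   (reg=2, mod=3, rm=0). Also note the decode order in the memory forms above:
   IEM_MC_CALC_RM_EFF_ADDR still consumes SIB/displacement bytes, which is why
   IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX is only invoked afterwards. */
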
18737typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18738
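/* The far-pointer helper below loads an m16:16/m16:32/m16:64 operand: the
   offset lives at the effective address and the 16-bit selector follows it
   at displacement 2, 4 or 8, matching the IEM_MC_FETCH_MEM_U16_DISP calls. */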
18739FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18740{
18741 /* Registers? How?? */
18742 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18743 { /* likely */ }
18744 else
18745 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18746
18747 /* Far pointer loaded from memory. */
18748 switch (pVCpu->iem.s.enmEffOpSize)
18749 {
18750 case IEMMODE_16BIT:
18751 IEM_MC_BEGIN(3, 1);
18752 IEM_MC_ARG(uint16_t, u16Sel, 0);
18753 IEM_MC_ARG(uint16_t, offSeg, 1);
18754 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18758 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18759 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18760 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18761 IEM_MC_END();
18762 return VINF_SUCCESS;
18763
18764 case IEMMODE_64BIT:
            /** @todo testcase: AMD does not seem to implement this case (see
             *        bs-cpu-xcpt-1) and will apparently ignore REX.W, at least
             *        for the jmp far qword [rsp] and call far qword [rsp]
             *        encodings. */
18768 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18769 {
18770 IEM_MC_BEGIN(3, 1);
18771 IEM_MC_ARG(uint16_t, u16Sel, 0);
18772 IEM_MC_ARG(uint64_t, offSeg, 1);
                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
18774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18777 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18778 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18779 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18780 IEM_MC_END();
18781 return VINF_SUCCESS;
18782 }
18783 /* AMD falls thru. */
18784 /* fall thru */
18785
18786 case IEMMODE_32BIT:
18787 IEM_MC_BEGIN(3, 1);
18788 IEM_MC_ARG(uint16_t, u16Sel, 0);
18789 IEM_MC_ARG(uint32_t, offSeg, 1);
18790 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18794 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18795 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18796 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18797 IEM_MC_END();
18798 return VINF_SUCCESS;
18799
18800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18801 }
18802}
18803
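/* Worked examples (illustrative): 0xff 0x1c 0x24 is a far call through [rsp]
   (reg=3, SIB base=rsp), an m16:32 load at the default operand size; with
   REX.W, 0x48 0xff 0x2c 0x24 is 'jmp far qword [rsp]' (reg=5), which Intel
   treats as m16:64 while AMD apparently ignores REX.W (see the @todo above). */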
18804
18805/**
18806 * Opcode 0xff /3.
18807 * @param bRm The RM byte.
18808 */
18809FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
18810{
18811 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
18812 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
18813}
18814
18815
18816/**
18817 * Opcode 0xff /4.
18818 * @param bRm The RM byte.
18819 */
18820FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
18821{
18822 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
18823 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18824
18825 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18826 {
18827 /* The new RIP is taken from a register. */
18828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18829 switch (pVCpu->iem.s.enmEffOpSize)
18830 {
18831 case IEMMODE_16BIT:
18832 IEM_MC_BEGIN(0, 1);
18833 IEM_MC_LOCAL(uint16_t, u16Target);
18834 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18835 IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
18837 return VINF_SUCCESS;
18838
18839 case IEMMODE_32BIT:
18840 IEM_MC_BEGIN(0, 1);
18841 IEM_MC_LOCAL(uint32_t, u32Target);
18842 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18843 IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
18845 return VINF_SUCCESS;
18846
18847 case IEMMODE_64BIT:
18848 IEM_MC_BEGIN(0, 1);
18849 IEM_MC_LOCAL(uint64_t, u64Target);
18850 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18851 IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
18853 return VINF_SUCCESS;
18854
18855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18856 }
18857 }
18858 else
18859 {
18860 /* The new RIP is taken from a memory location. */
18861 switch (pVCpu->iem.s.enmEffOpSize)
18862 {
18863 case IEMMODE_16BIT:
18864 IEM_MC_BEGIN(0, 2);
18865 IEM_MC_LOCAL(uint16_t, u16Target);
18866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18869 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18870 IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
18872 return VINF_SUCCESS;
18873
18874 case IEMMODE_32BIT:
18875 IEM_MC_BEGIN(0, 2);
18876 IEM_MC_LOCAL(uint32_t, u32Target);
18877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18880 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18881 IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
18883 return VINF_SUCCESS;
18884
18885 case IEMMODE_64BIT:
18886 IEM_MC_BEGIN(0, 2);
18887 IEM_MC_LOCAL(uint64_t, u64Target);
18888 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18891 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18892 IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
18894 return VINF_SUCCESS;
18895
18896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18897 }
18898 }
18899}
18900
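/* Worked example (illustrative): 0xff 0x25 disp32 is 'jmp [rip+disp32]' in
   64-bit mode (reg=4, mod=0, rm=5 selects RIP-relative addressing), the
   classic PLT/import-table jump. */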
18901
18902/**
18903 * Opcode 0xff /5.
18904 * @param bRm The RM byte.
18905 */
18906FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
18907{
18908 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
18909 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
18910}
18911
18912
18913/**
18914 * Opcode 0xff /6.
18915 * @param bRm The RM byte.
18916 */
18917FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
18918{
18919 IEMOP_MNEMONIC(push_Ev, "push Ev");
18920
18921 /* Registers are handled by a common worker. */
18922 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18923 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18924
    /* Memory operands are handled here. */
18926 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18927 switch (pVCpu->iem.s.enmEffOpSize)
18928 {
18929 case IEMMODE_16BIT:
18930 IEM_MC_BEGIN(0, 2);
18931 IEM_MC_LOCAL(uint16_t, u16Src);
18932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18935 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18936 IEM_MC_PUSH_U16(u16Src);
18937 IEM_MC_ADVANCE_RIP();
18938 IEM_MC_END();
18939 return VINF_SUCCESS;
18940
18941 case IEMMODE_32BIT:
18942 IEM_MC_BEGIN(0, 2);
18943 IEM_MC_LOCAL(uint32_t, u32Src);
18944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18947 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18948 IEM_MC_PUSH_U32(u32Src);
18949 IEM_MC_ADVANCE_RIP();
18950 IEM_MC_END();
18951 return VINF_SUCCESS;
18952
18953 case IEMMODE_64BIT:
18954 IEM_MC_BEGIN(0, 2);
18955 IEM_MC_LOCAL(uint64_t, u64Src);
18956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18959 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18960 IEM_MC_PUSH_U64(u64Src);
18961 IEM_MC_ADVANCE_RIP();
18962 IEM_MC_END();
18963 return VINF_SUCCESS;
18964
18965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18966 }
18967}
18968
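/* Worked example (illustrative): 0xff 0x34 0x24 is 'push qword [rsp]' in
   64-bit mode; IEMOP_HLP_DEFAULT_64BIT_OP_SIZE above makes 64-bit the default
   operand size, so no REX.W prefix is needed. */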
18969
18970/** Opcode 0xff. */
18971FNIEMOP_DEF(iemOp_Grp5)
18972{
18973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18974 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18975 {
18976 case 0:
18977 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
18978 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
18979 case 1:
18980 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
18981 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
18982 case 2:
18983 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
18984 case 3:
18985 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
18986 case 4:
18987 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
18988 case 5:
18989 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
18990 case 6:
18991 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
18992 case 7:
18993 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
18994 return IEMOP_RAISE_INVALID_OPCODE();
18995 }
18996 AssertFailedReturn(VERR_IEM_IPE_3);
18997}
18998
18999
19000
19001const PFNIEMOP g_apfnOneByteMap[256] =
19002{
19003 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
19004 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
19005 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
19006 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
19007 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
19008 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
19009 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
19010 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
19011 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
19012 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
19013 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
19014 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
19015 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
19016 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
19017 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
19018 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
19019 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
19020 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
19021 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
19022 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
19023 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
19024 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
19025 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
19026 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
19027 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
19028 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
19029 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
19030 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
19031 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
19032 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
19033 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
19034 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
19035 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
19036 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
19037 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
19038 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
19039 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
19040 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
19041 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
19042 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
19043 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
19044 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
19045 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
19046 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
19047 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
19048 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
19049 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
19050 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
19051 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
19052 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
19053 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
19054 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
19055 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
19056 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
19057 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
19058 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
19059 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
19060 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
19061 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
19062 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
19063 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
19064 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
19065 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
19066 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
19067};
19068
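/* Sketch of how this table is consumed (assumed from the extern declaration
   near the top of the file; the actual dispatch loop lives elsewhere in IEM):

       uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
       return FNIEMOP_CALL(g_apfnOneByteMap[b]);

   i.e. the first opcode byte indexes straight into the 256-entry table. */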
19069
19070/** @} */
19071
19072#ifdef _MSC_VER
19073# pragma warning(pop)
19074#endif