VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65609

Last change on this file was 65609, checked in by vboxsync, 8 years ago

IEM: 0x0f 0x28 split up.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 663.6 KB
/* $Id: IEMAllInstructions.cpp.h 65609 2017-02-03 20:21:16Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
#endif


/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
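
/*
 * A note on the ModRM handling above, since the same tests recur throughout
 * this file: the byte splits into mod:reg:rm (2:3:3 bits), and mod == 3
 * selects the register form.  Minimal sketch, assuming the X86_MODRM_*
 * definitions from the x86 headers (they are not part of this file):
 *
 * @code
 *      uint8_t const iMod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;  // 3 means register operand
 *      uint8_t const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // extended by REX.R (uRexReg)
 *      uint8_t const iRm  = bRm & X86_MODRM_RM_MASK;                            // extended by REX.B (uRexB)
 * @endcode
 *
 * E.g. bRm == 0xd8 (11 011 000b) decodes as mod=3, reg=3, rm=0.
 */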


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
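
/*
 * For context: the actual one-byte opcode handlers pair each of the workers
 * above with an IEMOPBINSIZES table of assembly helpers.  A minimal sketch of
 * the typical call site (the handler and table names follow the IEM naming
 * conventions but are assumptions, not defined in this excerpt):
 *
 * @code
 *  FNIEMOP_DEF(iemOp_add_Eb_Gb)
 *  {
 *      IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
 *      return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
 *  }
 * @endcode
 */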


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *, pu8Dst, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags, 2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
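
/*
 * Worth spelling out for the 64-bit case above: the Iz immediate stays four
 * bytes wide in the instruction stream and is sign-extended to 64 bits, which
 * is what IEM_OPCODE_GET_NEXT_S32_SX_U64 does.  Equivalent plain C, as an
 * illustrative sketch only:
 *
 * @code
 *      int32_t  i32Imm = ...four immediate bytes from the stream...;
 *      uint64_t u64Imm = (uint64_t)(int64_t)i32Imm; // 0x80000000 -> 0xffffffff80000000
 * @endcode
 */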


/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid with RM byte. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence and an 8-bit
 *  immediate. */
FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte and a Mod R/M
 *  sequence. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte, a Mod R/M sequence,
 *  and an 8-bit immediate. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
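
/*
 * Common thread in the invalid-opcode helpers above: on Intel CPUs the
 * decoder consumes the full ModRM/SIB/displacement (and any immediate) before
 * raising \#UD, whereas other vendors raise it on the opcode alone.
 * Illustrative byte consumption for iemOp_InvalidNeedRMImm8 (the encoding is
 * made up for the example):
 *
 * @code
 *      // stream:    0f xx | modrm | sib/disp | imm8
 *      // Intel:     modrm, any sib/disp and the imm8 are all fetched, then #UD
 *      // non-Intel: #UD is raised straight after the opcode bytes
 * @endcode
 */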



/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
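
/*
 * Note that for group 6 the reg field of the ModRM byte, not the opcode,
 * selects the instruction.  Worked example, purely illustrative: for the byte
 * sequence 0f 00 d8, bRm is 0xd8, so (bRm >> 3) & 7 == 3 and the table above
 * dispatches to iemOp_Grp6_ltr with mod == 3 (register operand).
 */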


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
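
/*
 * The OR masks in the smsw paths above model how older CPUs report the
 * undefined upper MSW bits.  The constants come straight from the code; the
 * CR0.ET reading of the 0xffe0 case is an assumption about the intent:
 *
 * @code
 *      // 286:           u16Tmp |= 0xfff0;  // bits 4-15 read as ones
 *      // 386:           u16Tmp |= 0xffe0;  // bit 4 (CR0.ET) is a real bit here, bits 5-15 read as ones
 *      // 486 and later: CR0[15:0] is returned unmodified
 * @endcode
 */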


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored; everything is 16-bit and only
       the lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
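
/*
 * Group 7 is the gnarliest dispatch so far: for /0 through /3 and /7 the reg
 * field only decides the instruction when mod != 3, while mod == 3
 * re-dispatches on the rm field to pseudo-instructions like VMCALL, MONITOR
 * or SWAPGS.  Illustrative decodes: 0f 01 f8 gives mod=3, reg=7, rm=0, i.e.
 * swapgs, whereas 0f 01 38 (mod=0, reg=7) is invlpg with a memory operand.
 */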

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
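
/*
 * Note how the 32-bit and 64-bit operand sizes share one path above: the
 * destination is referenced as a full 64-bit register and the width fixup is
 * left to iemCImpl_LarLsl_u64, while the selector itself is always fetched as
 * 16 bits regardless of operand size.
 */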



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
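
/*
 * On the 3DNow! encoding: the instructions all share the two opcode bytes
 * 0f 0f and are distinguished by a suffix byte rather than by the ModRM reg
 * field, which is why the dispatcher above switches on another opcode fetch
 * instead of going through a 256-entry table.  All the per-suffix workers
 * above are still stubs.
 */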


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
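
/*
 * The function above is the standard SSE store pattern in this file: mod == 3
 * copies XMM to XMM via IEM_MC_COPY_XREG_U128, everything else fetches the
 * source register into a local and writes it out with IEM_MC_STORE_MEM_U128.
 * A minimal sketch of the mirrored load direction (the 0x0f 0x10 handler
 * above is still a stub, so this is an assumption about its eventual shape,
 * not its implementation):
 *
 * @code
 *      IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
 *      IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
 * @endcode
 */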
1664
1665
1666/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1667FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1668
1669/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1670FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1671
1672/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1673FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1674{
1675 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1678 {
1679 /*
1680 * Register, register.
1681 */
1682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1683 IEM_MC_BEGIN(0, 1);
1684 IEM_MC_LOCAL(uint64_t, uSrc);
1685
1686 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1687 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1688 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1689 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1690
1691 IEM_MC_ADVANCE_RIP();
1692 IEM_MC_END();
1693 }
1694 else
1695 {
1696 /*
1697 * Memory, register.
1698 */
1699 IEM_MC_BEGIN(0, 2);
1700 IEM_MC_LOCAL(uint64_t, uSrc);
1701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1702
1703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1705 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1706 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1707
1708 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1709 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1710
1711 IEM_MC_ADVANCE_RIP();
1712 IEM_MC_END();
1713 }
1714 return VINF_SUCCESS;
1715}
1716
1717
1718/** Opcode 0x0f 0x12. */
1719FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1720
1721/** Opcode 0x66 0x0f 0x12. */
1722FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1723
1724/** Opcode 0xf3 0x0f 0x12. */
1725FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1726
1727/** Opcode 0xf2 0x0f 0x12. */
1728FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1729
1730/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1731FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1732
1733/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1734FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1735{
1736 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1738 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1739 {
1740#if 0
1741 /*
1742 * Register, register.
1743 */
1744 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1745 IEM_MC_BEGIN(0, 1);
1746 IEM_MC_LOCAL(uint64_t, uSrc);
1747 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1748 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1749 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1750 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1751 IEM_MC_ADVANCE_RIP();
1752 IEM_MC_END();
1753#else
1754 return IEMOP_RAISE_INVALID_OPCODE();
1755#endif
1756 }
1757 else
1758 {
1759 /*
1760 * Memory, register.
1761 */
1762 IEM_MC_BEGIN(0, 2);
1763 IEM_MC_LOCAL(uint64_t, uSrc);
1764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1765
1766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1767 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1768 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1769 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1770
1771 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1772 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1773
1774 IEM_MC_ADVANCE_RIP();
1775 IEM_MC_END();
1776 }
1777 return VINF_SUCCESS;
1778}
1779
1780/* Opcode 0xf3 0x0f 0x13 - invalid */
1781/* Opcode 0xf2 0x0f 0x13 - invalid */
1782
1783/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1784FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1785/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1786FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1787/* Opcode 0xf3 0x0f 0x14 - invalid */
1788/* Opcode 0xf2 0x0f 0x14 - invalid */
1789/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1790FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1791/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1792FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1793/* Opcode 0xf3 0x0f 0x15 - invalid */
1794/* Opcode 0xf2 0x0f 0x15 - invalid */
1795/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
1796FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1797/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1798FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1799/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1800FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1801/* Opcode 0xf2 0x0f 0x16 - invalid */
1802/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1803FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1804/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1805FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1806/* Opcode 0xf3 0x0f 0x17 - invalid */
1807/* Opcode 0xf2 0x0f 0x17 - invalid */
1808
1809
1810/** Opcode 0x0f 0x18. */
1811FNIEMOP_DEF(iemOp_prefetch_Grp16)
1812{
1813 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1814 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1815 {
1816 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1817 {
1818 case 4: /* Aliased to /0 for the time being according to AMD. */
1819 case 5: /* Aliased to /0 for the time being according to AMD. */
1820 case 6: /* Aliased to /0 for the time being according to AMD. */
1821 case 7: /* Aliased to /0 for the time being according to AMD. */
1822 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1823 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1824 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1825 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1826 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1827 }
1828
1829 IEM_MC_BEGIN(0, 1);
1830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1833 /* Currently a NOP. */
1834 NOREF(GCPtrEffSrc);
1835 IEM_MC_ADVANCE_RIP();
1836 IEM_MC_END();
1837 return VINF_SUCCESS;
1838 }
1839
1840 return IEMOP_RAISE_INVALID_OPCODE();
1841}
1842
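/*
 * Illustration only, not used by the decoder: a minimal sketch of how the
 * group 16 encoding above resolves to a hint name.  grp16HintName is a
 * hypothetical helper invented for this example.
 *
 * @code
 * static const char *grp16HintName(uint8_t bRm)
 * {
 *     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 *         return "ud";                    // register forms are invalid
 *     switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
 *     {
 *         case 1:  return "prefetcht0";
 *         case 2:  return "prefetcht1";
 *         case 3:  return "prefetcht2";
 *         default: return "prefetchnta";  // covers /0 and the /4../7 aliases
 *     }
 * }
 * @endcode
 */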
1843
1844/** Opcode 0x0f 0x19..0x1f. */
1845FNIEMOP_DEF(iemOp_nop_Ev)
1846{
1847 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1849 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1850 {
1851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1852 IEM_MC_BEGIN(0, 0);
1853 IEM_MC_ADVANCE_RIP();
1854 IEM_MC_END();
1855 }
1856 else
1857 {
1858 IEM_MC_BEGIN(0, 1);
1859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1862 /* Currently a NOP. */
1863 NOREF(GCPtrEffSrc);
1864 IEM_MC_ADVANCE_RIP();
1865 IEM_MC_END();
1866 }
1867 return VINF_SUCCESS;
1868}
1869
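/*
 * Reference: this handler also catches the Intel recommended long NOP forms
 * (0f 1f /0), e.g. (byte sequences per the SDM):
 *      0f 1f 00                        3-byte nop dword [eax]
 *      0f 1f 44 00 00                  5-byte nop dword [eax+eax]
 *      66 0f 1f 84 00 00 00 00 00      9-byte nop word  [eax+eax]
 * All of them take the memory path above, which computes the effective
 * address and otherwise does nothing.
 */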
1870
1871/** Opcode 0x0f 0x20. */
1872FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1873{
1874 /* mod is ignored, as are operand size overrides. */
1875 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1876 IEMOP_HLP_MIN_386();
1877 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1878 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1879 else
1880 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1881
1882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1883 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1884 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1885 {
1886 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1887 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1888 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1889 iCrReg |= 8;
1890 }
1891 switch (iCrReg)
1892 {
1893 case 0: case 2: case 3: case 4: case 8:
1894 break;
1895 default:
1896 return IEMOP_RAISE_INVALID_OPCODE();
1897 }
1898 IEMOP_HLP_DONE_DECODING();
1899
1900 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1901}
1902
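/*
 * Encoding example (assembler syntax is illustrative): on CPUs with the AMD
 * CR8-in-legacy-mode extension the LOCK prefix selects CR8 via the /0 slot:
 *      f0 0f 20 c0     mov eax, cr8    ; lock + mov Rd,Cd, iCrReg |= 8
 * On CPUs without the feature the same bytes #UD, as checked above.
 */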
1903
1904/** Opcode 0x0f 0x21. */
1905FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1906{
1907 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1908 IEMOP_HLP_MIN_386();
1909 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1911 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1912 return IEMOP_RAISE_INVALID_OPCODE();
1913 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1914 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1915 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1916}
1917
1918
1919/** Opcode 0x0f 0x22. */
1920FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1921{
1922 /* mod is ignored, as are operand size overrides. */
1923 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1924 IEMOP_HLP_MIN_386();
1925 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1926 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1927 else
1928 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1929
1930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1931 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1932 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1933 {
1934 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1935 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1936 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1937 iCrReg |= 8;
1938 }
1939 switch (iCrReg)
1940 {
1941 case 0: case 2: case 3: case 4: case 8:
1942 break;
1943 default:
1944 return IEMOP_RAISE_INVALID_OPCODE();
1945 }
1946 IEMOP_HLP_DONE_DECODING();
1947
1948 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1949}
1950
1951
1952/** Opcode 0x0f 0x23. */
1953FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1954{
1955 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1956 IEMOP_HLP_MIN_386();
1957 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1959 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1960 return IEMOP_RAISE_INVALID_OPCODE();
1961 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1962 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1963 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1964}
1965
1966
1967/** Opcode 0x0f 0x24. */
1968FNIEMOP_DEF(iemOp_mov_Rd_Td)
1969{
1970 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1971 /** @todo works on 386 and 486. */
1972 /* The RM byte is not considered, see testcase. */
1973 return IEMOP_RAISE_INVALID_OPCODE();
1974}
1975
1976
1977/** Opcode 0x0f 0x26. */
1978FNIEMOP_DEF(iemOp_mov_Td_Rd)
1979{
1980 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1981 /** @todo works on 386 and 486. */
1982 /* The RM byte is not considered, see testcase. */
1983 return IEMOP_RAISE_INVALID_OPCODE();
1984}
1985
1986
1987/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1988FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1989{
1990 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1992 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1993 {
1994 /*
1995 * Register, register.
1996 */
1997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1998 IEM_MC_BEGIN(0, 0);
1999 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2000 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2001 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2002 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2003 IEM_MC_ADVANCE_RIP();
2004 IEM_MC_END();
2005 }
2006 else
2007 {
2008 /*
2009 * Register, memory.
2010 */
2011 IEM_MC_BEGIN(0, 2);
2012 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2014
2015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2017 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2018 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2019
2020 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2021 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2022
2023 IEM_MC_ADVANCE_RIP();
2024 IEM_MC_END();
2025 }
2026 return VINF_SUCCESS;
2027}
2028
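/*
 * Alignment note, sketch only: IEM_MC_FETCH_MEM_U128_ALIGN_SSE is what makes
 * movaps/movapd differ from movups/movupd.  Conceptually (ignoring paging and
 * access-rights details) the extra check amounts to:
 *
 * @code
 * if (GCPtrEffSrc & 15)
 *     return iemRaiseGeneralProtectionFault0(pVCpu); // #GP(0), not #AC
 * @endcode
 */
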
2029/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
2030FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2031{
2032 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
2033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2034 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2035 {
2036 /*
2037 * Register, register.
2038 */
2039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2040 IEM_MC_BEGIN(0, 0);
2041 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2042 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2043 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2044 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2045 IEM_MC_ADVANCE_RIP();
2046 IEM_MC_END();
2047 }
2048 else
2049 {
2050 /*
2051 * Register, memory.
2052 */
2053 IEM_MC_BEGIN(0, 2);
2054 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2055 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2056
2057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2059 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2060 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2061
2062 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2063 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2064
2065 IEM_MC_ADVANCE_RIP();
2066 IEM_MC_END();
2067 }
2068 return VINF_SUCCESS;
2069}
2070
2071/* Opcode 0xf3 0x0f 0x28 - invalid */
2072/* Opcode 0xf2 0x0f 0x28 - invalid */
2073
2074/** Opcode 0x0f 0x29. */
2075FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
2076{
2077 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2078 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2079 else
2080 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2082 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2083 {
2084 /*
2085 * Register, register.
2086 */
2087 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
2088 IEM_MC_BEGIN(0, 0);
2089 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2090 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2091 else
2092 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2093 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2094 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2095 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2096 IEM_MC_ADVANCE_RIP();
2097 IEM_MC_END();
2098 }
2099 else
2100 {
2101 /*
2102 * Memory, register.
2103 */
2104 IEM_MC_BEGIN(0, 2);
2105 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2107
2108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2109 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2110 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2111 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2112 else
2113 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2114 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2115
2116 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2117 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2118
2119 IEM_MC_ADVANCE_RIP();
2120 IEM_MC_END();
2121 }
2122 return VINF_SUCCESS;
2123}
2124
2125
2126/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2127FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2128/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2129FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2130/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2131FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2132/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2133FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2134
2135
2136/** Opcode 0x0f 0x2b. */
2137FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
2138{
2139 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2140 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2141 else
2142 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2144 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2145 {
2146 /*
2147 * Memory, register.
2148 */
2149 IEM_MC_BEGIN(0, 2);
2150 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2152
2153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2155 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2156 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2157 else
2158 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2160
2161 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2162 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2163
2164 IEM_MC_ADVANCE_RIP();
2165 IEM_MC_END();
2166 }
2167 /* The register, register encoding is invalid. */
2168 else
2169 return IEMOP_RAISE_INVALID_OPCODE();
2170 return VINF_SUCCESS;
2171}
2172
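/*
 * For context, a minimal (hypothetical) guest-side user of this encoding;
 * copy_nt and its arguments are made up for the example:
 *
 * @code
 * #include <xmmintrin.h>
 * void copy_nt(float *pDst, const float *pSrc)  // pDst 16-byte aligned
 * {
 *     _mm_stream_ps(pDst, _mm_load_ps(pSrc));   // emits movntps [pDst], xmm
 * }
 * @endcode
 */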
2173
2174/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2175FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2176/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2177FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2178/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2179FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2180/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2181FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2182
2183/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2184FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2185/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2186FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2187/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2188FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2189/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2190FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2191
2192/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2193FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2194/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2195FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2196/* Opcode 0xf3 0x0f 0x2e - invalid */
2197/* Opcode 0xf2 0x0f 0x2e - invalid */
2198
2199/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2200FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2201/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2202FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2203/* Opcode 0xf3 0x0f 0x2f - invalid */
2204/* Opcode 0xf2 0x0f 0x2f - invalid */
2205
2206/** Opcode 0x0f 0x30. */
2207FNIEMOP_DEF(iemOp_wrmsr)
2208{
2209 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2211 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2212}
2213
2214
2215/** Opcode 0x0f 0x31. */
2216FNIEMOP_DEF(iemOp_rdtsc)
2217{
2218 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2220 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2221}
2222
2223
2224/** Opcode 0x0f 0x32. */
2225FNIEMOP_DEF(iemOp_rdmsr)
2226{
2227 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2229 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2230}
2231
2232
2233/** Opcode 0x0f 0x33. */
2234FNIEMOP_STUB(iemOp_rdpmc);
2235/** Opcode 0x0f 0x34. */
2236FNIEMOP_STUB(iemOp_sysenter);
2237/** Opcode 0x0f 0x35. */
2238FNIEMOP_STUB(iemOp_sysexit);
2239/** Opcode 0x0f 0x37. */
2240FNIEMOP_STUB(iemOp_getsec);
2241/** Opcode 0x0f 0x38. */
2242FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2243/** Opcode 0x0f 0x3a. */
2244FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2245
2246
2247/**
2248 * Implements a conditional move.
2249 *
2250 * Wish there was an obvious way to do this where we could share and reduce
2251 * code bloat.
2252 *
2253 * @param a_Cnd The conditional "microcode" operation.
2254 */
2255#define CMOV_X(a_Cnd) \
2256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2258 { \
2259 switch (pVCpu->iem.s.enmEffOpSize) \
2260 { \
2261 case IEMMODE_16BIT: \
2262 IEM_MC_BEGIN(0, 1); \
2263 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2264 a_Cnd { \
2265 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2266 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2267 } IEM_MC_ENDIF(); \
2268 IEM_MC_ADVANCE_RIP(); \
2269 IEM_MC_END(); \
2270 return VINF_SUCCESS; \
2271 \
2272 case IEMMODE_32BIT: \
2273 IEM_MC_BEGIN(0, 1); \
2274 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2275 a_Cnd { \
2276 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2277 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2278 } IEM_MC_ELSE() { \
2279 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2280 } IEM_MC_ENDIF(); \
2281 IEM_MC_ADVANCE_RIP(); \
2282 IEM_MC_END(); \
2283 return VINF_SUCCESS; \
2284 \
2285 case IEMMODE_64BIT: \
2286 IEM_MC_BEGIN(0, 1); \
2287 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2288 a_Cnd { \
2289 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2290 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2291 } IEM_MC_ENDIF(); \
2292 IEM_MC_ADVANCE_RIP(); \
2293 IEM_MC_END(); \
2294 return VINF_SUCCESS; \
2295 \
2296 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2297 } \
2298 } \
2299 else \
2300 { \
2301 switch (pVCpu->iem.s.enmEffOpSize) \
2302 { \
2303 case IEMMODE_16BIT: \
2304 IEM_MC_BEGIN(0, 2); \
2305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2306 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2308 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2309 a_Cnd { \
2310 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2311 } IEM_MC_ENDIF(); \
2312 IEM_MC_ADVANCE_RIP(); \
2313 IEM_MC_END(); \
2314 return VINF_SUCCESS; \
2315 \
2316 case IEMMODE_32BIT: \
2317 IEM_MC_BEGIN(0, 2); \
2318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2319 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2321 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2322 a_Cnd { \
2323 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2324 } IEM_MC_ELSE() { \
2325 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2326 } IEM_MC_ENDIF(); \
2327 IEM_MC_ADVANCE_RIP(); \
2328 IEM_MC_END(); \
2329 return VINF_SUCCESS; \
2330 \
2331 case IEMMODE_64BIT: \
2332 IEM_MC_BEGIN(0, 2); \
2333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2334 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2336 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2337 a_Cnd { \
2338 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2339 } IEM_MC_ENDIF(); \
2340 IEM_MC_ADVANCE_RIP(); \
2341 IEM_MC_END(); \
2342 return VINF_SUCCESS; \
2343 \
2344 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2345 } \
2346 } do {} while (0)
2347
2348
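/*
 * Worked example of the 32-bit IEM_MC_ELSE branches above: in 64-bit mode a
 * 32-bit cmov writes its destination even when the condition is false, so
 * bits 63:32 are always cleared.  A C model (cmov32 is illustrative only):
 *
 * @code
 * static uint64_t cmov32(uint64_t uDst, uint32_t uSrc, bool fCond)
 * {
 *     return fCond ? (uint64_t)uSrc : uDst & UINT32_MAX;
 * }
 * @endcode
 */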
2349
2350/** Opcode 0x0f 0x40. */
2351FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2352{
2353 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2354 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2355}
2356
2357
2358/** Opcode 0x0f 0x41. */
2359FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2360{
2361 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2362 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2363}
2364
2365
2366/** Opcode 0x0f 0x42. */
2367FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2368{
2369 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2370 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2371}
2372
2373
2374/** Opcode 0x0f 0x43. */
2375FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2376{
2377 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2378 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2379}
2380
2381
2382/** Opcode 0x0f 0x44. */
2383FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2384{
2385 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2386 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2387}
2388
2389
2390/** Opcode 0x0f 0x45. */
2391FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2392{
2393 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2394 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2395}
2396
2397
2398/** Opcode 0x0f 0x46. */
2399FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2400{
2401 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2402 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2403}
2404
2405
2406/** Opcode 0x0f 0x47. */
2407FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2408{
2409 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2410 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2411}
2412
2413
2414/** Opcode 0x0f 0x48. */
2415FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2416{
2417 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2418 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2419}
2420
2421
2422/** Opcode 0x0f 0x49. */
2423FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2424{
2425 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2426 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2427}
2428
2429
2430/** Opcode 0x0f 0x4a. */
2431FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2432{
2433 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2434 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2435}
2436
2437
2438/** Opcode 0x0f 0x4b. */
2439FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2440{
2441 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2442 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2443}
2444
2445
2446/** Opcode 0x0f 0x4c. */
2447FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2448{
2449 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2450 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2451}
2452
2453
2454/** Opcode 0x0f 0x4d. */
2455FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2456{
2457 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2458 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2459}
2460
2461
2462/** Opcode 0x0f 0x4e. */
2463FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2464{
2465 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2466 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2467}
2468
2469
2470/** Opcode 0x0f 0x4f. */
2471FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2472{
2473 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2474 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2475}
2476
2477#undef CMOV_X
2478
2479/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2480FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2481/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2482FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2483/* Opcode 0xf3 0x0f 0x50 - invalid */
2484/* Opcode 0xf2 0x0f 0x50 - invalid */
2485
2486/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2487FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2488/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2489FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2490/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2491FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2492/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2493FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2494
2495/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2496FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2497/* Opcode 0x66 0x0f 0x52 - invalid */
2498/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2499FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2500/* Opcode 0xf2 0x0f 0x52 - invalid */
2501
2502/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2503FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2504/* Opcode 0x66 0x0f 0x53 - invalid */
2505/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2506FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2507/* Opcode 0xf2 0x0f 0x53 - invalid */
2508
2509/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2510FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2511/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2512FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2513/* Opcode 0xf3 0x0f 0x54 - invalid */
2514/* Opcode 0xf2 0x0f 0x54 - invalid */
2515
2516/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2517FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2518/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2519FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2520/* Opcode 0xf3 0x0f 0x55 - invalid */
2521/* Opcode 0xf2 0x0f 0x55 - invalid */
2522
2523/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2524FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2525/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2526FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2527/* Opcode 0xf3 0x0f 0x56 - invalid */
2528/* Opcode 0xf2 0x0f 0x56 - invalid */
2529
2530/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2531FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2532/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2533FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2534/* Opcode 0xf3 0x0f 0x57 - invalid */
2535/* Opcode 0xf2 0x0f 0x57 - invalid */
2536
2537/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2538FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2539/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2540FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2541/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2542FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2543/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2544FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2545
2546/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2547FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2548/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2549FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2550/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2551FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2552/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2553FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2554
2555/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2556FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2557/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2558FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2559/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2560FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2561/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2562FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2563
2564/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2565FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2566/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2567FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2568/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2569FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2570/* Opcode 0xf2 0x0f 0x5b - invalid */
2571
2572/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2573FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2574/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2575FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2576/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2577FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2578/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2579FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2580
2581/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2582FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2583/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2584FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2585/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2586FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2587/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2588FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2589
2590/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2591FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2592/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2593FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2594/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2595FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2596/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2597FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2598
2599/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2600FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2601/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2602FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2603/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2604FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2605/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2606FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2607
2608
2609/**
2610 * Common worker for SSE2 and MMX instructions on the forms:
2611 * pxxxx xmm1, xmm2/mem128
2612 * pxxxx mm1, mm2/mem32
2613 *
2614 * The 2nd operand is the first half of a register, which in the memory case
2615 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
2616 * 128-bit memory access for SSE.
2617 *
2618 * Exceptions type 4.
2619 */
2620FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2621{
2622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2623 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2624 {
2625 case IEM_OP_PRF_SIZE_OP: /* SSE */
2626 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2627 {
2628 /*
2629 * Register, register.
2630 */
2631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2632 IEM_MC_BEGIN(2, 0);
2633 IEM_MC_ARG(uint128_t *, pDst, 0);
2634 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2635 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2636 IEM_MC_PREPARE_SSE_USAGE();
2637 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2638 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2639 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2640 IEM_MC_ADVANCE_RIP();
2641 IEM_MC_END();
2642 }
2643 else
2644 {
2645 /*
2646 * Register, memory.
2647 */
2648 IEM_MC_BEGIN(2, 2);
2649 IEM_MC_ARG(uint128_t *, pDst, 0);
2650 IEM_MC_LOCAL(uint64_t, uSrc);
2651 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2653
2654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2656 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2657 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2658
2659 IEM_MC_PREPARE_SSE_USAGE();
2660 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2661 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2662
2663 IEM_MC_ADVANCE_RIP();
2664 IEM_MC_END();
2665 }
2666 return VINF_SUCCESS;
2667
2668 case 0: /* MMX */
2669 if (!pImpl->pfnU64)
2670 return IEMOP_RAISE_INVALID_OPCODE();
2671 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2672 {
2673 /*
2674 * Register, register.
2675 */
2676 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2677 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2679 IEM_MC_BEGIN(2, 0);
2680 IEM_MC_ARG(uint64_t *, pDst, 0);
2681 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2682 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2683 IEM_MC_PREPARE_FPU_USAGE();
2684 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2685 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2686 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2687 IEM_MC_ADVANCE_RIP();
2688 IEM_MC_END();
2689 }
2690 else
2691 {
2692 /*
2693 * Register, memory.
2694 */
2695 IEM_MC_BEGIN(2, 2);
2696 IEM_MC_ARG(uint64_t *, pDst, 0);
2697 IEM_MC_LOCAL(uint32_t, uSrc);
2698 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2700
2701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2703 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2704 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2705
2706 IEM_MC_PREPARE_FPU_USAGE();
2707 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2708 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2709
2710 IEM_MC_ADVANCE_RIP();
2711 IEM_MC_END();
2712 }
2713 return VINF_SUCCESS;
2714
2715 default:
2716 return IEMOP_RAISE_INVALID_OPCODE();
2717 }
2718}
2719
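/*
 * Worked example for the low-low interleavers dispatched through this
 * worker.  A C model of the 64-bit punpcklbw (illustrative only):
 *
 * @code
 * static uint64_t punpcklbw64(uint64_t uDst, uint64_t uSrc)
 * {
 *     uint64_t uRes = 0;
 *     for (unsigned i = 0; i < 4; i++)
 *     {
 *         uRes |= ((uDst >> (i * 8)) & 0xff) << (i * 16);
 *         uRes |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8);
 *     }
 *     return uRes;
 * }
 * // punpcklbw64(0x0706050403020100, 0x0f0e0d0c0b0a0908) == 0x0b030a0209010800
 * @endcode
 */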
2720
2721/** Opcode 0x0f 0x60. */
2722FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
2723{
2724 IEMOP_MNEMONIC(punpcklbw, "punpcklbw");
2725 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2726}
2727
2728
2729/** Opcode 0x0f 0x61. */
2730FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
2731{
2732 IEMOP_MNEMONIC(punpcklwd, "punpcklwd"); /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
2733 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2734}
2735
2736
2737/** Opcode 0x0f 0x62. */
2738FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
2739{
2740 IEMOP_MNEMONIC(punpckldq, "punpckldq");
2741 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2742}
2743
2744
2745/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2746FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2747/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2748FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2749/* Opcode 0xf3 0x0f 0x63 - invalid */
2750
2751/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2752FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2753/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2754FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2755/* Opcode 0xf3 0x0f 0x64 - invalid */
2756
2757/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2758FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2759/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2760FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2761/* Opcode 0xf3 0x0f 0x65 - invalid */
2762
2763/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2764FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2765/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2766FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2767/* Opcode 0xf3 0x0f 0x66 - invalid */
2768
2769/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2770FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2771/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, Wx */
2772FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2773/* Opcode 0xf3 0x0f 0x67 - invalid */
2774
2775
2776/**
2777 * Common worker for SSE2 and MMX instructions on the forms:
2778 * pxxxx xmm1, xmm2/mem128
2779 * pxxxx mm1, mm2/mem64
2780 *
2781 * The 2nd operand is the second half of a register, which in the memory case
2782 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2783 * where it may read the full 128 bits or only the upper 64 bits.
2784 *
2785 * Exceptions type 4.
2786 */
2787FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2788{
2789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2790 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2791 {
2792 case IEM_OP_PRF_SIZE_OP: /* SSE */
2793 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2794 {
2795 /*
2796 * Register, register.
2797 */
2798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2799 IEM_MC_BEGIN(2, 0);
2800 IEM_MC_ARG(uint128_t *, pDst, 0);
2801 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2802 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2803 IEM_MC_PREPARE_SSE_USAGE();
2804 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2805 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2806 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2807 IEM_MC_ADVANCE_RIP();
2808 IEM_MC_END();
2809 }
2810 else
2811 {
2812 /*
2813 * Register, memory.
2814 */
2815 IEM_MC_BEGIN(2, 2);
2816 IEM_MC_ARG(uint128_t *, pDst, 0);
2817 IEM_MC_LOCAL(uint128_t, uSrc);
2818 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2820
2821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2823 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2824 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2825
2826 IEM_MC_PREPARE_SSE_USAGE();
2827 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2828 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2829
2830 IEM_MC_ADVANCE_RIP();
2831 IEM_MC_END();
2832 }
2833 return VINF_SUCCESS;
2834
2835 case 0: /* MMX */
2836 if (!pImpl->pfnU64)
2837 return IEMOP_RAISE_INVALID_OPCODE();
2838 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2839 {
2840 /*
2841 * Register, register.
2842 */
2843 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2844 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2846 IEM_MC_BEGIN(2, 0);
2847 IEM_MC_ARG(uint64_t *, pDst, 0);
2848 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2849 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2850 IEM_MC_PREPARE_FPU_USAGE();
2851 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2852 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2853 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2854 IEM_MC_ADVANCE_RIP();
2855 IEM_MC_END();
2856 }
2857 else
2858 {
2859 /*
2860 * Register, memory.
2861 */
2862 IEM_MC_BEGIN(2, 2);
2863 IEM_MC_ARG(uint64_t *, pDst, 0);
2864 IEM_MC_LOCAL(uint64_t, uSrc);
2865 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2867
2868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2870 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2871 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2872
2873 IEM_MC_PREPARE_FPU_USAGE();
2874 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2875 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2876
2877 IEM_MC_ADVANCE_RIP();
2878 IEM_MC_END();
2879 }
2880 return VINF_SUCCESS;
2881
2882 default:
2883 return IEMOP_RAISE_INVALID_OPCODE();
2884 }
2885}
2886
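/*
 * Worked example for the high-high interleavers: only the upper halves of
 * the two operands contribute.  For the 64-bit punpckhbw,
 *      dst = 0x0706050403020100, src = 0x0f0e0d0c0b0a0908
 * yields 0x0f070e060d050c04 (bytes d4,s4,d5,s5,d6,s6,d7,s7, low to high).
 */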
2887
2888/** Opcode 0x0f 0x68. */
2889FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
2890{
2891 IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
2892 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2893}
2894
2895
2896/** Opcode 0x0f 0x69. */
2897FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
2898{
2899 IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
2900 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2901}
2902
2903
2904/** Opcode 0x0f 0x6a. */
2905FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
2906{
2907 IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
2908 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2909}
2910
2911/** Opcode 0x0f 0x6b. */
2912FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2913
2914
2915/** Opcode 0x0f 0x6c. */
2916FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
2917{
2918 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq");
2919 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2920}
2921
2922
2923/** Opcode 0x0f 0x6d. */
2924FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
2925{
2926 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
2927 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2928}
2929
2930
2931/** Opcode 0x0f 0x6e. */
2932FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2933{
2934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2935 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2936 {
2937 case IEM_OP_PRF_SIZE_OP: /* SSE */
2938 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2939 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
2940 else
2941 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
2942 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2943 {
2944 /* XMM, greg*/
2945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2946 IEM_MC_BEGIN(0, 1);
2947 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2948 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2949 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2950 {
2951 IEM_MC_LOCAL(uint64_t, u64Tmp);
2952 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2953 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2954 }
2955 else
2956 {
2957 IEM_MC_LOCAL(uint32_t, u32Tmp);
2958 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2959 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2960 }
2961 IEM_MC_ADVANCE_RIP();
2962 IEM_MC_END();
2963 }
2964 else
2965 {
2966 /* XMM, [mem] */
2967 IEM_MC_BEGIN(0, 2);
2968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2969 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2972 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2973 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2974 {
2975 IEM_MC_LOCAL(uint64_t, u64Tmp);
2976 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2977 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2978 }
2979 else
2980 {
2981 IEM_MC_LOCAL(uint32_t, u32Tmp);
2982 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2983 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2984 }
2985 IEM_MC_ADVANCE_RIP();
2986 IEM_MC_END();
2987 }
2988 return VINF_SUCCESS;
2989
2990 case 0: /* MMX */
2991 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2992 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2993 else
2994 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2995 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2996 {
2997 /* MMX, greg */
2998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2999 IEM_MC_BEGIN(0, 1);
3000 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3001 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3002 IEM_MC_LOCAL(uint64_t, u64Tmp);
3003 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3004 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3005 else
3006 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3007 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3008 IEM_MC_ADVANCE_RIP();
3009 IEM_MC_END();
3010 }
3011 else
3012 {
3013 /* MMX, [mem] */
3014 IEM_MC_BEGIN(0, 2);
3015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3016 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3019 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3020 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3021 {
3022 IEM_MC_LOCAL(uint64_t, u64Tmp);
3023 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3024 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3025 }
3026 else
3027 {
3028 IEM_MC_LOCAL(uint32_t, u32Tmp);
3029 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3030 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3031 }
3032 IEM_MC_ADVANCE_RIP();
3033 IEM_MC_END();
3034 }
3035 return VINF_SUCCESS;
3036
3037 default:
3038 return IEMOP_RAISE_INVALID_OPCODE();
3039 }
3040}
3041
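/*
 * Encoding examples for the REX.W split above (register forms; operands per
 * the usual modrm rules, modrm=c0 means reg 0 to reg 0):
 *      66 0f 6e c0         movd xmm0, eax   ; 32-bit, zero-extended to 128
 *      66 48 0f 6e c0      movq xmm0, rax   ; 64-bit, zero-extended to 128
 *      0f 6e c0            movd mm0, eax    ; MMX, zero-extended to 64
 */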
3042
3043/** Opcode 0x0f 0x6f. */
3044FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
3045{
3046 bool fAligned = false;
3047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3048 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3049 {
3050 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3051 fAligned = true; /* fall thru */
3052 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3053 if (fAligned)
3054 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3055 else
3056 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3058 {
3059 /*
3060 * Register, register.
3061 */
3062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3063 IEM_MC_BEGIN(0, 0);
3064 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3065 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3066 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3067 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3068 IEM_MC_ADVANCE_RIP();
3069 IEM_MC_END();
3070 }
3071 else
3072 {
3073 /*
3074 * Register, memory.
3075 */
3076 IEM_MC_BEGIN(0, 2);
3077 IEM_MC_LOCAL(uint128_t, u128Tmp);
3078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3079
3080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3082 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3083 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3084 if (fAligned)
3085 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3086 else
3087 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3088 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3089
3090 IEM_MC_ADVANCE_RIP();
3091 IEM_MC_END();
3092 }
3093 return VINF_SUCCESS;
3094
3095 case 0: /* MMX */
3096 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3097 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3098 {
3099 /*
3100 * Register, register.
3101 */
3102 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3103 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3105 IEM_MC_BEGIN(0, 1);
3106 IEM_MC_LOCAL(uint64_t, u64Tmp);
3107 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3108 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3109 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3110 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3111 IEM_MC_ADVANCE_RIP();
3112 IEM_MC_END();
3113 }
3114 else
3115 {
3116 /*
3117 * Register, memory.
3118 */
3119 IEM_MC_BEGIN(0, 2);
3120 IEM_MC_LOCAL(uint64_t, u64Tmp);
3121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3122
3123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3125 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3126 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3127 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3128 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3129
3130 IEM_MC_ADVANCE_RIP();
3131 IEM_MC_END();
3132 }
3133 return VINF_SUCCESS;
3134
3135 default:
3136 return IEMOP_RAISE_INVALID_OPCODE();
3137 }
3138}
3139
3140
3141/** Opcode 0x0f 0x70. The immediate here is evil! */
3142FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
3143{
3144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3145 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3146 {
3147 case IEM_OP_PRF_SIZE_OP: /* SSE */
3148 case IEM_OP_PRF_REPNZ: /* SSE */
3149 case IEM_OP_PRF_REPZ: /* SSE */
3150 {
3151 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
3152 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3153 {
3154 case IEM_OP_PRF_SIZE_OP:
3155 IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
3156 pfnAImpl = iemAImpl_pshufd;
3157 break;
3158 case IEM_OP_PRF_REPNZ:
3159 IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
3160 pfnAImpl = iemAImpl_pshuflw;
3161 break;
3162 case IEM_OP_PRF_REPZ:
3163 IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
3164 pfnAImpl = iemAImpl_pshufhw;
3165 break;
3166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3167 }
3168 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3169 {
3170 /*
3171 * Register, register.
3172 */
3173 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3175
3176 IEM_MC_BEGIN(3, 0);
3177 IEM_MC_ARG(uint128_t *, pDst, 0);
3178 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3179 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3180 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3181 IEM_MC_PREPARE_SSE_USAGE();
3182 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3183 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3184 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
3185 IEM_MC_ADVANCE_RIP();
3186 IEM_MC_END();
3187 }
3188 else
3189 {
3190 /*
3191 * Register, memory.
3192 */
3193 IEM_MC_BEGIN(3, 2);
3194 IEM_MC_ARG(uint128_t *, pDst, 0);
3195 IEM_MC_LOCAL(uint128_t, uSrc);
3196 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3198
3199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3200 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3201 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3203 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3204
3205 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3206 IEM_MC_PREPARE_SSE_USAGE();
3207 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3208 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
3209
3210 IEM_MC_ADVANCE_RIP();
3211 IEM_MC_END();
3212 }
3213 return VINF_SUCCESS;
3214 }
3215
3216 case 0: /* MMX Extension */
3217 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3218 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3219 {
3220 /*
3221 * Register, register.
3222 */
3223 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3225
3226 IEM_MC_BEGIN(3, 0);
3227 IEM_MC_ARG(uint64_t *, pDst, 0);
3228 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3229 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3230 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3231 IEM_MC_PREPARE_FPU_USAGE();
3232 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3233 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3234 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3235 IEM_MC_ADVANCE_RIP();
3236 IEM_MC_END();
3237 }
3238 else
3239 {
3240 /*
3241 * Register, memory.
3242 */
3243 IEM_MC_BEGIN(3, 2);
3244 IEM_MC_ARG(uint64_t *, pDst, 0);
3245 IEM_MC_LOCAL(uint64_t, uSrc);
3246 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3248
3249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3250 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3251 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3253 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3254
3255 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3256 IEM_MC_PREPARE_FPU_USAGE();
3257 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3258 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3259
3260 IEM_MC_ADVANCE_RIP();
3261 IEM_MC_END();
3262 }
3263 return VINF_SUCCESS;
3264
3265 default:
3266 return IEMOP_RAISE_INVALID_OPCODE();
3267 }
3268}
3269
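/*
 * Worked example of the "evil" immediate: each 2-bit field of bImm selects a
 * source element (dwords for pshufd, words for the pshufw/pshuflw/pshufhw
 * variants).  A C model of pshufd (illustrative only):
 *
 * @code
 * static void pshufd(uint32_t auDst[4], const uint32_t auSrc[4], uint8_t bImm)
 * {
 *     for (unsigned i = 0; i < 4; i++)
 *         auDst[i] = auSrc[(bImm >> (i * 2)) & 3];
 * }
 * // bImm == 0x1b (binary 00 01 10 11) reverses the four dwords.
 * @endcode
 */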
3270
3271/** Opcode 0x0f 0x71 11/2. */
3272FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3273
3274/** Opcode 0x66 0x0f 0x71 11/2. */
3275FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
3276
3277/** Opcode 0x0f 0x71 11/4. */
3278FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3279
3280/** Opcode 0x66 0x0f 0x71 11/4. */
3281FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
3282
3283/** Opcode 0x0f 0x71 11/6. */
3284FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3285
3286/** Opcode 0x66 0x0f 0x71 11/6. */
3287FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3288
3289
3290/** Opcode 0x0f 0x71. */
3291FNIEMOP_DEF(iemOp_Grp12)
3292{
3293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3294 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3295 return IEMOP_RAISE_INVALID_OPCODE();
3296 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3297 {
3298 case 0: case 1: case 3: case 5: case 7:
3299 return IEMOP_RAISE_INVALID_OPCODE();
3300 case 2:
3301 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3302 {
3303 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3304 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3305 default: return IEMOP_RAISE_INVALID_OPCODE();
3306 }
3307 case 4:
3308 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3309 {
3310 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3311 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3312 default: return IEMOP_RAISE_INVALID_OPCODE();
3313 }
3314 case 6:
3315 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3316 {
3317 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3318 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3319 default: return IEMOP_RAISE_INVALID_OPCODE();
3320 }
3321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3322 }
3323}
3324
3325
3326/** Opcode 0x0f 0x72 11/2. */
3327FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3328
3329/** Opcode 0x66 0x0f 0x72 11/2. */
3330FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3331
3332/** Opcode 0x0f 0x72 11/4. */
3333FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3334
3335/** Opcode 0x66 0x0f 0x72 11/4. */
3336FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3337
3338/** Opcode 0x0f 0x72 11/6. */
3339FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3340
3341/** Opcode 0x66 0x0f 0x72 11/6. */
3342FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3343
3344
3345/** Opcode 0x0f 0x72. */
3346FNIEMOP_DEF(iemOp_Grp13)
3347{
3348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3349 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3350 return IEMOP_RAISE_INVALID_OPCODE();
3351 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3352 {
3353 case 0: case 1: case 3: case 5: case 7:
3354 return IEMOP_RAISE_INVALID_OPCODE();
3355 case 2:
3356 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3357 {
3358 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3359 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3360 default: return IEMOP_RAISE_INVALID_OPCODE();
3361 }
3362 case 4:
3363 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3364 {
3365 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3366 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3367 default: return IEMOP_RAISE_INVALID_OPCODE();
3368 }
3369 case 6:
3370 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3371 {
3372 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3373 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3374 default: return IEMOP_RAISE_INVALID_OPCODE();
3375 }
3376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3377 }
3378}
3379
3380
3381/** Opcode 0x0f 0x73 11/2. */
3382FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3383
3384/** Opcode 0x66 0x0f 0x73 11/2. */
3385FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3386
3387/** Opcode 0x66 0x0f 0x73 11/3. */
3388FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3389
3390/** Opcode 0x0f 0x73 11/6. */
3391FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3392
3393/** Opcode 0x66 0x0f 0x73 11/6. */
3394FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3395
3396/** Opcode 0x66 0x0f 0x73 11/7. */
3397FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3398
3399
3400/** Opcode 0x0f 0x73. */
3401FNIEMOP_DEF(iemOp_Grp14)
3402{
3403 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3404 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3405 return IEMOP_RAISE_INVALID_OPCODE();
3406 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3407 {
3408 case 0: case 1: case 4: case 5:
3409 return IEMOP_RAISE_INVALID_OPCODE();
3410 case 2:
3411 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3412 {
3413 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3414 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3415 default: return IEMOP_RAISE_INVALID_OPCODE();
3416 }
3417 case 3:
3418 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3419 {
3420 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3421 default: return IEMOP_RAISE_INVALID_OPCODE();
3422 }
3423 case 6:
3424 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3425 {
3426 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3427 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3428 default: return IEMOP_RAISE_INVALID_OPCODE();
3429 }
3430 case 7:
3431 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3432 {
3433 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3434 default: return IEMOP_RAISE_INVALID_OPCODE();
3435 }
3436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3437 }
3438}
3439
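/*
 * Reference map for groups 12..14 (mod must be 3; ModRM.reg is the opcode
 * extension, ModRM.rm picks the MMX/XMM register):
 *      0f 71 /2 psrlw   /4 psraw    /6 psllw
 *      0f 72 /2 psrld   /4 psrad    /6 pslld
 *      0f 73 /2 psrlq   /3 psrldq*  /6 psllq   /7 pslldq*
 * (*) 66h prefixed SSE2 forms only; the rest exist as MMX (no prefix) and
 * SSE2 (66h) variants, as dispatched above.
 */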
3440
3441/**
3442 * Common worker for SSE2 and MMX instructions on the forms:
3443 * pxxx mm1, mm2/mem64
3444 * pxxx xmm1, xmm2/mem128
3445 *
3446 * Proper alignment of the 128-bit operand is enforced.
3447 * Exceptions type 4. SSE2 and MMX cpuid checks.
3448 */
3449FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3450{
3451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3452 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3453 {
3454 case IEM_OP_PRF_SIZE_OP: /* SSE */
3455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3456 {
3457 /*
3458 * Register, register.
3459 */
3460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3461 IEM_MC_BEGIN(2, 0);
3462 IEM_MC_ARG(uint128_t *, pDst, 0);
3463 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3464 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3465 IEM_MC_PREPARE_SSE_USAGE();
3466 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3467 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3468 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3469 IEM_MC_ADVANCE_RIP();
3470 IEM_MC_END();
3471 }
3472 else
3473 {
3474 /*
3475 * Register, memory.
3476 */
3477 IEM_MC_BEGIN(2, 2);
3478 IEM_MC_ARG(uint128_t *, pDst, 0);
3479 IEM_MC_LOCAL(uint128_t, uSrc);
3480 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3482
3483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3485 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3486 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3487
3488 IEM_MC_PREPARE_SSE_USAGE();
3489 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3490 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3491
3492 IEM_MC_ADVANCE_RIP();
3493 IEM_MC_END();
3494 }
3495 return VINF_SUCCESS;
3496
3497 case 0: /* MMX */
3498 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3499 {
3500 /*
3501 * Register, register.
3502 */
3503 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3504 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3506 IEM_MC_BEGIN(2, 0);
3507 IEM_MC_ARG(uint64_t *, pDst, 0);
3508 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3509 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3510 IEM_MC_PREPARE_FPU_USAGE();
3511 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3512 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3513 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3514 IEM_MC_ADVANCE_RIP();
3515 IEM_MC_END();
3516 }
3517 else
3518 {
3519 /*
3520 * Register, memory.
3521 */
3522 IEM_MC_BEGIN(2, 2);
3523 IEM_MC_ARG(uint64_t *, pDst, 0);
3524 IEM_MC_LOCAL(uint64_t, uSrc);
3525 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3527
3528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3530 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3531 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3532
3533 IEM_MC_PREPARE_FPU_USAGE();
3534 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3535 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3536
3537 IEM_MC_ADVANCE_RIP();
3538 IEM_MC_END();
3539 }
3540 return VINF_SUCCESS;
3541
3542 default:
3543 return IEMOP_RAISE_INVALID_OPCODE();
3544 }
3545}
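
/*
 * Editorial note: the pfnU64/pfnU128 workers dispatched above live in the
 * assembly helpers. As a rough reference for what a lane-wise compare such
 * as pcmpeqb computes, here is a hypothetical C sketch (not the actual
 * worker implementation):
 */
#if 0 /* illustrative sketch, not part of the build */
static uint64_t iemExamplePCmpEqBU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        uint64_t const fLaneMask = UINT64_C(0xff) << (iByte * 8);
        if ((uDst & fLaneMask) == (uSrc & fLaneMask))
            uResult |= fLaneMask;   /* equal lanes become all ones, others all zeros */
    }
    return uResult;
}
#endif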
3546
3547
3548/** Opcode 0x0f 0x74. */
3549FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3550{
3551 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3552 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3553}
3554
3555
3556/** Opcode 0x0f 0x75. */
3557FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3558{
3559 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3560 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3561}
3562
3563
3564/** Opcode 0x0f 0x76. */
3565FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
3566{
3567 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3568 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3569}
3570
3571
3572/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3573FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3574/* Opcode 0x66 0x0f 0x77 - invalid */
3575/* Opcode 0xf3 0x0f 0x77 - invalid */
3576/* Opcode 0xf2 0x0f 0x77 - invalid */
3577
3578/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3579FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3580/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3581FNIEMOP_STUB(iemOp_AmdGrp17);
3582/* Opcode 0xf3 0x0f 0x78 - invalid */
3583/* Opcode 0xf2 0x0f 0x78 - invalid */
3584
3585/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3586FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3587/* Opcode 0x66 0x0f 0x79 - invalid */
3588/* Opcode 0xf3 0x0f 0x79 - invalid */
3589/* Opcode 0xf2 0x0f 0x79 - invalid */
3590
3591/* Opcode 0x0f 0x7a - invalid */
3592/* Opcode 0x66 0x0f 0x7a - invalid */
3593/* Opcode 0xf3 0x0f 0x7a - invalid */
3594/* Opcode 0xf2 0x0f 0x7a - invalid */
3595
3596/* Opcode 0x0f 0x7b - invalid */
3597/* Opcode 0x66 0x0f 0x7b - invalid */
3598/* Opcode 0xf3 0x0f 0x7b - invalid */
3599/* Opcode 0xf2 0x0f 0x7b - invalid */
3600
3601/* Opcode 0x0f 0x7c - invalid */
3602/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3603FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3604/* Opcode 0xf3 0x0f 0x7c - invalid */
3605/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3606FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3607
3608/* Opcode 0x0f 0x7d - invalid */
3609/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3610FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3611/* Opcode 0xf3 0x0f 0x7d - invalid */
3612/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3613FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3614
3615
3616/** Opcode 0x0f 0x7e. */
3617FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3618{
3619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3620 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3621 {
3622 case IEM_OP_PRF_SIZE_OP: /* SSE */
3623 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3624 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3625 else
3626 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3628 {
3629 /* greg, XMM */
3630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3631 IEM_MC_BEGIN(0, 1);
3632 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3633 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3634 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3635 {
3636 IEM_MC_LOCAL(uint64_t, u64Tmp);
3637 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3638 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3639 }
3640 else
3641 {
3642 IEM_MC_LOCAL(uint32_t, u32Tmp);
3643 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3644 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3645 }
3646 IEM_MC_ADVANCE_RIP();
3647 IEM_MC_END();
3648 }
3649 else
3650 {
3651 /* [mem], XMM */
3652 IEM_MC_BEGIN(0, 2);
3653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3656 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3657 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3658 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3659 {
3660 IEM_MC_LOCAL(uint64_t, u64Tmp);
3661 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3662 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3663 }
3664 else
3665 {
3666 IEM_MC_LOCAL(uint32_t, u32Tmp);
3667 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3668 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3669 }
3670 IEM_MC_ADVANCE_RIP();
3671 IEM_MC_END();
3672 }
3673 return VINF_SUCCESS;
3674
3675 case 0: /* MMX */
3676 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3677 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3678 else
3679 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3680 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3681 {
3682 /* greg, MMX */
3683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3684 IEM_MC_BEGIN(0, 1);
3685 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3686 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3687 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3688 {
3689 IEM_MC_LOCAL(uint64_t, u64Tmp);
3690 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3691 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3692 }
3693 else
3694 {
3695 IEM_MC_LOCAL(uint32_t, u32Tmp);
3696 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3697 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3698 }
3699 IEM_MC_ADVANCE_RIP();
3700 IEM_MC_END();
3701 }
3702 else
3703 {
3704 /* [mem], MMX */
3705 IEM_MC_BEGIN(0, 2);
3706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3709 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3710 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3711 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3712 {
3713 IEM_MC_LOCAL(uint64_t, u64Tmp);
3714 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3715 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3716 }
3717 else
3718 {
3719 IEM_MC_LOCAL(uint32_t, u32Tmp);
3720 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3721 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3722 }
3723 IEM_MC_ADVANCE_RIP();
3724 IEM_MC_END();
3725 }
3726 return VINF_SUCCESS;
3727
3728 default:
3729 return IEMOP_RAISE_INVALID_OPCODE();
3730 }
3731}
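
/*
 * Editorial note: the REX.W tests above select between the MOVQ (64-bit
 * destination) and MOVD (32-bit destination) encodings of 0x0f 0x7e. A
 * trivial sketch of that selection (the helper name is hypothetical):
 */
#if 0 /* illustrative sketch, not part of the build */
static unsigned iemExampleMovdMovqOperandBits(uint32_t fPrefixes)
{
    return (fPrefixes & IEM_OP_PRF_SIZE_REX_W) ? 64 : 32;
}
#endif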
3732
3733
3734/** Opcode 0x0f 0x7f. */
3735FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3736{
3737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3738 bool fAligned = false;
3739 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3740 {
3741 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3742 fAligned = true;
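/* fall thru - the aligned and unaligned forms share the code below */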
3743 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3744 if (fAligned)
3745 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3746 else
3747 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3748 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3749 {
3750 /*
3751 * Register, register.
3752 */
3753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3754 IEM_MC_BEGIN(0, 0);
3755 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3756 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3757 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3758 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3759 IEM_MC_ADVANCE_RIP();
3760 IEM_MC_END();
3761 }
3762 else
3763 {
3764 /*
3765 * Register, memory.
3766 */
3767 IEM_MC_BEGIN(0, 2);
3768 IEM_MC_LOCAL(uint128_t, u128Tmp);
3769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3770
3771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3773 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3774 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3775
3776 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3777 if (fAligned)
3778 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3779 else
3780 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3781
3782 IEM_MC_ADVANCE_RIP();
3783 IEM_MC_END();
3784 }
3785 return VINF_SUCCESS;
3786
3787 case 0: /* MMX */
3788 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3789
3790 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3791 {
3792 /*
3793 * Register, register.
3794 */
3795 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3796 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3798 IEM_MC_BEGIN(0, 1);
3799 IEM_MC_LOCAL(uint64_t, u64Tmp);
3800 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3801 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3802 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3803 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3804 IEM_MC_ADVANCE_RIP();
3805 IEM_MC_END();
3806 }
3807 else
3808 {
3809 /*
3810 * Register, memory.
3811 */
3812 IEM_MC_BEGIN(0, 2);
3813 IEM_MC_LOCAL(uint64_t, u64Tmp);
3814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3815
3816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3818 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3819 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3820
3821 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3822 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3823
3824 IEM_MC_ADVANCE_RIP();
3825 IEM_MC_END();
3826 }
3827 return VINF_SUCCESS;
3828
3829 default:
3830 return IEMOP_RAISE_INVALID_OPCODE();
3831 }
3832}
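
/*
 * Editorial note: the _ALIGN_SSE store above is what distinguishes movdqa
 * from movdqu; the aligned form must raise #GP(0) on a misaligned 16-byte
 * access. A hypothetical sketch of that check (not the actual IEM memory
 * code):
 */
#if 0 /* illustrative sketch, not part of the build */
static int iemExampleSseAlignmentOk(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0;   /* movdqa requires 16-byte alignment; movdqu does not */
}
#endif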
3833
3834
3835
3836/** Opcode 0x0f 0x80. */
3837FNIEMOP_DEF(iemOp_jo_Jv)
3838{
3839 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3840 IEMOP_HLP_MIN_386();
3841 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3842 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3843 {
3844 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3846
3847 IEM_MC_BEGIN(0, 0);
3848 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3849 IEM_MC_REL_JMP_S16(i16Imm);
3850 } IEM_MC_ELSE() {
3851 IEM_MC_ADVANCE_RIP();
3852 } IEM_MC_ENDIF();
3853 IEM_MC_END();
3854 }
3855 else
3856 {
3857 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3859
3860 IEM_MC_BEGIN(0, 0);
3861 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3862 IEM_MC_REL_JMP_S32(i32Imm);
3863 } IEM_MC_ELSE() {
3864 IEM_MC_ADVANCE_RIP();
3865 } IEM_MC_ENDIF();
3866 IEM_MC_END();
3867 }
3868 return VINF_SUCCESS;
3869}
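
/*
 * Editorial note: all the Jcc handlers below follow the pattern above: the
 * taken branch is a RIP-relative jump with the displacement sign-extended
 * and the result truncated to the effective operand size. A hypothetical
 * sketch of what IEM_MC_REL_JMP_S16/S32 compute:
 */
#if 0 /* illustrative sketch, not part of the build */
static uint64_t iemExampleRelJmpTarget(uint64_t uRipNext, int32_t offRel, unsigned cOpBits)
{
    uint64_t uNewRip = uRipNext + (int64_t)offRel;  /* sign-extended displacement */
    if (cOpBits == 16)
        uNewRip &= UINT16_MAX;                      /* 16-bit operand size truncates IP */
    return uNewRip;
}
#endif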
3870
3871
3872/** Opcode 0x0f 0x81. */
3873FNIEMOP_DEF(iemOp_jno_Jv)
3874{
3875 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3876 IEMOP_HLP_MIN_386();
3877 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3878 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3879 {
3880 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3882
3883 IEM_MC_BEGIN(0, 0);
3884 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3885 IEM_MC_ADVANCE_RIP();
3886 } IEM_MC_ELSE() {
3887 IEM_MC_REL_JMP_S16(i16Imm);
3888 } IEM_MC_ENDIF();
3889 IEM_MC_END();
3890 }
3891 else
3892 {
3893 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3895
3896 IEM_MC_BEGIN(0, 0);
3897 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3898 IEM_MC_ADVANCE_RIP();
3899 } IEM_MC_ELSE() {
3900 IEM_MC_REL_JMP_S32(i32Imm);
3901 } IEM_MC_ENDIF();
3902 IEM_MC_END();
3903 }
3904 return VINF_SUCCESS;
3905}
3906
3907
3908/** Opcode 0x0f 0x82. */
3909FNIEMOP_DEF(iemOp_jc_Jv)
3910{
3911 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3912 IEMOP_HLP_MIN_386();
3913 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3914 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3915 {
3916 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3918
3919 IEM_MC_BEGIN(0, 0);
3920 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3921 IEM_MC_REL_JMP_S16(i16Imm);
3922 } IEM_MC_ELSE() {
3923 IEM_MC_ADVANCE_RIP();
3924 } IEM_MC_ENDIF();
3925 IEM_MC_END();
3926 }
3927 else
3928 {
3929 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3931
3932 IEM_MC_BEGIN(0, 0);
3933 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3934 IEM_MC_REL_JMP_S32(i32Imm);
3935 } IEM_MC_ELSE() {
3936 IEM_MC_ADVANCE_RIP();
3937 } IEM_MC_ENDIF();
3938 IEM_MC_END();
3939 }
3940 return VINF_SUCCESS;
3941}
3942
3943
3944/** Opcode 0x0f 0x83. */
3945FNIEMOP_DEF(iemOp_jnc_Jv)
3946{
3947 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3948 IEMOP_HLP_MIN_386();
3949 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3950 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3951 {
3952 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3954
3955 IEM_MC_BEGIN(0, 0);
3956 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3957 IEM_MC_ADVANCE_RIP();
3958 } IEM_MC_ELSE() {
3959 IEM_MC_REL_JMP_S16(i16Imm);
3960 } IEM_MC_ENDIF();
3961 IEM_MC_END();
3962 }
3963 else
3964 {
3965 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3967
3968 IEM_MC_BEGIN(0, 0);
3969 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3970 IEM_MC_ADVANCE_RIP();
3971 } IEM_MC_ELSE() {
3972 IEM_MC_REL_JMP_S32(i32Imm);
3973 } IEM_MC_ENDIF();
3974 IEM_MC_END();
3975 }
3976 return VINF_SUCCESS;
3977}
3978
3979
3980/** Opcode 0x0f 0x84. */
3981FNIEMOP_DEF(iemOp_je_Jv)
3982{
3983 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3984 IEMOP_HLP_MIN_386();
3985 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3986 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3987 {
3988 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3990
3991 IEM_MC_BEGIN(0, 0);
3992 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3993 IEM_MC_REL_JMP_S16(i16Imm);
3994 } IEM_MC_ELSE() {
3995 IEM_MC_ADVANCE_RIP();
3996 } IEM_MC_ENDIF();
3997 IEM_MC_END();
3998 }
3999 else
4000 {
4001 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4003
4004 IEM_MC_BEGIN(0, 0);
4005 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4006 IEM_MC_REL_JMP_S32(i32Imm);
4007 } IEM_MC_ELSE() {
4008 IEM_MC_ADVANCE_RIP();
4009 } IEM_MC_ENDIF();
4010 IEM_MC_END();
4011 }
4012 return VINF_SUCCESS;
4013}
4014
4015
4016/** Opcode 0x0f 0x85. */
4017FNIEMOP_DEF(iemOp_jne_Jv)
4018{
4019 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4020 IEMOP_HLP_MIN_386();
4021 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4022 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4023 {
4024 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4026
4027 IEM_MC_BEGIN(0, 0);
4028 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4029 IEM_MC_ADVANCE_RIP();
4030 } IEM_MC_ELSE() {
4031 IEM_MC_REL_JMP_S16(i16Imm);
4032 } IEM_MC_ENDIF();
4033 IEM_MC_END();
4034 }
4035 else
4036 {
4037 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4039
4040 IEM_MC_BEGIN(0, 0);
4041 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4042 IEM_MC_ADVANCE_RIP();
4043 } IEM_MC_ELSE() {
4044 IEM_MC_REL_JMP_S32(i32Imm);
4045 } IEM_MC_ENDIF();
4046 IEM_MC_END();
4047 }
4048 return VINF_SUCCESS;
4049}
4050
4051
4052/** Opcode 0x0f 0x86. */
4053FNIEMOP_DEF(iemOp_jbe_Jv)
4054{
4055 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4056 IEMOP_HLP_MIN_386();
4057 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4058 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4059 {
4060 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4062
4063 IEM_MC_BEGIN(0, 0);
4064 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4065 IEM_MC_REL_JMP_S16(i16Imm);
4066 } IEM_MC_ELSE() {
4067 IEM_MC_ADVANCE_RIP();
4068 } IEM_MC_ENDIF();
4069 IEM_MC_END();
4070 }
4071 else
4072 {
4073 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4075
4076 IEM_MC_BEGIN(0, 0);
4077 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4078 IEM_MC_REL_JMP_S32(i32Imm);
4079 } IEM_MC_ELSE() {
4080 IEM_MC_ADVANCE_RIP();
4081 } IEM_MC_ENDIF();
4082 IEM_MC_END();
4083 }
4084 return VINF_SUCCESS;
4085}
4086
4087
4088/** Opcode 0x0f 0x87. */
4089FNIEMOP_DEF(iemOp_jnbe_Jv)
4090{
4091 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4092 IEMOP_HLP_MIN_386();
4093 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4094 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4095 {
4096 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4098
4099 IEM_MC_BEGIN(0, 0);
4100 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4101 IEM_MC_ADVANCE_RIP();
4102 } IEM_MC_ELSE() {
4103 IEM_MC_REL_JMP_S16(i16Imm);
4104 } IEM_MC_ENDIF();
4105 IEM_MC_END();
4106 }
4107 else
4108 {
4109 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4111
4112 IEM_MC_BEGIN(0, 0);
4113 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4114 IEM_MC_ADVANCE_RIP();
4115 } IEM_MC_ELSE() {
4116 IEM_MC_REL_JMP_S32(i32Imm);
4117 } IEM_MC_ENDIF();
4118 IEM_MC_END();
4119 }
4120 return VINF_SUCCESS;
4121}
4122
4123
4124/** Opcode 0x0f 0x88. */
4125FNIEMOP_DEF(iemOp_js_Jv)
4126{
4127 IEMOP_MNEMONIC(js_Jv, "js Jv");
4128 IEMOP_HLP_MIN_386();
4129 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4130 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4131 {
4132 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4134
4135 IEM_MC_BEGIN(0, 0);
4136 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4137 IEM_MC_REL_JMP_S16(i16Imm);
4138 } IEM_MC_ELSE() {
4139 IEM_MC_ADVANCE_RIP();
4140 } IEM_MC_ENDIF();
4141 IEM_MC_END();
4142 }
4143 else
4144 {
4145 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4147
4148 IEM_MC_BEGIN(0, 0);
4149 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4150 IEM_MC_REL_JMP_S32(i32Imm);
4151 } IEM_MC_ELSE() {
4152 IEM_MC_ADVANCE_RIP();
4153 } IEM_MC_ENDIF();
4154 IEM_MC_END();
4155 }
4156 return VINF_SUCCESS;
4157}
4158
4159
4160/** Opcode 0x0f 0x89. */
4161FNIEMOP_DEF(iemOp_jns_Jv)
4162{
4163 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4164 IEMOP_HLP_MIN_386();
4165 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4166 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4167 {
4168 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4170
4171 IEM_MC_BEGIN(0, 0);
4172 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4173 IEM_MC_ADVANCE_RIP();
4174 } IEM_MC_ELSE() {
4175 IEM_MC_REL_JMP_S16(i16Imm);
4176 } IEM_MC_ENDIF();
4177 IEM_MC_END();
4178 }
4179 else
4180 {
4181 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4183
4184 IEM_MC_BEGIN(0, 0);
4185 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4186 IEM_MC_ADVANCE_RIP();
4187 } IEM_MC_ELSE() {
4188 IEM_MC_REL_JMP_S32(i32Imm);
4189 } IEM_MC_ENDIF();
4190 IEM_MC_END();
4191 }
4192 return VINF_SUCCESS;
4193}
4194
4195
4196/** Opcode 0x0f 0x8a. */
4197FNIEMOP_DEF(iemOp_jp_Jv)
4198{
4199 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4200 IEMOP_HLP_MIN_386();
4201 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4202 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4203 {
4204 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4206
4207 IEM_MC_BEGIN(0, 0);
4208 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4209 IEM_MC_REL_JMP_S16(i16Imm);
4210 } IEM_MC_ELSE() {
4211 IEM_MC_ADVANCE_RIP();
4212 } IEM_MC_ENDIF();
4213 IEM_MC_END();
4214 }
4215 else
4216 {
4217 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4219
4220 IEM_MC_BEGIN(0, 0);
4221 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4222 IEM_MC_REL_JMP_S32(i32Imm);
4223 } IEM_MC_ELSE() {
4224 IEM_MC_ADVANCE_RIP();
4225 } IEM_MC_ENDIF();
4226 IEM_MC_END();
4227 }
4228 return VINF_SUCCESS;
4229}
4230
4231
4232/** Opcode 0x0f 0x8b. */
4233FNIEMOP_DEF(iemOp_jnp_Jv)
4234{
4235 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4236 IEMOP_HLP_MIN_386();
4237 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4238 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4239 {
4240 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4242
4243 IEM_MC_BEGIN(0, 0);
4244 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4245 IEM_MC_ADVANCE_RIP();
4246 } IEM_MC_ELSE() {
4247 IEM_MC_REL_JMP_S16(i16Imm);
4248 } IEM_MC_ENDIF();
4249 IEM_MC_END();
4250 }
4251 else
4252 {
4253 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4255
4256 IEM_MC_BEGIN(0, 0);
4257 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4258 IEM_MC_ADVANCE_RIP();
4259 } IEM_MC_ELSE() {
4260 IEM_MC_REL_JMP_S32(i32Imm);
4261 } IEM_MC_ENDIF();
4262 IEM_MC_END();
4263 }
4264 return VINF_SUCCESS;
4265}
4266
4267
4268/** Opcode 0x0f 0x8c. */
4269FNIEMOP_DEF(iemOp_jl_Jv)
4270{
4271 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4272 IEMOP_HLP_MIN_386();
4273 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4274 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4275 {
4276 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4278
4279 IEM_MC_BEGIN(0, 0);
4280 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4281 IEM_MC_REL_JMP_S16(i16Imm);
4282 } IEM_MC_ELSE() {
4283 IEM_MC_ADVANCE_RIP();
4284 } IEM_MC_ENDIF();
4285 IEM_MC_END();
4286 }
4287 else
4288 {
4289 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4291
4292 IEM_MC_BEGIN(0, 0);
4293 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4294 IEM_MC_REL_JMP_S32(i32Imm);
4295 } IEM_MC_ELSE() {
4296 IEM_MC_ADVANCE_RIP();
4297 } IEM_MC_ENDIF();
4298 IEM_MC_END();
4299 }
4300 return VINF_SUCCESS;
4301}
4302
4303
4304/** Opcode 0x0f 0x8d. */
4305FNIEMOP_DEF(iemOp_jnl_Jv)
4306{
4307 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4308 IEMOP_HLP_MIN_386();
4309 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4310 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4311 {
4312 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314
4315 IEM_MC_BEGIN(0, 0);
4316 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4317 IEM_MC_ADVANCE_RIP();
4318 } IEM_MC_ELSE() {
4319 IEM_MC_REL_JMP_S16(i16Imm);
4320 } IEM_MC_ENDIF();
4321 IEM_MC_END();
4322 }
4323 else
4324 {
4325 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4327
4328 IEM_MC_BEGIN(0, 0);
4329 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4330 IEM_MC_ADVANCE_RIP();
4331 } IEM_MC_ELSE() {
4332 IEM_MC_REL_JMP_S32(i32Imm);
4333 } IEM_MC_ENDIF();
4334 IEM_MC_END();
4335 }
4336 return VINF_SUCCESS;
4337}
4338
4339
4340/** Opcode 0x0f 0x8e. */
4341FNIEMOP_DEF(iemOp_jle_Jv)
4342{
4343 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4344 IEMOP_HLP_MIN_386();
4345 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4346 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4347 {
4348 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4350
4351 IEM_MC_BEGIN(0, 0);
4352 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4353 IEM_MC_REL_JMP_S16(i16Imm);
4354 } IEM_MC_ELSE() {
4355 IEM_MC_ADVANCE_RIP();
4356 } IEM_MC_ENDIF();
4357 IEM_MC_END();
4358 }
4359 else
4360 {
4361 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4363
4364 IEM_MC_BEGIN(0, 0);
4365 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4366 IEM_MC_REL_JMP_S32(i32Imm);
4367 } IEM_MC_ELSE() {
4368 IEM_MC_ADVANCE_RIP();
4369 } IEM_MC_ENDIF();
4370 IEM_MC_END();
4371 }
4372 return VINF_SUCCESS;
4373}
4374
4375
4376/** Opcode 0x0f 0x8f. */
4377FNIEMOP_DEF(iemOp_jnle_Jv)
4378{
4379 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4380 IEMOP_HLP_MIN_386();
4381 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4382 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4383 {
4384 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4386
4387 IEM_MC_BEGIN(0, 0);
4388 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4389 IEM_MC_ADVANCE_RIP();
4390 } IEM_MC_ELSE() {
4391 IEM_MC_REL_JMP_S16(i16Imm);
4392 } IEM_MC_ENDIF();
4393 IEM_MC_END();
4394 }
4395 else
4396 {
4397 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4399
4400 IEM_MC_BEGIN(0, 0);
4401 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4402 IEM_MC_ADVANCE_RIP();
4403 } IEM_MC_ELSE() {
4404 IEM_MC_REL_JMP_S32(i32Imm);
4405 } IEM_MC_ENDIF();
4406 IEM_MC_END();
4407 }
4408 return VINF_SUCCESS;
4409}
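
/*
 * Editorial summary: the Jcc family above and the SETcc family below test
 * the standard x86 condition codes. A compact hypothetical evaluator, where
 * bCond is the low opcode nibble (0x0..0xf of 0x0f 0x80..0x8f / 0x90..0x9f)
 * and the iemExample* name is not part of IEM:
 */
#if 0 /* illustrative sketch, not part of the build */
static int iemExampleTestCc(uint32_t fEFlags, uint8_t bCond)
{
    int fResult = 0;
    switch (bCond >> 1)
    {
        case 0: fResult = RT_BOOL(fEFlags & X86_EFL_OF); break;                 /* o  / no  */
        case 1: fResult = RT_BOOL(fEFlags & X86_EFL_CF); break;                 /* c  / nc  */
        case 2: fResult = RT_BOOL(fEFlags & X86_EFL_ZF); break;                 /* e  / ne  */
        case 3: fResult = RT_BOOL(fEFlags & (X86_EFL_CF | X86_EFL_ZF)); break;  /* be / nbe */
        case 4: fResult = RT_BOOL(fEFlags & X86_EFL_SF); break;                 /* s  / ns  */
        case 5: fResult = RT_BOOL(fEFlags & X86_EFL_PF); break;                 /* p  / np  */
        case 6: fResult =    RT_BOOL(fEFlags & X86_EFL_SF)
                          != RT_BOOL(fEFlags & X86_EFL_OF); break;              /* l  / nl  */
        case 7: fResult =    RT_BOOL(fEFlags & X86_EFL_ZF)
                          || (   RT_BOOL(fEFlags & X86_EFL_SF)
                              != RT_BOOL(fEFlags & X86_EFL_OF)); break;         /* le / nle */
    }
    return (bCond & 1) ? !fResult : fResult;   /* odd condition numbers are the negations */
}
#endif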
4410
4411
4412/** Opcode 0x0f 0x90. */
4413FNIEMOP_DEF(iemOp_seto_Eb)
4414{
4415 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4416 IEMOP_HLP_MIN_386();
4417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4418
4419 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4420 * any way. AMD says it's "unused", whatever that means. We're
4421 * ignoring for now. */
4422 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4423 {
4424 /* register target */
4425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4426 IEM_MC_BEGIN(0, 0);
4427 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4428 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4429 } IEM_MC_ELSE() {
4430 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4431 } IEM_MC_ENDIF();
4432 IEM_MC_ADVANCE_RIP();
4433 IEM_MC_END();
4434 }
4435 else
4436 {
4437 /* memory target */
4438 IEM_MC_BEGIN(0, 1);
4439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4442 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4443 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4444 } IEM_MC_ELSE() {
4445 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4446 } IEM_MC_ENDIF();
4447 IEM_MC_ADVANCE_RIP();
4448 IEM_MC_END();
4449 }
4450 return VINF_SUCCESS;
4451}
4452
4453
4454/** Opcode 0x0f 0x91. */
4455FNIEMOP_DEF(iemOp_setno_Eb)
4456{
4457 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4458 IEMOP_HLP_MIN_386();
4459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4460
4461 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4462 * any way. AMD says it's "unused", whatever that means. We're
4463 * ignoring for now. */
4464 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4465 {
4466 /* register target */
4467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4468 IEM_MC_BEGIN(0, 0);
4469 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4470 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4471 } IEM_MC_ELSE() {
4472 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4473 } IEM_MC_ENDIF();
4474 IEM_MC_ADVANCE_RIP();
4475 IEM_MC_END();
4476 }
4477 else
4478 {
4479 /* memory target */
4480 IEM_MC_BEGIN(0, 1);
4481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4484 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4485 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4486 } IEM_MC_ELSE() {
4487 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4488 } IEM_MC_ENDIF();
4489 IEM_MC_ADVANCE_RIP();
4490 IEM_MC_END();
4491 }
4492 return VINF_SUCCESS;
4493}
4494
4495
4496/** Opcode 0x0f 0x92. */
4497FNIEMOP_DEF(iemOp_setc_Eb)
4498{
4499 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4500 IEMOP_HLP_MIN_386();
4501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4502
4503 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4504 * any way. AMD says it's "unused", whatever that means. We're
4505 * ignoring for now. */
4506 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4507 {
4508 /* register target */
4509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4510 IEM_MC_BEGIN(0, 0);
4511 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4512 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4513 } IEM_MC_ELSE() {
4514 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4515 } IEM_MC_ENDIF();
4516 IEM_MC_ADVANCE_RIP();
4517 IEM_MC_END();
4518 }
4519 else
4520 {
4521 /* memory target */
4522 IEM_MC_BEGIN(0, 1);
4523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4527 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4528 } IEM_MC_ELSE() {
4529 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4530 } IEM_MC_ENDIF();
4531 IEM_MC_ADVANCE_RIP();
4532 IEM_MC_END();
4533 }
4534 return VINF_SUCCESS;
4535}
4536
4537
4538/** Opcode 0x0f 0x93. */
4539FNIEMOP_DEF(iemOp_setnc_Eb)
4540{
4541 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4542 IEMOP_HLP_MIN_386();
4543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4544
4545 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4546 * any way. AMD says it's "unused", whatever that means. We're
4547 * ignoring for now. */
4548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4549 {
4550 /* register target */
4551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4552 IEM_MC_BEGIN(0, 0);
4553 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4554 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4555 } IEM_MC_ELSE() {
4556 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4557 } IEM_MC_ENDIF();
4558 IEM_MC_ADVANCE_RIP();
4559 IEM_MC_END();
4560 }
4561 else
4562 {
4563 /* memory target */
4564 IEM_MC_BEGIN(0, 1);
4565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4568 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4569 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4570 } IEM_MC_ELSE() {
4571 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4572 } IEM_MC_ENDIF();
4573 IEM_MC_ADVANCE_RIP();
4574 IEM_MC_END();
4575 }
4576 return VINF_SUCCESS;
4577}
4578
4579
4580/** Opcode 0x0f 0x94. */
4581FNIEMOP_DEF(iemOp_sete_Eb)
4582{
4583 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4584 IEMOP_HLP_MIN_386();
4585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4586
4587 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4588 * any way. AMD says it's "unused", whatever that means. We're
4589 * ignoring for now. */
4590 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4591 {
4592 /* register target */
4593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4594 IEM_MC_BEGIN(0, 0);
4595 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4596 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4597 } IEM_MC_ELSE() {
4598 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4599 } IEM_MC_ENDIF();
4600 IEM_MC_ADVANCE_RIP();
4601 IEM_MC_END();
4602 }
4603 else
4604 {
4605 /* memory target */
4606 IEM_MC_BEGIN(0, 1);
4607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4610 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4611 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4612 } IEM_MC_ELSE() {
4613 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4614 } IEM_MC_ENDIF();
4615 IEM_MC_ADVANCE_RIP();
4616 IEM_MC_END();
4617 }
4618 return VINF_SUCCESS;
4619}
4620
4621
4622/** Opcode 0x0f 0x95. */
4623FNIEMOP_DEF(iemOp_setne_Eb)
4624{
4625 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4626 IEMOP_HLP_MIN_386();
4627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4628
4629 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4630 * any way. AMD says it's "unused", whatever that means. We're
4631 * ignoring for now. */
4632 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4633 {
4634 /* register target */
4635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4636 IEM_MC_BEGIN(0, 0);
4637 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4638 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4639 } IEM_MC_ELSE() {
4640 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4641 } IEM_MC_ENDIF();
4642 IEM_MC_ADVANCE_RIP();
4643 IEM_MC_END();
4644 }
4645 else
4646 {
4647 /* memory target */
4648 IEM_MC_BEGIN(0, 1);
4649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4652 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4653 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4654 } IEM_MC_ELSE() {
4655 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4656 } IEM_MC_ENDIF();
4657 IEM_MC_ADVANCE_RIP();
4658 IEM_MC_END();
4659 }
4660 return VINF_SUCCESS;
4661}
4662
4663
4664/** Opcode 0x0f 0x96. */
4665FNIEMOP_DEF(iemOp_setbe_Eb)
4666{
4667 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4668 IEMOP_HLP_MIN_386();
4669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4670
4671 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4672 * any way. AMD says it's "unused", whatever that means. We're
4673 * ignoring for now. */
4674 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4675 {
4676 /* register target */
4677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4678 IEM_MC_BEGIN(0, 0);
4679 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4680 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4681 } IEM_MC_ELSE() {
4682 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4683 } IEM_MC_ENDIF();
4684 IEM_MC_ADVANCE_RIP();
4685 IEM_MC_END();
4686 }
4687 else
4688 {
4689 /* memory target */
4690 IEM_MC_BEGIN(0, 1);
4691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4694 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4695 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4696 } IEM_MC_ELSE() {
4697 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4698 } IEM_MC_ENDIF();
4699 IEM_MC_ADVANCE_RIP();
4700 IEM_MC_END();
4701 }
4702 return VINF_SUCCESS;
4703}
4704
4705
4706/** Opcode 0x0f 0x97. */
4707FNIEMOP_DEF(iemOp_setnbe_Eb)
4708{
4709 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4710 IEMOP_HLP_MIN_386();
4711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4712
4713 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4714 * any way. AMD says it's "unused", whatever that means. We're
4715 * ignoring for now. */
4716 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4717 {
4718 /* register target */
4719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4720 IEM_MC_BEGIN(0, 0);
4721 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4722 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4723 } IEM_MC_ELSE() {
4724 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4725 } IEM_MC_ENDIF();
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 }
4729 else
4730 {
4731 /* memory target */
4732 IEM_MC_BEGIN(0, 1);
4733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4736 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4737 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4738 } IEM_MC_ELSE() {
4739 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4740 } IEM_MC_ENDIF();
4741 IEM_MC_ADVANCE_RIP();
4742 IEM_MC_END();
4743 }
4744 return VINF_SUCCESS;
4745}
4746
4747
4748/** Opcode 0x0f 0x98. */
4749FNIEMOP_DEF(iemOp_sets_Eb)
4750{
4751 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4752 IEMOP_HLP_MIN_386();
4753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4754
4755 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4756 * any way. AMD says it's "unused", whatever that means. We're
4757 * ignoring for now. */
4758 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4759 {
4760 /* register target */
4761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4762 IEM_MC_BEGIN(0, 0);
4763 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4764 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4765 } IEM_MC_ELSE() {
4766 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4767 } IEM_MC_ENDIF();
4768 IEM_MC_ADVANCE_RIP();
4769 IEM_MC_END();
4770 }
4771 else
4772 {
4773 /* memory target */
4774 IEM_MC_BEGIN(0, 1);
4775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4778 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4779 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4780 } IEM_MC_ELSE() {
4781 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4782 } IEM_MC_ENDIF();
4783 IEM_MC_ADVANCE_RIP();
4784 IEM_MC_END();
4785 }
4786 return VINF_SUCCESS;
4787}
4788
4789
4790/** Opcode 0x0f 0x99. */
4791FNIEMOP_DEF(iemOp_setns_Eb)
4792{
4793 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4794 IEMOP_HLP_MIN_386();
4795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4796
4797 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4798 * any way. AMD says it's "unused", whatever that means. We're
4799 * ignoring for now. */
4800 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4801 {
4802 /* register target */
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804 IEM_MC_BEGIN(0, 0);
4805 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4806 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4807 } IEM_MC_ELSE() {
4808 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4809 } IEM_MC_ENDIF();
4810 IEM_MC_ADVANCE_RIP();
4811 IEM_MC_END();
4812 }
4813 else
4814 {
4815 /* memory target */
4816 IEM_MC_BEGIN(0, 1);
4817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4820 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4821 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4822 } IEM_MC_ELSE() {
4823 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4824 } IEM_MC_ENDIF();
4825 IEM_MC_ADVANCE_RIP();
4826 IEM_MC_END();
4827 }
4828 return VINF_SUCCESS;
4829}
4830
4831
4832/** Opcode 0x0f 0x9a. */
4833FNIEMOP_DEF(iemOp_setp_Eb)
4834{
4835 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4836 IEMOP_HLP_MIN_386();
4837 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4838
4839 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4840 * any way. AMD says it's "unused", whatever that means. We're
4841 * ignoring for now. */
4842 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4843 {
4844 /* register target */
4845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4846 IEM_MC_BEGIN(0, 0);
4847 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4848 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4849 } IEM_MC_ELSE() {
4850 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4851 } IEM_MC_ENDIF();
4852 IEM_MC_ADVANCE_RIP();
4853 IEM_MC_END();
4854 }
4855 else
4856 {
4857 /* memory target */
4858 IEM_MC_BEGIN(0, 1);
4859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4862 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4863 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4864 } IEM_MC_ELSE() {
4865 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4866 } IEM_MC_ENDIF();
4867 IEM_MC_ADVANCE_RIP();
4868 IEM_MC_END();
4869 }
4870 return VINF_SUCCESS;
4871}
4872
4873
4874/** Opcode 0x0f 0x9b. */
4875FNIEMOP_DEF(iemOp_setnp_Eb)
4876{
4877 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4878 IEMOP_HLP_MIN_386();
4879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4880
4881 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4882 * any way. AMD says it's "unused", whatever that means. We're
4883 * ignoring for now. */
4884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4885 {
4886 /* register target */
4887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4888 IEM_MC_BEGIN(0, 0);
4889 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4890 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4891 } IEM_MC_ELSE() {
4892 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4893 } IEM_MC_ENDIF();
4894 IEM_MC_ADVANCE_RIP();
4895 IEM_MC_END();
4896 }
4897 else
4898 {
4899 /* memory target */
4900 IEM_MC_BEGIN(0, 1);
4901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4905 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4906 } IEM_MC_ELSE() {
4907 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4908 } IEM_MC_ENDIF();
4909 IEM_MC_ADVANCE_RIP();
4910 IEM_MC_END();
4911 }
4912 return VINF_SUCCESS;
4913}
4914
4915
4916/** Opcode 0x0f 0x9c. */
4917FNIEMOP_DEF(iemOp_setl_Eb)
4918{
4919 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4920 IEMOP_HLP_MIN_386();
4921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4922
4923 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4924 * any way. AMD says it's "unused", whatever that means. We're
4925 * ignoring for now. */
4926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4927 {
4928 /* register target */
4929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4930 IEM_MC_BEGIN(0, 0);
4931 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4932 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4933 } IEM_MC_ELSE() {
4934 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4935 } IEM_MC_ENDIF();
4936 IEM_MC_ADVANCE_RIP();
4937 IEM_MC_END();
4938 }
4939 else
4940 {
4941 /* memory target */
4942 IEM_MC_BEGIN(0, 1);
4943 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4946 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4947 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4948 } IEM_MC_ELSE() {
4949 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4950 } IEM_MC_ENDIF();
4951 IEM_MC_ADVANCE_RIP();
4952 IEM_MC_END();
4953 }
4954 return VINF_SUCCESS;
4955}
4956
4957
4958/** Opcode 0x0f 0x9d. */
4959FNIEMOP_DEF(iemOp_setnl_Eb)
4960{
4961 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4962 IEMOP_HLP_MIN_386();
4963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4964
4965 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4966 * any way. AMD says it's "unused", whatever that means. We're
4967 * ignoring for now. */
4968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4969 {
4970 /* register target */
4971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4972 IEM_MC_BEGIN(0, 0);
4973 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4974 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4975 } IEM_MC_ELSE() {
4976 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4977 } IEM_MC_ENDIF();
4978 IEM_MC_ADVANCE_RIP();
4979 IEM_MC_END();
4980 }
4981 else
4982 {
4983 /* memory target */
4984 IEM_MC_BEGIN(0, 1);
4985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4988 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4989 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4990 } IEM_MC_ELSE() {
4991 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4992 } IEM_MC_ENDIF();
4993 IEM_MC_ADVANCE_RIP();
4994 IEM_MC_END();
4995 }
4996 return VINF_SUCCESS;
4997}
4998
4999
5000/** Opcode 0x0f 0x9e. */
5001FNIEMOP_DEF(iemOp_setle_Eb)
5002{
5003 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5004 IEMOP_HLP_MIN_386();
5005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5006
5007 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5008 * any way. AMD says it's "unused", whatever that means. We're
5009 * ignoring for now. */
5010 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5011 {
5012 /* register target */
5013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5014 IEM_MC_BEGIN(0, 0);
5015 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5016 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5017 } IEM_MC_ELSE() {
5018 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5019 } IEM_MC_ENDIF();
5020 IEM_MC_ADVANCE_RIP();
5021 IEM_MC_END();
5022 }
5023 else
5024 {
5025 /* memory target */
5026 IEM_MC_BEGIN(0, 1);
5027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5030 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5031 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5032 } IEM_MC_ELSE() {
5033 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5034 } IEM_MC_ENDIF();
5035 IEM_MC_ADVANCE_RIP();
5036 IEM_MC_END();
5037 }
5038 return VINF_SUCCESS;
5039}
5040
5041
5042/** Opcode 0x0f 0x9f. */
5043FNIEMOP_DEF(iemOp_setnle_Eb)
5044{
5045 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5046 IEMOP_HLP_MIN_386();
5047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5048
5049 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5050 * any way. AMD says it's "unused", whatever that means. We're
5051 * ignoring for now. */
5052 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5053 {
5054 /* register target */
5055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5056 IEM_MC_BEGIN(0, 0);
5057 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5058 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5059 } IEM_MC_ELSE() {
5060 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5061 } IEM_MC_ENDIF();
5062 IEM_MC_ADVANCE_RIP();
5063 IEM_MC_END();
5064 }
5065 else
5066 {
5067 /* memory target */
5068 IEM_MC_BEGIN(0, 1);
5069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5072 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5073 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5074 } IEM_MC_ELSE() {
5075 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5076 } IEM_MC_ENDIF();
5077 IEM_MC_ADVANCE_RIP();
5078 IEM_MC_END();
5079 }
5080 return VINF_SUCCESS;
5081}
5082
5083
5084/**
5085 * Common 'push segment-register' helper.
5086 */
5087FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5088{
5089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5090 if (iReg < X86_SREG_FS)
5091 IEMOP_HLP_NO_64BIT();
5092 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5093
5094 switch (pVCpu->iem.s.enmEffOpSize)
5095 {
5096 case IEMMODE_16BIT:
5097 IEM_MC_BEGIN(0, 1);
5098 IEM_MC_LOCAL(uint16_t, u16Value);
5099 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5100 IEM_MC_PUSH_U16(u16Value);
5101 IEM_MC_ADVANCE_RIP();
5102 IEM_MC_END();
5103 break;
5104
5105 case IEMMODE_32BIT:
5106 IEM_MC_BEGIN(0, 1);
5107 IEM_MC_LOCAL(uint32_t, u32Value);
5108 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5109 IEM_MC_PUSH_U32_SREG(u32Value);
5110 IEM_MC_ADVANCE_RIP();
5111 IEM_MC_END();
5112 break;
5113
5114 case IEMMODE_64BIT:
5115 IEM_MC_BEGIN(0, 1);
5116 IEM_MC_LOCAL(uint64_t, u64Value);
5117 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5118 IEM_MC_PUSH_U64(u64Value);
5119 IEM_MC_ADVANCE_RIP();
5120 IEM_MC_END();
5121 break;
5122 }
5123
5124 return VINF_SUCCESS;
5125}
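
/*
 * Editorial note: the 32-bit case above uses IEM_MC_PUSH_U32_SREG rather
 * than a plain 32-bit push because real CPUs may perform only a 16-bit
 * write to the 32-bit stack slot, leaving the upper half of the slot
 * untouched. A hypothetical sketch of that store behaviour:
 */
#if 0 /* illustrative sketch, not part of the build */
static void iemExamplePushSRegIntoU32Slot(uint8_t *pbSlot, uint16_t uSel)
{
    pbSlot[0] = (uint8_t)uSel;          /* only the low word of the slot... */
    pbSlot[1] = (uint8_t)(uSel >> 8);   /* ...is written; bytes 2-3 keep their old value */
}
#endif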
5126
5127
5128/** Opcode 0x0f 0xa0. */
5129FNIEMOP_DEF(iemOp_push_fs)
5130{
5131 IEMOP_MNEMONIC(push_fs, "push fs");
5132 IEMOP_HLP_MIN_386();
5133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5134 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5135}
5136
5137
5138/** Opcode 0x0f 0xa1. */
5139FNIEMOP_DEF(iemOp_pop_fs)
5140{
5141 IEMOP_MNEMONIC(pop_fs, "pop fs");
5142 IEMOP_HLP_MIN_386();
5143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5144 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5145}
5146
5147
5148/** Opcode 0x0f 0xa2. */
5149FNIEMOP_DEF(iemOp_cpuid)
5150{
5151 IEMOP_MNEMONIC(cpuid, "cpuid");
5152 IEMOP_HLP_MIN_486(); /* not all 486es. */
5153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5154 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5155}
5156
5157
5158/**
5159 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5160 * iemOp_bts_Ev_Gv.
5161 */
5162FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5163{
5164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5165 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5166
5167 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5168 {
5169 /* register destination. */
5170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5171 switch (pVCpu->iem.s.enmEffOpSize)
5172 {
5173 case IEMMODE_16BIT:
5174 IEM_MC_BEGIN(3, 0);
5175 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5176 IEM_MC_ARG(uint16_t, u16Src, 1);
5177 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5178
5179 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5180 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5181 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5182 IEM_MC_REF_EFLAGS(pEFlags);
5183 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5184
5185 IEM_MC_ADVANCE_RIP();
5186 IEM_MC_END();
5187 return VINF_SUCCESS;
5188
5189 case IEMMODE_32BIT:
5190 IEM_MC_BEGIN(3, 0);
5191 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5192 IEM_MC_ARG(uint32_t, u32Src, 1);
5193 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5194
5195 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5196 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5197 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5198 IEM_MC_REF_EFLAGS(pEFlags);
5199 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5200
5201 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5202 IEM_MC_ADVANCE_RIP();
5203 IEM_MC_END();
5204 return VINF_SUCCESS;
5205
5206 case IEMMODE_64BIT:
5207 IEM_MC_BEGIN(3, 0);
5208 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5209 IEM_MC_ARG(uint64_t, u64Src, 1);
5210 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5211
5212 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5213 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5214 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5215 IEM_MC_REF_EFLAGS(pEFlags);
5216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5217
5218 IEM_MC_ADVANCE_RIP();
5219 IEM_MC_END();
5220 return VINF_SUCCESS;
5221
5222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5223 }
5224 }
5225 else
5226 {
5227 /* memory destination. */
5228
5229 uint32_t fAccess;
5230 if (pImpl->pfnLockedU16)
5231 fAccess = IEM_ACCESS_DATA_RW;
5232 else /* BT */
5233 fAccess = IEM_ACCESS_DATA_R;
5234
5235 /** @todo test negative bit offsets! */
5236 switch (pVCpu->iem.s.enmEffOpSize)
5237 {
5238 case IEMMODE_16BIT:
5239 IEM_MC_BEGIN(3, 2);
5240 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5241 IEM_MC_ARG(uint16_t, u16Src, 1);
5242 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5244 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5245
5246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5247 if (pImpl->pfnLockedU16)
5248 IEMOP_HLP_DONE_DECODING();
5249 else
5250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5251 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5252 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5253 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5254 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5255 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5256 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5257 IEM_MC_FETCH_EFLAGS(EFlags);
5258
5259 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5260 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5261 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5262 else
5263 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5264 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5265
5266 IEM_MC_COMMIT_EFLAGS(EFlags);
5267 IEM_MC_ADVANCE_RIP();
5268 IEM_MC_END();
5269 return VINF_SUCCESS;
5270
5271 case IEMMODE_32BIT:
5272 IEM_MC_BEGIN(3, 2);
5273 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5274 IEM_MC_ARG(uint32_t, u32Src, 1);
5275 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5277 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5278
5279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5280 if (pImpl->pfnLockedU16)
5281 IEMOP_HLP_DONE_DECODING();
5282 else
5283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5284 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5285 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5286 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5287 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5288 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5289 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5290 IEM_MC_FETCH_EFLAGS(EFlags);
5291
5292 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5293 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5295 else
5296 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5297 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5298
5299 IEM_MC_COMMIT_EFLAGS(EFlags);
5300 IEM_MC_ADVANCE_RIP();
5301 IEM_MC_END();
5302 return VINF_SUCCESS;
5303
5304 case IEMMODE_64BIT:
5305 IEM_MC_BEGIN(3, 2);
5306 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5307 IEM_MC_ARG(uint64_t, u64Src, 1);
5308 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5310 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5311
5312 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5313 if (pImpl->pfnLockedU16)
5314 IEMOP_HLP_DONE_DECODING();
5315 else
5316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5317 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5318 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5319 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5320 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5321 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5322 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5323 IEM_MC_FETCH_EFLAGS(EFlags);
5324
5325 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5326 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5327 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5328 else
5329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5330 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5331
5332 IEM_MC_COMMIT_EFLAGS(EFlags);
5333 IEM_MC_ADVANCE_RIP();
5334 IEM_MC_END();
5335 return VINF_SUCCESS;
5336
5337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5338 }
5339 }
5340}
5341
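/*
 * Illustrative note (not from the original source): with a register bit
 * offset and a memory operand, BT/BTS/BTR/BTC can address bits far outside
 * the operand at the effective address, and the offset is signed.  The
 * worker above therefore splits the offset into a signed element index
 * (SAR by log2 of the operand width, scaled back up to bytes by SHL) plus
 * the bit number within the element (AND with width - 1).  A hypothetical
 * plain C sketch of the 16-bit case, compiled out on purpose:
 */
#if 0
static void iemSketchBitMemAddressing(int16_t i16BitOffset, intptr_t *poffByteAdj, unsigned *piBitNo)
{
    *piBitNo     = (unsigned)(i16BitOffset & 0x0f);   /* bit number within the word */
    *poffByteAdj = (intptr_t)(i16BitOffset >> 4) * 2; /* signed word index scaled to bytes;
                                                         assumes arithmetic right shift, like SAR */
}
#endif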
5342
5343/** Opcode 0x0f 0xa3. */
5344FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5345{
5346 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5347 IEMOP_HLP_MIN_386();
5348 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5349}
5350
5351
5352/**
5353 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5354 */
5355FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5356{
5357 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5358 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5359
5360 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5361 {
5362 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5364
5365 switch (pVCpu->iem.s.enmEffOpSize)
5366 {
5367 case IEMMODE_16BIT:
5368 IEM_MC_BEGIN(4, 0);
5369 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5370 IEM_MC_ARG(uint16_t, u16Src, 1);
5371 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5372 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5373
5374 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5375 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5376 IEM_MC_REF_EFLAGS(pEFlags);
5377 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5378
5379 IEM_MC_ADVANCE_RIP();
5380 IEM_MC_END();
5381 return VINF_SUCCESS;
5382
5383 case IEMMODE_32BIT:
5384 IEM_MC_BEGIN(4, 0);
5385 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5386 IEM_MC_ARG(uint32_t, u32Src, 1);
5387 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5388 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5389
5390 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5391 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5392 IEM_MC_REF_EFLAGS(pEFlags);
5393 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5394
5395 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5396 IEM_MC_ADVANCE_RIP();
5397 IEM_MC_END();
5398 return VINF_SUCCESS;
5399
5400 case IEMMODE_64BIT:
5401 IEM_MC_BEGIN(4, 0);
5402 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5403 IEM_MC_ARG(uint64_t, u64Src, 1);
5404 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5405 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5406
5407 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5408 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5409 IEM_MC_REF_EFLAGS(pEFlags);
5410 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5411
5412 IEM_MC_ADVANCE_RIP();
5413 IEM_MC_END();
5414 return VINF_SUCCESS;
5415
5416 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5417 }
5418 }
5419 else
5420 {
5421 switch (pVCpu->iem.s.enmEffOpSize)
5422 {
5423 case IEMMODE_16BIT:
5424 IEM_MC_BEGIN(4, 2);
5425 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5426 IEM_MC_ARG(uint16_t, u16Src, 1);
5427 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5428 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5430
5431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5432 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5433 IEM_MC_ASSIGN(cShiftArg, cShift);
5434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5435 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5436 IEM_MC_FETCH_EFLAGS(EFlags);
5437 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5438 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5439
5440 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5441 IEM_MC_COMMIT_EFLAGS(EFlags);
5442 IEM_MC_ADVANCE_RIP();
5443 IEM_MC_END();
5444 return VINF_SUCCESS;
5445
5446 case IEMMODE_32BIT:
5447 IEM_MC_BEGIN(4, 2);
5448 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5449 IEM_MC_ARG(uint32_t, u32Src, 1);
5450 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5451 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5453
5454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5455 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5456 IEM_MC_ASSIGN(cShiftArg, cShift);
5457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5458 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5459 IEM_MC_FETCH_EFLAGS(EFlags);
5460 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5461 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5462
5463 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5464 IEM_MC_COMMIT_EFLAGS(EFlags);
5465 IEM_MC_ADVANCE_RIP();
5466 IEM_MC_END();
5467 return VINF_SUCCESS;
5468
5469 case IEMMODE_64BIT:
5470 IEM_MC_BEGIN(4, 2);
5471 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5472 IEM_MC_ARG(uint64_t, u64Src, 1);
5473 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5474 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5476
5477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5478 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5479 IEM_MC_ASSIGN(cShiftArg, cShift);
5480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5481 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5482 IEM_MC_FETCH_EFLAGS(EFlags);
5483 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5484 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5485
5486 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5487 IEM_MC_COMMIT_EFLAGS(EFlags);
5488 IEM_MC_ADVANCE_RIP();
5489 IEM_MC_END();
5490 return VINF_SUCCESS;
5491
5492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5493 }
5494 }
5495}
5496
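/*
 * Illustrative note (not from the original source): SHLD shifts the
 * destination left by the masked count and fills the vacated low bits from
 * the top of the source; SHRD mirrors this.  A hypothetical sketch of the
 * 32-bit SHLD case, assuming the count is already masked and in 1..31 (a
 * zero count leaves the destination and EFLAGS untouched):
 */
#if 0
static uint32_t iemSketchShldU32(uint32_t uDst, uint32_t uSrc, unsigned cShift)
{
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif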
5497
5498/**
5499 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5500 */
5501FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5502{
5503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5504 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5505
5506 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5507 {
5508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5509
5510 switch (pVCpu->iem.s.enmEffOpSize)
5511 {
5512 case IEMMODE_16BIT:
5513 IEM_MC_BEGIN(4, 0);
5514 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5515 IEM_MC_ARG(uint16_t, u16Src, 1);
5516 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5517 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5518
5519 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5520 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5521 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5522 IEM_MC_REF_EFLAGS(pEFlags);
5523 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5524
5525 IEM_MC_ADVANCE_RIP();
5526 IEM_MC_END();
5527 return VINF_SUCCESS;
5528
5529 case IEMMODE_32BIT:
5530 IEM_MC_BEGIN(4, 0);
5531 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5532 IEM_MC_ARG(uint32_t, u32Src, 1);
5533 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5534 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5535
5536 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5537 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5538 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5539 IEM_MC_REF_EFLAGS(pEFlags);
5540 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5541
5542 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5543 IEM_MC_ADVANCE_RIP();
5544 IEM_MC_END();
5545 return VINF_SUCCESS;
5546
5547 case IEMMODE_64BIT:
5548 IEM_MC_BEGIN(4, 0);
5549 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5550 IEM_MC_ARG(uint64_t, u64Src, 1);
5551 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5552 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5553
5554 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5555 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5556 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5557 IEM_MC_REF_EFLAGS(pEFlags);
5558 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5559
5560 IEM_MC_ADVANCE_RIP();
5561 IEM_MC_END();
5562 return VINF_SUCCESS;
5563
5564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5565 }
5566 }
5567 else
5568 {
5569 switch (pVCpu->iem.s.enmEffOpSize)
5570 {
5571 case IEMMODE_16BIT:
5572 IEM_MC_BEGIN(4, 2);
5573 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5574 IEM_MC_ARG(uint16_t, u16Src, 1);
5575 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5576 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5578
5579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5581 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5582 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5583 IEM_MC_FETCH_EFLAGS(EFlags);
5584 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5585 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5586
5587 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5588 IEM_MC_COMMIT_EFLAGS(EFlags);
5589 IEM_MC_ADVANCE_RIP();
5590 IEM_MC_END();
5591 return VINF_SUCCESS;
5592
5593 case IEMMODE_32BIT:
5594 IEM_MC_BEGIN(4, 2);
5595 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5596 IEM_MC_ARG(uint32_t, u32Src, 1);
5597 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5598 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5600
5601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5603 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5604 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5605 IEM_MC_FETCH_EFLAGS(EFlags);
5606 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5607 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5608
5609 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5610 IEM_MC_COMMIT_EFLAGS(EFlags);
5611 IEM_MC_ADVANCE_RIP();
5612 IEM_MC_END();
5613 return VINF_SUCCESS;
5614
5615 case IEMMODE_64BIT:
5616 IEM_MC_BEGIN(4, 2);
5617 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5618 IEM_MC_ARG(uint64_t, u64Src, 1);
5619 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5620 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5622
5623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5625 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5626 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5627 IEM_MC_FETCH_EFLAGS(EFlags);
5628 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5629 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5630
5631 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5632 IEM_MC_COMMIT_EFLAGS(EFlags);
5633 IEM_MC_ADVANCE_RIP();
5634 IEM_MC_END();
5635 return VINF_SUCCESS;
5636
5637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5638 }
5639 }
5640}
5641
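/*
 * Illustrative note (not from the original source): the CL variant above
 * passes the raw CL value to the assembly worker; the architectural
 * masking of the shift count happens there.  A hypothetical sketch of that
 * masking, compiled out:
 */
#if 0
static unsigned iemSketchMaskDblShiftCount(uint8_t bCl, unsigned cOpBits)
{
    /* x86 masks shift counts mod 32, or mod 64 for 64-bit operands; for
       16-bit operands a masked count above 15 yields an undefined result. */
    return bCl & (cOpBits == 64 ? 63 : 31);
}
#endif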
5642
5643
5644/** Opcode 0x0f 0xa4. */
5645FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5646{
5647 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5648 IEMOP_HLP_MIN_386();
5649 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5650}
5651
5652
5653/** Opcode 0x0f 0xa5. */
5654FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5655{
5656 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5657 IEMOP_HLP_MIN_386();
5658 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5659}
5660
5661
5662/** Opcode 0x0f 0xa8. */
5663FNIEMOP_DEF(iemOp_push_gs)
5664{
5665 IEMOP_MNEMONIC(push_gs, "push gs");
5666 IEMOP_HLP_MIN_386();
5667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5668 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5669}
5670
5671
5672/** Opcode 0x0f 0xa9. */
5673FNIEMOP_DEF(iemOp_pop_gs)
5674{
5675 IEMOP_MNEMONIC(pop_gs, "pop gs");
5676 IEMOP_HLP_MIN_386();
5677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5678 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5679}
5680
5681
5682/** Opcode 0x0f 0xaa. */
5683FNIEMOP_STUB(iemOp_rsm);
5684//IEMOP_HLP_MIN_386();
5685
5686
5687/** Opcode 0x0f 0xab. */
5688FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5689{
5690 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5691 IEMOP_HLP_MIN_386();
5692 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5693}
5694
5695
5696/** Opcode 0x0f 0xac. */
5697FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5698{
5699 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5700 IEMOP_HLP_MIN_386();
5701 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5702}
5703
5704
5705/** Opcode 0x0f 0xad. */
5706FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5707{
5708 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5709 IEMOP_HLP_MIN_386();
5710 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5711}
5712
5713
5714/** Opcode 0x0f 0xae mem/0. */
5715FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5716{
5717 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5718 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5719 return IEMOP_RAISE_INVALID_OPCODE();
5720
5721 IEM_MC_BEGIN(3, 1);
5722 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5723 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5724 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5727 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5728 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5729 IEM_MC_END();
5730 return VINF_SUCCESS;
5731}
5732
5733
5734/** Opcode 0x0f 0xae mem/1. */
5735FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5736{
5737 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5738 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5739 return IEMOP_RAISE_INVALID_OPCODE();
5740
5741 IEM_MC_BEGIN(3, 1);
5742 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5743 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5744 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5747 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5748 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5749 IEM_MC_END();
5750 return VINF_SUCCESS;
5751}
5752
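/*
 * Illustrative note (not from the original source): the m512 operand of
 * FXSAVE/FXRSTOR is the 512-byte, 16-byte aligned image defined by the
 * SDM.  A hypothetical sketch of its layout (field names are made up):
 */
#if 0
typedef struct IEMSKETCHFXSTATE
{
    uint16_t FCW, FSW;          /* x87 control and status words */
    uint8_t  FTW;               /* abridged tag word */
    uint8_t  bRsvd;
    uint16_t FOP;               /* last x87 opcode */
    uint64_t FPUIP;             /* bytes 8..15: FIP32+CS or FIP64, depending on REX.W */
    uint64_t FPUDP;             /* bytes 16..23: likewise for the data pointer */
    uint32_t MXCSR, MXCSR_MASK;
    uint8_t  aRegs[8][16];      /* ST(0)..ST(7), 80-bit values in 128-bit slots */
    uint8_t  aXMM[16][16];      /* XMM0..XMM15 (only 8 saved outside 64-bit mode) */
    uint8_t  abRsvd[96];        /* reserved/available, pads the image to 512 bytes */
} IEMSKETCHFXSTATE;
#endif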
5753
5754/** Opcode 0x0f 0xae mem/2. */
5755FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5756
5757/** Opcode 0x0f 0xae mem/3. */
5758FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5759
5760/** Opcode 0x0f 0xae mem/4. */
5761FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5762
5763/** Opcode 0x0f 0xae mem/5. */
5764FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5765
5766/** Opcode 0x0f 0xae mem/6. */
5767FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5768
5769/** Opcode 0x0f 0xae mem/7. */
5770FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5771
5772
5773/** Opcode 0x0f 0xae 11b/5. */
5774FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5775{
5776 RT_NOREF_PV(bRm);
5777 IEMOP_MNEMONIC(lfence, "lfence");
5778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5779 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5780 return IEMOP_RAISE_INVALID_OPCODE();
5781
5782 IEM_MC_BEGIN(0, 0);
5783 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5784 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5785 else
5786 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5787 IEM_MC_ADVANCE_RIP();
5788 IEM_MC_END();
5789 return VINF_SUCCESS;
5790}
5791
5792
5793/** Opcode 0x0f 0xae 11b/6. */
5794FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5795{
5796 RT_NOREF_PV(bRm);
5797 IEMOP_MNEMONIC(mfence, "mfence");
5798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5799 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5800 return IEMOP_RAISE_INVALID_OPCODE();
5801
5802 IEM_MC_BEGIN(0, 0);
5803 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5804 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5805 else
5806 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5807 IEM_MC_ADVANCE_RIP();
5808 IEM_MC_END();
5809 return VINF_SUCCESS;
5810}
5811
5812
5813/** Opcode 0x0f 0xae 11b/7. */
5814FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5815{
5816 RT_NOREF_PV(bRm);
5817 IEMOP_MNEMONIC(sfence, "sfence");
5818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5819 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5820 return IEMOP_RAISE_INVALID_OPCODE();
5821
5822 IEM_MC_BEGIN(0, 0);
5823 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5824 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5825 else
5826 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5827 IEM_MC_ADVANCE_RIP();
5828 IEM_MC_END();
5829 return VINF_SUCCESS;
5830}
5831
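/*
 * Illustrative note (not from the original source): the three fence
 * workers above fall back to iemAImpl_alt_mem_fence when the host lacks
 * SSE2.  A hypothetical sketch of such a fallback - a locked RMW on the
 * stack is a full barrier on every IA-32 CPU, including pre-SSE2 ones:
 */
#if 0
static void iemSketchAltMemFence(void)
{
# if defined(__GNUC__) && defined(__i386__)
    __asm__ __volatile__("lock; addl $0, (%%esp)" ::: "memory", "cc");
# endif
}
#endif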
5832
5833/** Opcode 0xf3 0x0f 0xae 11b/0. */
5834FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5835
5836/** Opcode 0xf3 0x0f 0xae 11b/1. */
5837FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5838
5839/** Opcode 0xf3 0x0f 0xae 11b/2. */
5840FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5841
5842/** Opcode 0xf3 0x0f 0xae 11b/3. */
5843FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5844
5845
5846/** Opcode 0x0f 0xae. */
5847FNIEMOP_DEF(iemOp_Grp15)
5848{
5849 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
5850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5851 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5852 {
5853 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5854 {
5855 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5856 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5857 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5858 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5859 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5860 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5861 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt, bRm);
5862 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5864 }
5865 }
5866 else
5867 {
5868 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5869 {
5870 case 0:
5871 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5872 {
5873 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5874 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5875 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5876 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5877 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5878 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5879 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5880 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5881 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5882 }
5883 break;
5884
5885 case IEM_OP_PRF_REPZ:
5886 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5887 {
5888 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5889 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5890 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5891 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5892 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5893 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5894 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5895 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5897 }
5898 break;
5899
5900 default:
5901 return IEMOP_RAISE_INVALID_OPCODE();
5902 }
5903 }
5904}
5905
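/*
 * Illustrative note (not from the original source): the Grp15 dispatcher
 * above first tests the ModRM mod field (11b selects the register forms),
 * then switches on the reg field, which acts as an opcode extension, and
 * for the register forms additionally on the prefix bytes.  A hypothetical
 * sketch of the plain ModRM split (the real decoder also folds in REX):
 */
#if 0
static void iemSketchSplitModRm(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = bRm >> 6;        /* 11b = register operand, otherwise memory */
    *pbReg = (bRm >> 3) & 7;  /* opcode extension for group opcodes */
    *pbRm  = bRm & 7;         /* register/memory operand */
}
#endif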
5906
5907/** Opcode 0x0f 0xaf. */
5908FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5909{
5910 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5911 IEMOP_HLP_MIN_386();
5912 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5913 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5914}
5915
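/*
 * Illustrative note (not from the original source): two-operand IMUL
 * truncates the full product to the operand size and sets CF and OF when
 * that truncation loses significant bits; SF/ZF/AF/PF are undefined, as
 * the verification macro above records.  A hypothetical 32-bit sketch:
 */
#if 0
static int32_t iemSketchImulTwoS32(int32_t i32Dst, int32_t i32Src, bool *pfCfOf)
{
    int64_t const i64Full = (int64_t)i32Dst * i32Src;
    *pfCfOf = i64Full != (int32_t)i64Full;  /* CF=OF=1 when the product was truncated */
    return (int32_t)i64Full;
}
#endif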
5916
5917/** Opcode 0x0f 0xb0. */
5918FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5919{
5920 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5921 IEMOP_HLP_MIN_486();
5922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5923
5924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5925 {
5926 IEMOP_HLP_DONE_DECODING();
5927 IEM_MC_BEGIN(4, 0);
5928 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5929 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5930 IEM_MC_ARG(uint8_t, u8Src, 2);
5931 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5932
5933 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5934 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5935 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5936 IEM_MC_REF_EFLAGS(pEFlags);
5937 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5938 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5939 else
5940 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5941
5942 IEM_MC_ADVANCE_RIP();
5943 IEM_MC_END();
5944 }
5945 else
5946 {
5947 IEM_MC_BEGIN(4, 3);
5948 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5949 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5950 IEM_MC_ARG(uint8_t, u8Src, 2);
5951 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5953 IEM_MC_LOCAL(uint8_t, u8Al);
5954
5955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5956 IEMOP_HLP_DONE_DECODING();
5957 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5958 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5959 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5960 IEM_MC_FETCH_EFLAGS(EFlags);
5961 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5962 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5963 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5964 else
5965 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5966
5967 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5968 IEM_MC_COMMIT_EFLAGS(EFlags);
5969 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5970 IEM_MC_ADVANCE_RIP();
5971 IEM_MC_END();
5972 }
5973 return VINF_SUCCESS;
5974}
5975
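/*
 * Illustrative note (not from the original source): CMPXCHG compares the
 * accumulator with the destination, stores the source on a match and loads
 * the destination into the accumulator otherwise; EFLAGS are those of the
 * implied CMP.  A hypothetical sketch modelling only ZF (note that the
 * locked form always performs a write, even on mismatch):
 */
#if 0
static void iemSketchCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, bool *pfZf)
{
    if (*pu8Al == *pu8Dst)
    {
        *pfZf   = true;
        *pu8Dst = u8Src;    /* accumulator matched: store the source */
    }
    else
    {
        *pfZf  = false;
        *pu8Al = *pu8Dst;   /* mismatch: load the destination into AL */
    }
}
#endif
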
5976/** Opcode 0x0f 0xb1. */
5977FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5978{
5979 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5980 IEMOP_HLP_MIN_486();
5981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5982
5983 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5984 {
5985 IEMOP_HLP_DONE_DECODING();
5986 switch (pVCpu->iem.s.enmEffOpSize)
5987 {
5988 case IEMMODE_16BIT:
5989 IEM_MC_BEGIN(4, 0);
5990 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5991 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5992 IEM_MC_ARG(uint16_t, u16Src, 2);
5993 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5994
5995 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5996 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5997 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5998 IEM_MC_REF_EFLAGS(pEFlags);
5999 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6000 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6001 else
6002 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6003
6004 IEM_MC_ADVANCE_RIP();
6005 IEM_MC_END();
6006 return VINF_SUCCESS;
6007
6008 case IEMMODE_32BIT:
6009 IEM_MC_BEGIN(4, 0);
6010 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6011 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6012 IEM_MC_ARG(uint32_t, u32Src, 2);
6013 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6014
6015 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6016 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6017 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6018 IEM_MC_REF_EFLAGS(pEFlags);
6019 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6020 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6021 else
6022 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6023
6024 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6025 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6026 IEM_MC_ADVANCE_RIP();
6027 IEM_MC_END();
6028 return VINF_SUCCESS;
6029
6030 case IEMMODE_64BIT:
6031 IEM_MC_BEGIN(4, 0);
6032 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6033 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6034#ifdef RT_ARCH_X86
6035 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6036#else
6037 IEM_MC_ARG(uint64_t, u64Src, 2);
6038#endif
6039 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6040
6041 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6042 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6043 IEM_MC_REF_EFLAGS(pEFlags);
6044#ifdef RT_ARCH_X86
6045 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6046 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6047 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6048 else
6049 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6050#else
6051 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6052 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6053 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6054 else
6055 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6056#endif
6057
6058 IEM_MC_ADVANCE_RIP();
6059 IEM_MC_END();
6060 return VINF_SUCCESS;
6061
6062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6063 }
6064 }
6065 else
6066 {
6067 switch (pVCpu->iem.s.enmEffOpSize)
6068 {
6069 case IEMMODE_16BIT:
6070 IEM_MC_BEGIN(4, 3);
6071 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6072 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6073 IEM_MC_ARG(uint16_t, u16Src, 2);
6074 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6076 IEM_MC_LOCAL(uint16_t, u16Ax);
6077
6078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6079 IEMOP_HLP_DONE_DECODING();
6080 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6081 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6082 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6083 IEM_MC_FETCH_EFLAGS(EFlags);
6084 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6085 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6086 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6087 else
6088 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6089
6090 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6091 IEM_MC_COMMIT_EFLAGS(EFlags);
6092 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6093 IEM_MC_ADVANCE_RIP();
6094 IEM_MC_END();
6095 return VINF_SUCCESS;
6096
6097 case IEMMODE_32BIT:
6098 IEM_MC_BEGIN(4, 3);
6099 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6100 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6101 IEM_MC_ARG(uint32_t, u32Src, 2);
6102 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6104 IEM_MC_LOCAL(uint32_t, u32Eax);
6105
6106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6107 IEMOP_HLP_DONE_DECODING();
6108 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6109 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6110 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6111 IEM_MC_FETCH_EFLAGS(EFlags);
6112 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6113 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6114 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6115 else
6116 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6117
6118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6119 IEM_MC_COMMIT_EFLAGS(EFlags);
6120 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6121 IEM_MC_ADVANCE_RIP();
6122 IEM_MC_END();
6123 return VINF_SUCCESS;
6124
6125 case IEMMODE_64BIT:
6126 IEM_MC_BEGIN(4, 3);
6127 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6128 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6129#ifdef RT_ARCH_X86
6130 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6131#else
6132 IEM_MC_ARG(uint64_t, u64Src, 2);
6133#endif
6134 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6136 IEM_MC_LOCAL(uint64_t, u64Rax);
6137
6138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6139 IEMOP_HLP_DONE_DECODING();
6140 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6141 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6142 IEM_MC_FETCH_EFLAGS(EFlags);
6143 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6144#ifdef RT_ARCH_X86
6145 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6146 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6147 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6148 else
6149 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6150#else
6151 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6152 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6153 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6154 else
6155 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6156#endif
6157
6158 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6159 IEM_MC_COMMIT_EFLAGS(EFlags);
6160 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6161 IEM_MC_ADVANCE_RIP();
6162 IEM_MC_END();
6163 return VINF_SUCCESS;
6164
6165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6166 }
6167 }
6168}
6169
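/*
 * Illustrative note (not from the original source): the RT_ARCH_X86 paths
 * above pass the 64-bit source by reference because a 32-bit host cannot
 * hold it in a single register and the worker has to go through CMPXCHG8B.
 * A hypothetical portable equivalent using the GCC/Clang builtin, compiled
 * out:
 */
#if 0
static bool iemSketchCmpXchgU64(volatile uint64_t *pu64Dst, uint64_t *pu64Expected, uint64_t u64New)
{
    /* On failure *pu64Expected receives the old value, like CMPXCHG updates RAX. */
    return __atomic_compare_exchange_n(pu64Dst, pu64Expected, u64New, false /*fWeak*/,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
#endif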
6170
6171FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6172{
6173 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6174 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6175
6176 switch (pVCpu->iem.s.enmEffOpSize)
6177 {
6178 case IEMMODE_16BIT:
6179 IEM_MC_BEGIN(5, 1);
6180 IEM_MC_ARG(uint16_t, uSel, 0);
6181 IEM_MC_ARG(uint16_t, offSeg, 1);
6182 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6183 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6184 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6185 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6188 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6189 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6190 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6191 IEM_MC_END();
6192 return VINF_SUCCESS;
6193
6194 case IEMMODE_32BIT:
6195 IEM_MC_BEGIN(5, 1);
6196 IEM_MC_ARG(uint16_t, uSel, 0);
6197 IEM_MC_ARG(uint32_t, offSeg, 1);
6198 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6199 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6200 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6201 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6204 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6205 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6206 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6207 IEM_MC_END();
6208 return VINF_SUCCESS;
6209
6210 case IEMMODE_64BIT:
6211 IEM_MC_BEGIN(5, 1);
6212 IEM_MC_ARG(uint16_t, uSel, 0);
6213 IEM_MC_ARG(uint64_t, offSeg, 1);
6214 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6215 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6216 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6217 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6220 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6221 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6222 else
6223 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6224 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6225 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6226 IEM_MC_END();
6227 return VINF_SUCCESS;
6228
6229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6230 }
6231}
6232
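/*
 * Illustrative note (not from the original source): LSS/LFS/LGS read an
 * m16:16, m16:32 or m16:64 far pointer - offset first, 16-bit selector
 * right after it - which is why the selector fetches above use a
 * displacement of 2, 4 or 8.  A hypothetical sketch of the 32-bit layout:
 */
#if 0
#pragma pack(1)
typedef struct IEMSKETCHFARPTR32
{
    uint32_t off;   /* offset, at GCPtrEff+0 */
    uint16_t sel;   /* selector, at GCPtrEff+4 */
} IEMSKETCHFARPTR32;
#pragma pack()
#endif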
6233
6234/** Opcode 0x0f 0xb2. */
6235FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6236{
6237 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6238 IEMOP_HLP_MIN_386();
6239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6240 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6241 return IEMOP_RAISE_INVALID_OPCODE();
6242 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6243}
6244
6245
6246/** Opcode 0x0f 0xb3. */
6247FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6248{
6249 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6250 IEMOP_HLP_MIN_386();
6251 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6252}
6253
6254
6255/** Opcode 0x0f 0xb4. */
6256FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6257{
6258 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6259 IEMOP_HLP_MIN_386();
6260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6262 return IEMOP_RAISE_INVALID_OPCODE();
6263 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6264}
6265
6266
6267/** Opcode 0x0f 0xb5. */
6268FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6269{
6270 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6271 IEMOP_HLP_MIN_386();
6272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6273 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6274 return IEMOP_RAISE_INVALID_OPCODE();
6275 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6276}
6277
6278
6279/** Opcode 0x0f 0xb6. */
6280FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6281{
6282 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6283 IEMOP_HLP_MIN_386();
6284
6285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6286
6287 /*
6288 * If rm is denoting a register, no more instruction bytes.
6289 */
6290 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6291 {
6292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6293 switch (pVCpu->iem.s.enmEffOpSize)
6294 {
6295 case IEMMODE_16BIT:
6296 IEM_MC_BEGIN(0, 1);
6297 IEM_MC_LOCAL(uint16_t, u16Value);
6298 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6299 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6300 IEM_MC_ADVANCE_RIP();
6301 IEM_MC_END();
6302 return VINF_SUCCESS;
6303
6304 case IEMMODE_32BIT:
6305 IEM_MC_BEGIN(0, 1);
6306 IEM_MC_LOCAL(uint32_t, u32Value);
6307 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6308 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6309 IEM_MC_ADVANCE_RIP();
6310 IEM_MC_END();
6311 return VINF_SUCCESS;
6312
6313 case IEMMODE_64BIT:
6314 IEM_MC_BEGIN(0, 1);
6315 IEM_MC_LOCAL(uint64_t, u64Value);
6316 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6317 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6318 IEM_MC_ADVANCE_RIP();
6319 IEM_MC_END();
6320 return VINF_SUCCESS;
6321
6322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6323 }
6324 }
6325 else
6326 {
6327 /*
6328 * We're loading a register from memory.
6329 */
6330 switch (pVCpu->iem.s.enmEffOpSize)
6331 {
6332 case IEMMODE_16BIT:
6333 IEM_MC_BEGIN(0, 2);
6334 IEM_MC_LOCAL(uint16_t, u16Value);
6335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6338 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6339 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6340 IEM_MC_ADVANCE_RIP();
6341 IEM_MC_END();
6342 return VINF_SUCCESS;
6343
6344 case IEMMODE_32BIT:
6345 IEM_MC_BEGIN(0, 2);
6346 IEM_MC_LOCAL(uint32_t, u32Value);
6347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6350 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6351 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6352 IEM_MC_ADVANCE_RIP();
6353 IEM_MC_END();
6354 return VINF_SUCCESS;
6355
6356 case IEMMODE_64BIT:
6357 IEM_MC_BEGIN(0, 2);
6358 IEM_MC_LOCAL(uint64_t, u64Value);
6359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6362 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6363 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6364 IEM_MC_ADVANCE_RIP();
6365 IEM_MC_END();
6366 return VINF_SUCCESS;
6367
6368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6369 }
6370 }
6371}
6372
6373
6374/** Opcode 0x0f 0xb7. */
6375FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6376{
6377 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6378 IEMOP_HLP_MIN_386();
6379
6380 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6381
6382 /** @todo Not entirely sure how the operand size prefix is handled here,
6383 * assuming that it will be ignored. Would be nice to have a few
6384 * tests for this. */
6385 /*
6386 * If rm is denoting a register, no more instruction bytes.
6387 */
6388 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6389 {
6390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6391 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6392 {
6393 IEM_MC_BEGIN(0, 1);
6394 IEM_MC_LOCAL(uint32_t, u32Value);
6395 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6396 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 }
6400 else
6401 {
6402 IEM_MC_BEGIN(0, 1);
6403 IEM_MC_LOCAL(uint64_t, u64Value);
6404 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6405 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6406 IEM_MC_ADVANCE_RIP();
6407 IEM_MC_END();
6408 }
6409 }
6410 else
6411 {
6412 /*
6413 * We're loading a register from memory.
6414 */
6415 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6416 {
6417 IEM_MC_BEGIN(0, 2);
6418 IEM_MC_LOCAL(uint32_t, u32Value);
6419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6422 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6423 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6424 IEM_MC_ADVANCE_RIP();
6425 IEM_MC_END();
6426 }
6427 else
6428 {
6429 IEM_MC_BEGIN(0, 2);
6430 IEM_MC_LOCAL(uint64_t, u64Value);
6431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6434 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6435 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6436 IEM_MC_ADVANCE_RIP();
6437 IEM_MC_END();
6438 }
6439 }
6440 return VINF_SUCCESS;
6441}
6442
6443
6444/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6445FNIEMOP_UD_STUB(iemOp_jmpe);
6446/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6447FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6448
6449
6450/** Opcode 0x0f 0xb9. */
6451FNIEMOP_DEF(iemOp_Grp10)
6452{
6453 Log(("iemOp_Grp10 -> #UD\n"));
6454 return IEMOP_RAISE_INVALID_OPCODE();
6455}
6456
6457
6458/** Opcode 0x0f 0xba. */
6459FNIEMOP_DEF(iemOp_Grp8)
6460{
6461 IEMOP_HLP_MIN_386();
6462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6463 PCIEMOPBINSIZES pImpl;
6464 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6465 {
6466 case 0: case 1: case 2: case 3:
6467 return IEMOP_RAISE_INVALID_OPCODE();
6468 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6469 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6470 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6471 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6473 }
6474 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6475
6476 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6477 {
6478 /* register destination. */
6479 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6481
6482 switch (pVCpu->iem.s.enmEffOpSize)
6483 {
6484 case IEMMODE_16BIT:
6485 IEM_MC_BEGIN(3, 0);
6486 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6487 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6488 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6489
6490 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6491 IEM_MC_REF_EFLAGS(pEFlags);
6492 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6493
6494 IEM_MC_ADVANCE_RIP();
6495 IEM_MC_END();
6496 return VINF_SUCCESS;
6497
6498 case IEMMODE_32BIT:
6499 IEM_MC_BEGIN(3, 0);
6500 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6501 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6502 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6503
6504 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6505 IEM_MC_REF_EFLAGS(pEFlags);
6506 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6507
6508 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6509 IEM_MC_ADVANCE_RIP();
6510 IEM_MC_END();
6511 return VINF_SUCCESS;
6512
6513 case IEMMODE_64BIT:
6514 IEM_MC_BEGIN(3, 0);
6515 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6516 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6517 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6518
6519 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6520 IEM_MC_REF_EFLAGS(pEFlags);
6521 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6522
6523 IEM_MC_ADVANCE_RIP();
6524 IEM_MC_END();
6525 return VINF_SUCCESS;
6526
6527 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6528 }
6529 }
6530 else
6531 {
6532 /* memory destination. */
6533
6534 uint32_t fAccess;
6535 if (pImpl->pfnLockedU16)
6536 fAccess = IEM_ACCESS_DATA_RW;
6537 else /* BT */
6538 fAccess = IEM_ACCESS_DATA_R;
6539
6540 /** @todo test negative bit offsets! */
6541 switch (pVCpu->iem.s.enmEffOpSize)
6542 {
6543 case IEMMODE_16BIT:
6544 IEM_MC_BEGIN(3, 1);
6545 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6546 IEM_MC_ARG(uint16_t, u16Src, 1);
6547 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6549
6550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6551 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6552 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6553 if (pImpl->pfnLockedU16)
6554 IEMOP_HLP_DONE_DECODING();
6555 else
6556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6557 IEM_MC_FETCH_EFLAGS(EFlags);
6558 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6559 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6560 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6561 else
6562 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6563 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6564
6565 IEM_MC_COMMIT_EFLAGS(EFlags);
6566 IEM_MC_ADVANCE_RIP();
6567 IEM_MC_END();
6568 return VINF_SUCCESS;
6569
6570 case IEMMODE_32BIT:
6571 IEM_MC_BEGIN(3, 1);
6572 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6573 IEM_MC_ARG(uint32_t, u32Src, 1);
6574 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6576
6577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6578 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6579 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6580 if (pImpl->pfnLockedU16)
6581 IEMOP_HLP_DONE_DECODING();
6582 else
6583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6584 IEM_MC_FETCH_EFLAGS(EFlags);
6585 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6586 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6587 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6588 else
6589 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6590 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6591
6592 IEM_MC_COMMIT_EFLAGS(EFlags);
6593 IEM_MC_ADVANCE_RIP();
6594 IEM_MC_END();
6595 return VINF_SUCCESS;
6596
6597 case IEMMODE_64BIT:
6598 IEM_MC_BEGIN(3, 1);
6599 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6600 IEM_MC_ARG(uint64_t, u64Src, 1);
6601 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6603
6604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6605 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6606 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6607 if (pImpl->pfnLockedU16)
6608 IEMOP_HLP_DONE_DECODING();
6609 else
6610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6611 IEM_MC_FETCH_EFLAGS(EFlags);
6612 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6613 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6614 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6615 else
6616 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6617 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6618
6619 IEM_MC_COMMIT_EFLAGS(EFlags);
6620 IEM_MC_ADVANCE_RIP();
6621 IEM_MC_END();
6622 return VINF_SUCCESS;
6623
6624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6625 }
6626 }
6627
6628}
6629
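/*
 * Illustrative note (not from the original source): unlike the Gv forms,
 * the Grp8 Ib forms above never adjust the effective address; the
 * immediate bit offset is simply reduced modulo the operand width, as the
 * u8Bit masking shows.  A hypothetical sketch:
 */
#if 0
static unsigned iemSketchBtImmBitNo(uint8_t u8Bit, unsigned cOpBits)
{
    return u8Bit & (cOpBits - 1);   /* cOpBits is 16, 32 or 64 */
}
#endif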
6630
6631/** Opcode 0x0f 0xbb. */
6632FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6633{
6634 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6635 IEMOP_HLP_MIN_386();
6636 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6637}
6638
6639
6640/** Opcode 0x0f 0xbc. */
6641FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6642{
6643 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6644 IEMOP_HLP_MIN_386();
6645 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6646 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6647}
6648
6649
6650/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6651FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6652
6653
6654/** Opcode 0x0f 0xbd. */
6655FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6656{
6657 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6658 IEMOP_HLP_MIN_386();
6659 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6660 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6661}
6662
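/*
 * Illustrative note (not from the original source): BSF scans up from bit
 * 0 for the lowest set bit and BSR scans down from the top for the
 * highest; a zero source sets ZF and leaves the destination
 * architecturally undefined, and the other flags are undefined too,
 * matching the verification macros above.  A hypothetical BSF sketch:
 */
#if 0
static bool iemSketchBsfU32(uint32_t u32Src, unsigned *piBit)
{
    if (!u32Src)
        return false;           /* ZF=1, destination undefined */
    unsigned iBit = 0;
    while (!(u32Src & 1))
    {
        u32Src >>= 1;
        iBit++;
    }
    *piBit = iBit;              /* ZF=0 */
    return true;
}
#endif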
6663
6664/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6665FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6666
6667
6668/** Opcode 0x0f 0xbe. */
6669FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6670{
6671 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6672 IEMOP_HLP_MIN_386();
6673
6674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6675
6676 /*
6677 * If rm is denoting a register, no more instruction bytes.
6678 */
6679 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6680 {
6681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6682 switch (pVCpu->iem.s.enmEffOpSize)
6683 {
6684 case IEMMODE_16BIT:
6685 IEM_MC_BEGIN(0, 1);
6686 IEM_MC_LOCAL(uint16_t, u16Value);
6687 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6688 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6689 IEM_MC_ADVANCE_RIP();
6690 IEM_MC_END();
6691 return VINF_SUCCESS;
6692
6693 case IEMMODE_32BIT:
6694 IEM_MC_BEGIN(0, 1);
6695 IEM_MC_LOCAL(uint32_t, u32Value);
6696 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6697 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6698 IEM_MC_ADVANCE_RIP();
6699 IEM_MC_END();
6700 return VINF_SUCCESS;
6701
6702 case IEMMODE_64BIT:
6703 IEM_MC_BEGIN(0, 1);
6704 IEM_MC_LOCAL(uint64_t, u64Value);
6705 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6706 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6707 IEM_MC_ADVANCE_RIP();
6708 IEM_MC_END();
6709 return VINF_SUCCESS;
6710
6711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6712 }
6713 }
6714 else
6715 {
6716 /*
6717 * We're loading a register from memory.
6718 */
6719 switch (pVCpu->iem.s.enmEffOpSize)
6720 {
6721 case IEMMODE_16BIT:
6722 IEM_MC_BEGIN(0, 2);
6723 IEM_MC_LOCAL(uint16_t, u16Value);
6724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6727 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6728 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6729 IEM_MC_ADVANCE_RIP();
6730 IEM_MC_END();
6731 return VINF_SUCCESS;
6732
6733 case IEMMODE_32BIT:
6734 IEM_MC_BEGIN(0, 2);
6735 IEM_MC_LOCAL(uint32_t, u32Value);
6736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6739 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6740 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6741 IEM_MC_ADVANCE_RIP();
6742 IEM_MC_END();
6743 return VINF_SUCCESS;
6744
6745 case IEMMODE_64BIT:
6746 IEM_MC_BEGIN(0, 2);
6747 IEM_MC_LOCAL(uint64_t, u64Value);
6748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6751 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6752 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6753 IEM_MC_ADVANCE_RIP();
6754 IEM_MC_END();
6755 return VINF_SUCCESS;
6756
6757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6758 }
6759 }
6760}
6761
6762
6763/** Opcode 0x0f 0xbf. */
6764FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6765{
6766 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6767 IEMOP_HLP_MIN_386();
6768
6769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6770
6771 /** @todo Not entirely sure how the operand size prefix is handled here,
6772 * assuming that it will be ignored. Would be nice to have a few
6773 * tests for this. */
6774 /*
6775 * If rm is denoting a register, no more instruction bytes.
6776 */
6777 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6778 {
6779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6780 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6781 {
6782 IEM_MC_BEGIN(0, 1);
6783 IEM_MC_LOCAL(uint32_t, u32Value);
6784 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6785 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6786 IEM_MC_ADVANCE_RIP();
6787 IEM_MC_END();
6788 }
6789 else
6790 {
6791 IEM_MC_BEGIN(0, 1);
6792 IEM_MC_LOCAL(uint64_t, u64Value);
6793 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6794 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6795 IEM_MC_ADVANCE_RIP();
6796 IEM_MC_END();
6797 }
6798 }
6799 else
6800 {
6801 /*
6802 * We're loading a register from memory.
6803 */
6804 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6805 {
6806 IEM_MC_BEGIN(0, 2);
6807 IEM_MC_LOCAL(uint32_t, u32Value);
6808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6811 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6812 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6813 IEM_MC_ADVANCE_RIP();
6814 IEM_MC_END();
6815 }
6816 else
6817 {
6818 IEM_MC_BEGIN(0, 2);
6819 IEM_MC_LOCAL(uint64_t, u64Value);
6820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6823 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6824 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6825 IEM_MC_ADVANCE_RIP();
6826 IEM_MC_END();
6827 }
6828 }
6829 return VINF_SUCCESS;
6830}
6831
6832
6833/** Opcode 0x0f 0xc0. */
6834FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6835{
6836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6837 IEMOP_HLP_MIN_486();
6838 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6839
6840 /*
6841 * If rm is denoting a register, no more instruction bytes.
6842 */
6843 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6844 {
6845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6846
6847 IEM_MC_BEGIN(3, 0);
6848 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6849 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6850 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6851
6852 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6853 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6854 IEM_MC_REF_EFLAGS(pEFlags);
6855 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6856
6857 IEM_MC_ADVANCE_RIP();
6858 IEM_MC_END();
6859 }
6860 else
6861 {
6862 /*
6863 * We're accessing memory.
6864 */
6865 IEM_MC_BEGIN(3, 3);
6866 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6867 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6868 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6869 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6871
6872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6873 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6874 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6875 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6876 IEM_MC_FETCH_EFLAGS(EFlags);
6877 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6878 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6879 else
6880 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6881
6882 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6883 IEM_MC_COMMIT_EFLAGS(EFlags);
6884 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6885 IEM_MC_ADVANCE_RIP();
6886 IEM_MC_END();
6887 return VINF_SUCCESS;
6888 }
6889 return VINF_SUCCESS;
6890}
6891
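/*
 * Illustrative note (not from the original source): XADD exchanges the
 * destination with the register and then adds, with EFLAGS coming from the
 * ADD; the memory form above keeps the register value in a local copy so
 * the register is only written back after the memory commit succeeds.  A
 * hypothetical sketch:
 */
#if 0
static void iemSketchXaddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8Tmp = *pu8Dst;  /* exchange ... */
    *pu8Dst = u8Tmp + *pu8Reg;      /* ... and add; flags are those of this ADD */
    *pu8Reg = u8Tmp;
}
#endif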
6892
6893/** Opcode 0x0f 0xc1. */
6894FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6895{
6896 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6897 IEMOP_HLP_MIN_486();
6898 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6899
6900 /*
6901 * If rm is denoting a register, no more instruction bytes.
6902 */
6903 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6904 {
6905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6906
6907 switch (pVCpu->iem.s.enmEffOpSize)
6908 {
6909 case IEMMODE_16BIT:
6910 IEM_MC_BEGIN(3, 0);
6911 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6912 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6913 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6914
6915 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6916 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6917 IEM_MC_REF_EFLAGS(pEFlags);
6918 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6919
6920 IEM_MC_ADVANCE_RIP();
6921 IEM_MC_END();
6922 return VINF_SUCCESS;
6923
6924 case IEMMODE_32BIT:
6925 IEM_MC_BEGIN(3, 0);
6926 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6927 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6928 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6929
6930 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6931 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6932 IEM_MC_REF_EFLAGS(pEFlags);
6933 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6934
6935 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6936 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6937 IEM_MC_ADVANCE_RIP();
6938 IEM_MC_END();
6939 return VINF_SUCCESS;
6940
6941 case IEMMODE_64BIT:
6942 IEM_MC_BEGIN(3, 0);
6943 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6944 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6945 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6946
6947 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6948 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6949 IEM_MC_REF_EFLAGS(pEFlags);
6950 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6951
6952 IEM_MC_ADVANCE_RIP();
6953 IEM_MC_END();
6954 return VINF_SUCCESS;
6955
6956 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6957 }
6958 }
6959 else
6960 {
6961 /*
6962 * We're accessing memory.
6963 */
6964 switch (pVCpu->iem.s.enmEffOpSize)
6965 {
6966 case IEMMODE_16BIT:
6967 IEM_MC_BEGIN(3, 3);
6968 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6969 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6970 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6971 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6973
6974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6975 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6976 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6977 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6978 IEM_MC_FETCH_EFLAGS(EFlags);
6979 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6980 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6981 else
6982 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6983
6984 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6985 IEM_MC_COMMIT_EFLAGS(EFlags);
6986 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6987 IEM_MC_ADVANCE_RIP();
6988 IEM_MC_END();
6989 return VINF_SUCCESS;
6990
6991 case IEMMODE_32BIT:
6992 IEM_MC_BEGIN(3, 3);
6993 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6994 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6995 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6996 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6997 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6998
6999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7000 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7001 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7002 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7003 IEM_MC_FETCH_EFLAGS(EFlags);
7004 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7005 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7006 else
7007 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7008
7009 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7010 IEM_MC_COMMIT_EFLAGS(EFlags);
7011 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7012 IEM_MC_ADVANCE_RIP();
7013 IEM_MC_END();
7014 return VINF_SUCCESS;
7015
7016 case IEMMODE_64BIT:
7017 IEM_MC_BEGIN(3, 3);
7018 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7019 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7020 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7021 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7023
7024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7025 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7026 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7027 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7028 IEM_MC_FETCH_EFLAGS(EFlags);
7029 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7030 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7031 else
7032 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7033
7034 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7035 IEM_MC_COMMIT_EFLAGS(EFlags);
7036 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7037 IEM_MC_ADVANCE_RIP();
7038 IEM_MC_END();
7039 return VINF_SUCCESS;
7040
7041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7042 }
7043 }
7044}
7045
7046
7047/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7048FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7049/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7050FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7051/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7052FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7053/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7054FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7055
7056
7057/** Opcode 0x0f 0xc3. */
7058FNIEMOP_DEF(iemOp_movnti_My_Gy)
7059{
7060 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
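    /* MOVNTI stores a general register to memory with a non-temporal hint
       (minimizing cache pollution); it is an SSE2 instruction, hence the
       fSse2 checks below. */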
7061
7062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7063
7064 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7065 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7066 {
7067 switch (pVCpu->iem.s.enmEffOpSize)
7068 {
7069 case IEMMODE_32BIT:
7070 IEM_MC_BEGIN(0, 2);
7071 IEM_MC_LOCAL(uint32_t, u32Value);
7072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7073
7074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7076 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7077 return IEMOP_RAISE_INVALID_OPCODE();
7078
7079 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7080 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7081 IEM_MC_ADVANCE_RIP();
7082 IEM_MC_END();
7083 break;
7084
7085 case IEMMODE_64BIT:
7086 IEM_MC_BEGIN(0, 2);
7087 IEM_MC_LOCAL(uint64_t, u64Value);
7088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7089
7090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7092 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7093 return IEMOP_RAISE_INVALID_OPCODE();
7094
7095 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7096 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7097 IEM_MC_ADVANCE_RIP();
7098 IEM_MC_END();
7099 break;
7100
7101 case IEMMODE_16BIT:
7102 /** @todo check this form. */
7103 return IEMOP_RAISE_INVALID_OPCODE();
7104 }
7105 }
7106 else
7107 return IEMOP_RAISE_INVALID_OPCODE();
7108 return VINF_SUCCESS;
7109}
7110/* Opcode 0x66 0x0f 0xc3 - invalid */
7111/* Opcode 0xf3 0x0f 0xc3 - invalid */
7112/* Opcode 0xf2 0x0f 0xc3 - invalid */
7113
7114/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7115FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7116/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7117FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7118/* Opcode 0xf3 0x0f 0xc4 - invalid */
7119/* Opcode 0xf2 0x0f 0xc4 - invalid */
7120
7121/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7122FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7123/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7124FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7125/* Opcode 0xf3 0x0f 0xc5 - invalid */
7126/* Opcode 0xf2 0x0f 0xc5 - invalid */
7127
7128/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7129FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7130/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7131FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7132/* Opcode 0xf3 0x0f 0xc6 - invalid */
7133/* Opcode 0xf2 0x0f 0xc6 - invalid */
7134
7135
7136/** Opcode 0x0f 0xc7 !11/1. */
7137FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7138{
7139 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
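    /* CMPXCHG8B: compares EDX:EAX with the 64-bit memory operand; if equal,
       ZF is set and ECX:EBX is written to memory, otherwise ZF is cleared
       and the memory operand is loaded into EDX:EAX. */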
7140
7141 IEM_MC_BEGIN(4, 3);
7142 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7143 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7144 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7145 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7146 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7147 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7149
7150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7151 IEMOP_HLP_DONE_DECODING();
7152 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7153
7154 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7155 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7156 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7157
7158 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7159 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7160 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7161
7162 IEM_MC_FETCH_EFLAGS(EFlags);
7163 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7164 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7165 else
7166 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7167
7168 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7169 IEM_MC_COMMIT_EFLAGS(EFlags);
7170 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7171 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7172 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7173 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7174 IEM_MC_ENDIF();
7175 IEM_MC_ADVANCE_RIP();
7176
7177 IEM_MC_END();
7178 return VINF_SUCCESS;
7179}
7180
7181
7182/** Opcode REX.W 0x0f 0xc7 !11/1. */
7183FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7184{
7185 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7186 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7187 {
7188#if 0
7189 RT_NOREF(bRm);
7190 IEMOP_BITCH_ABOUT_STUB();
7191 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7192#else
7193 IEM_MC_BEGIN(4, 3);
7194 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7195 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7196 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7197 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7198 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7199 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7201
7202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7203 IEMOP_HLP_DONE_DECODING();
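    /* The 16-byte memory operand must be 16 byte aligned, otherwise #GP(0)
       is raised. */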
7204 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7205 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7206
7207 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7208 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7209 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7210
7211 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7212 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7213 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7214
7215 IEM_MC_FETCH_EFLAGS(EFlags);
7216# ifdef RT_ARCH_AMD64
7217 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7218 {
7219 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7220 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7221 else
7222 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7223 }
7224 else
7225# endif
7226 {
7227 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7228 accesses that are not at all atomic, which works fine in a UNI CPU guest
7229 configuration (ignoring DMA). If guest SMP is active we have no choice
7230 but to use a rendezvous callback here. Sigh. */
7231 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7232 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7233 else
7234 {
7235 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7236 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7237 }
7238 }
7239
7240 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7241 IEM_MC_COMMIT_EFLAGS(EFlags);
7242 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7243 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7244 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7245 IEM_MC_ENDIF();
7246 IEM_MC_ADVANCE_RIP();
7247
7248 IEM_MC_END();
7249 return VINF_SUCCESS;
7250#endif
7251 }
7252 Log(("cmpxchg16b -> #UD\n"));
7253 return IEMOP_RAISE_INVALID_OPCODE();
7254}
7255
7256
7257/** Opcode 0x0f 0xc7 11/6. */
7258FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7259
7260/** Opcode 0x0f 0xc7 !11/6. */
7261FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7262
7263/** Opcode 0x66 0x0f 0xc7 !11/6. */
7264FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7265
7266/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7267FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7268
7269/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7270FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7271
7272
7273/** Opcode 0x0f 0xc7. */
7274FNIEMOP_DEF(iemOp_Grp9)
7275{
7276 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7278 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7279 {
7280 case 0: case 2: case 3: case 4: case 5:
7281 return IEMOP_RAISE_INVALID_OPCODE();
7282 case 1:
7283 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7284 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7285 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7286 return IEMOP_RAISE_INVALID_OPCODE();
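            /* REX.W selects cmpxchg16b, otherwise it's cmpxchg8b. */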
7287 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7288 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7289 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7290 case 6:
7291 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7292 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7293 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7294 {
7295 case 0:
7296 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7297 case IEM_OP_PRF_SIZE_OP:
7298 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7299 case IEM_OP_PRF_REPZ:
7300 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7301 default:
7302 return IEMOP_RAISE_INVALID_OPCODE();
7303 }
7304 case 7:
7305 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7306 {
7307 case 0:
7308 case IEM_OP_PRF_REPZ:
7309 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7310 default:
7311 return IEMOP_RAISE_INVALID_OPCODE();
7312 }
7313 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7314 }
7315}
7316
7317
7318/**
7319 * Common 'bswap register' helper.
7320 */
7321FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7322{
7323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7324 switch (pVCpu->iem.s.enmEffOpSize)
7325 {
7326 case IEMMODE_16BIT:
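            /* The 16-bit form of bswap is documented as undefined; the u16
               helper supplies a concrete behavior for it (presumably matching
               what real hardware does). */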
7327 IEM_MC_BEGIN(1, 0);
7328 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7329 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7330 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7331 IEM_MC_ADVANCE_RIP();
7332 IEM_MC_END();
7333 return VINF_SUCCESS;
7334
7335 case IEMMODE_32BIT:
7336 IEM_MC_BEGIN(1, 0);
7337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7338 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7339 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7340 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7341 IEM_MC_ADVANCE_RIP();
7342 IEM_MC_END();
7343 return VINF_SUCCESS;
7344
7345 case IEMMODE_64BIT:
7346 IEM_MC_BEGIN(1, 0);
7347 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7348 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7349 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7350 IEM_MC_ADVANCE_RIP();
7351 IEM_MC_END();
7352 return VINF_SUCCESS;
7353
7354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7355 }
7356}
7357
7358
7359/** Opcode 0x0f 0xc8. */
7360FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7361{
7362 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7363 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7364 prefix; it appears REX.B is the correct prefix. For a parallel
7365 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
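    /* E.g.: 0f c8 = bswap eax; 48 0f c8 = bswap rax; 49 0f c8 = bswap r8. */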
7366 IEMOP_HLP_MIN_486();
7367 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7368}
7369
7370
7371/** Opcode 0x0f 0xc9. */
7372FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7373{
7374 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7375 IEMOP_HLP_MIN_486();
7376 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7377}
7378
7379
7380/** Opcode 0x0f 0xca. */
7381FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7382{
7383 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7384 IEMOP_HLP_MIN_486();
7385 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7386}
7387
7388
7389/** Opcode 0x0f 0xcb. */
7390FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7391{
7392 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7393 IEMOP_HLP_MIN_486();
7394 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7395}
7396
7397
7398/** Opcode 0x0f 0xcc. */
7399FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7400{
7401 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7402 IEMOP_HLP_MIN_486();
7403 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7404}
7405
7406
7407/** Opcode 0x0f 0xcd. */
7408FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7409{
7410 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7411 IEMOP_HLP_MIN_486();
7412 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7413}
7414
7415
7416/** Opcode 0x0f 0xce. */
7417FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7418{
7419 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7420 IEMOP_HLP_MIN_486();
7421 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7422}
7423
7424
7425/** Opcode 0x0f 0xcf. */
7426FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7427{
7428 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7429 IEMOP_HLP_MIN_486();
7430 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7431}
7432
7433
7434/* Opcode 0x0f 0xd0 - invalid */
7435/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7436FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7437/* Opcode 0xf3 0x0f 0xd0 - invalid */
7438/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7439FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7440
7441/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7442FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7443/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7444FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7445/* Opcode 0xf3 0x0f 0xd1 - invalid */
7446/* Opcode 0xf2 0x0f 0xd1 - invalid */
7447
7448/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7449FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7450/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7451FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7452/* Opcode 0xf3 0x0f 0xd2 - invalid */
7453/* Opcode 0xf2 0x0f 0xd2 - invalid */
7454
7455/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7456FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7457/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7458FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7459/* Opcode 0xf3 0x0f 0xd3 - invalid */
7460/* Opcode 0xf2 0x0f 0xd3 - invalid */
7461
7462/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7463FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7464/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7465FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7466/* Opcode 0xf3 0x0f 0xd4 - invalid */
7467/* Opcode 0xf2 0x0f 0xd4 - invalid */
7468
7469/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7470FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7471/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7472FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7473/* Opcode 0xf3 0x0f 0xd5 - invalid */
7474/* Opcode 0xf2 0x0f 0xd5 - invalid */
7475
7476/* Opcode 0x0f 0xd6 - invalid */
7477/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7478FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7479/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7480FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7481/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7482FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7483#if 0
7484FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7485{
7486 /* Docs say register only. */
7487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7488
7489 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7490 {
7491 case IEM_OP_PRF_SIZE_OP: /* SSE */
7492 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7493 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7494 IEM_MC_BEGIN(2, 0);
7495 IEM_MC_ARG(uint64_t *, pDst, 0);
7496 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7497 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7498 IEM_MC_PREPARE_SSE_USAGE();
7499 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7500 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7501 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7502 IEM_MC_ADVANCE_RIP();
7503 IEM_MC_END();
7504 return VINF_SUCCESS;
7505
7506 case 0: /* MMX */
7507 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7508 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7509 IEM_MC_BEGIN(2, 0);
7510 IEM_MC_ARG(uint64_t *, pDst, 0);
7511 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7512 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7513 IEM_MC_PREPARE_FPU_USAGE();
7514 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7515 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7516 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7517 IEM_MC_ADVANCE_RIP();
7518 IEM_MC_END();
7519 return VINF_SUCCESS;
7520
7521 default:
7522 return IEMOP_RAISE_INVALID_OPCODE();
7523 }
7524}
7525#endif
7526
7527
7528/** Opcode 0x0f 0xd7. */
7529FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7530{
7531 /* Docs say register only. */
7532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7533 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7534 return IEMOP_RAISE_INVALID_OPCODE();
7535
7536 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7537 /** @todo testcase: Check that the instruction implicitly clears the high
7538 * bits in 64-bit mode. The REX.W prefix only becomes necessary when
7539 * VLMAX > 256 and opcode modifications are made to work with the whole
7540 * width (not just 128). */
7541 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7542 {
7543 case IEM_OP_PRF_SIZE_OP: /* SSE */
7544 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7545 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7546 IEM_MC_BEGIN(2, 0);
7547 IEM_MC_ARG(uint64_t *, pDst, 0);
7548 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7549 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7550 IEM_MC_PREPARE_SSE_USAGE();
7551 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7552 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7553 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7554 IEM_MC_ADVANCE_RIP();
7555 IEM_MC_END();
7556 return VINF_SUCCESS;
7557
7558 case 0: /* MMX */
7559 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7560 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7561 IEM_MC_BEGIN(2, 0);
7562 IEM_MC_ARG(uint64_t *, pDst, 0);
7563 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7564 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7565 IEM_MC_PREPARE_FPU_USAGE();
7566 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7567 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7568 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7569 IEM_MC_ADVANCE_RIP();
7570 IEM_MC_END();
7571 return VINF_SUCCESS;
7572
7573 default:
7574 return IEMOP_RAISE_INVALID_OPCODE();
7575 }
7576}
7577
7578
7579/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7580FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7581/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7582FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7583/* Opcode 0xf3 0x0f 0xd8 - invalid */
7584/* Opcode 0xf2 0x0f 0xd8 - invalid */
7585
7586/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7587FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7588/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7589FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7590/* Opcode 0xf3 0x0f 0xd9 - invalid */
7591/* Opcode 0xf2 0x0f 0xd9 - invalid */
7592
7593/** Opcode 0x0f 0xda - pminub Pq, Qq */
7594FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7595/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7596FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7597/* Opcode 0xf3 0x0f 0xda - invalid */
7598/* Opcode 0xf2 0x0f 0xda - invalid */
7599
7600/** Opcode 0x0f 0xdb - pand Pq, Qq */
7601FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7602/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7603FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7604/* Opcode 0xf3 0x0f 0xdb - invalid */
7605/* Opcode 0xf2 0x0f 0xdb - invalid */
7606
7607/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7608FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7609/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7610FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7611/* Opcode 0xf3 0x0f 0xdc - invalid */
7612/* Opcode 0xf2 0x0f 0xdc - invalid */
7613
7614/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7615FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7616/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7617FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7618/* Opcode 0xf3 0x0f 0xdd - invalid */
7619/* Opcode 0xf2 0x0f 0xdd - invalid */
7620
7621/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7622FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7623/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7624FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7625/* Opcode 0xf3 0x0f 0xde - invalid */
7626/* Opcode 0xf2 0x0f 0xde - invalid */
7627
7628/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7629FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7630/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7631FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7632/* Opcode 0xf3 0x0f 0xdf - invalid */
7633/* Opcode 0xf2 0x0f 0xdf - invalid */
7634
7635/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7636FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7637/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7638FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7639/* Opcode 0xf3 0x0f 0xe0 - invalid */
7640/* Opcode 0xf2 0x0f 0xe0 - invalid */
7641
7642/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7643FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7644/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7645FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7646/* Opcode 0xf3 0x0f 0xe1 - invalid */
7647/* Opcode 0xf2 0x0f 0xe1 - invalid */
7648
7649/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7650FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7651/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7652FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7653/* Opcode 0xf3 0x0f 0xe2 - invalid */
7654/* Opcode 0xf2 0x0f 0xe2 - invalid */
7655
7656/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7657FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7658/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7659FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7660/* Opcode 0xf3 0x0f 0xe3 - invalid */
7661/* Opcode 0xf2 0x0f 0xe3 - invalid */
7662
7663/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7664FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7665/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7666FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7667/* Opcode 0xf3 0x0f 0xe4 - invalid */
7668/* Opcode 0xf2 0x0f 0xe4 - invalid */
7669
7670/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7671FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7672/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7673FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7674/* Opcode 0xf3 0x0f 0xe5 - invalid */
7675/* Opcode 0xf2 0x0f 0xe5 - invalid */
7676
7677/* Opcode 0x0f 0xe6 - invalid */
7678/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7679FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7680/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7681FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7682/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7683FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7684
7685
7686/** Opcode 0x0f 0xe7. */
7687FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7688{
7689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7690 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7691 {
7692 /*
7693 * Register, memory.
7694 */
7695/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7696 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7697 {
7698
7699 case IEM_OP_PRF_SIZE_OP: /* SSE */
7700 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7701 IEM_MC_BEGIN(0, 2);
7702 IEM_MC_LOCAL(uint128_t, uSrc);
7703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7704
7705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7707 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7708 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7709
7710 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7711 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7712
7713 IEM_MC_ADVANCE_RIP();
7714 IEM_MC_END();
7715 break;
7716
7717 case 0: /* MMX */
7718 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7719 IEM_MC_BEGIN(0, 2);
7720 IEM_MC_LOCAL(uint64_t, uSrc);
7721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7722
7723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7725 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7726 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7727
7728 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7729 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7730
7731 IEM_MC_ADVANCE_RIP();
7732 IEM_MC_END();
7733 break;
7734
7735 default:
7736 return IEMOP_RAISE_INVALID_OPCODE();
7737 }
7738 }
7739 /* The register, register encoding is invalid. */
7740 else
7741 return IEMOP_RAISE_INVALID_OPCODE();
7742 return VINF_SUCCESS;
7743}
7744
7745
7746/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7747FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7748/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7749FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7750/* Opcode 0xf3 0x0f 0xe8 - invalid */
7751/* Opcode 0xf2 0x0f 0xe8 - invalid */
7752
7753/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7754FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7755/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7756FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7757/* Opcode 0xf3 0x0f 0xe9 - invalid */
7758/* Opcode 0xf2 0x0f 0xe9 - invalid */
7759
7760/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7761FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7762/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7763FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7764/* Opcode 0xf3 0x0f 0xea - invalid */
7765/* Opcode 0xf2 0x0f 0xea - invalid */
7766
7767/** Opcode 0x0f 0xeb - por Pq, Qq */
7768FNIEMOP_STUB(iemOp_por_Pq_Qq);
7769/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7770FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7771/* Opcode 0xf3 0x0f 0xeb - invalid */
7772/* Opcode 0xf2 0x0f 0xeb - invalid */
7773
7774/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7775FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7776/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7777FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7778/* Opcode 0xf3 0x0f 0xec - invalid */
7779/* Opcode 0xf2 0x0f 0xec - invalid */
7780
7781/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7782FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7783/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7784FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7785/* Opcode 0xf3 0x0f 0xed - invalid */
7786/* Opcode 0xf2 0x0f 0xed - invalid */
7787
7788/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7789FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7790/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7791FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7792/* Opcode 0xf3 0x0f 0xee - invalid */
7793/* Opcode 0xf2 0x0f 0xee - invalid */
7794
7795
7796/** Opcode 0x0f 0xef. */
7797FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7798{
7799 IEMOP_MNEMONIC(pxor, "pxor");
7800 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7801}
7802/* Opcode 0xf3 0x0f 0xef - invalid */
7803/* Opcode 0xf2 0x0f 0xef - invalid */
7804
7805/* Opcode 0x0f 0xf0 - invalid */
7806/* Opcode 0x66 0x0f 0xf0 - invalid */
7807/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7808FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7809
7810/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7811FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7812/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7813FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7814/* Opcode 0xf2 0x0f 0xf1 - invalid */
7815
7816/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7817FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7818/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7819FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7820/* Opcode 0xf2 0x0f 0xf2 - invalid */
7821
7822/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7823FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7824/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7825FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7826/* Opcode 0xf2 0x0f 0xf3 - invalid */
7827
7828/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7829FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7830/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7831FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7832/* Opcode 0xf2 0x0f 0xf4 - invalid */
7833
7834/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7835FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7836/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7837FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7838/* Opcode 0xf2 0x0f 0xf5 - invalid */
7839
7840/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7841FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7842/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7843FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7844/* Opcode 0xf2 0x0f 0xf6 - invalid */
7845
7846/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7847FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7848/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7849FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7850/* Opcode 0xf2 0x0f 0xf7 - invalid */
7851
7852/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7853FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7854/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7855FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7856/* Opcode 0xf2 0x0f 0xf8 - invalid */
7857
7858/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7859FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7860/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7861FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7862/* Opcode 0xf2 0x0f 0xf9 - invalid */
7863
7864/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7865FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7866/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7867FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7868/* Opcode 0xf2 0x0f 0xfa - invalid */
7869
7870/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7871FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7872/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7873FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7874/* Opcode 0xf2 0x0f 0xfb - invalid */
7875
7876/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7877FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7878/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7879FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7880/* Opcode 0xf2 0x0f 0xfc - invalid */
7881
7882/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7883FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7884/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7885FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7886/* Opcode 0xf2 0x0f 0xfd - invalid */
7887
7888/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7889FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7890/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7891FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7892/* Opcode 0xf2 0x0f 0xfe - invalid */
7893
7894
7895/** Opcode **** 0x0f 0xff - UD0 */
7896FNIEMOP_DEF(iemOp_ud0)
7897{
7898 IEMOP_MNEMONIC(ud0, "ud0");
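    /* On Intel CPUs ud0 consumes a ModR/M byte (decoding any memory operand)
       before raising #UD, while AMD CPUs raise #UD without fetching one. */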
7899 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7900 {
7901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7902#ifndef TST_IEM_CHECK_MC
7903 RTGCPTR GCPtrEff;
7904 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7905 if (rcStrict != VINF_SUCCESS)
7906 return rcStrict;
7907#endif
7908 IEMOP_HLP_DONE_DECODING();
7909 }
7910 return IEMOP_RAISE_INVALID_OPCODE();
7911}
7912
7913
7914
7915/** Repeats a_fn four times. For decoding tables. */
7916#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
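/* E.g. IEMOP_X4(iemOp_ud2) yields: iemOp_ud2, iemOp_ud2, iemOp_ud2, iemOp_ud2 */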
7917
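/*
 * Two-byte opcode map: four entries per opcode byte - one for each of the
 * no-prefix, 0x66, 0xf3 and 0xf2 forms - looked up via
 * g_apfnTwoByteMap[bOpcode * 4 + pVCpu->iem.s.idxPrefix].
 */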
7918IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7919{
7920 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
7921 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7922 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7923 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7924 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7925 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7926 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7927 /* 0x06 */ IEMOP_X4(iemOp_clts),
7928 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7929 /* 0x08 */ IEMOP_X4(iemOp_invd),
7930 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7931 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7932 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7933 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7934 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7935 /* 0x0e */ IEMOP_X4(iemOp_femms),
7936 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7937
7938 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7939 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7940 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7941 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7942 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7943 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7944 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7945 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7946 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7947 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7948 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7949 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7950 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7951 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7952 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7953 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7954
7955 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7956 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7957 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7958 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7959 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7960 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7961 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7962 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7963 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7964 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
7965 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7966 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7967 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7968 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7969 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7970 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7971
7972 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7973 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7974 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7975 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7976 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7977 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7978 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7979 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7980 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7981 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7982 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7983 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7984 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7985 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7986 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7987 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7988
7989 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7990 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7991 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7992 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7993 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7994 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7995 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7996 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7997 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7998 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7999 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8000 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8001 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8002 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8003 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8004 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8005
8006 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8007 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8008 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8009 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8010 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8011 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8012 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8013 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8014 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8015 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8016 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8017 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8018 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8019 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8020 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8021 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8022
8023 /* 0x60 */ IEMOP_X4(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq),
8024 /* 0x61 */ IEMOP_X4(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq),
8025 /* 0x62 */ IEMOP_X4(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq),
8026 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8027 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8028 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8029 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8030 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8031 /* 0x68 */ IEMOP_X4(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq),
8032 /* 0x69 */ IEMOP_X4(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq),
8033 /* 0x6a */ IEMOP_X4(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq),
8034 /* 0x6b */ IEMOP_X4(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq),
8035 /* 0x6c */ IEMOP_X4(iemOp_punpcklqdq_Vdq_Wdq),
8036 /* 0x6d */ IEMOP_X4(iemOp_punpckhqdq_Vdq_Wdq),
8037 /* 0x6e */ IEMOP_X4(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey),
8038 /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),
8039
8040 /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
8041 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8042 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8043 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8044 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8045 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8046 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8047 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8048
8049 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8050 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8051 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8052 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8053 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8054 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8055 /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
8056 /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),
8057
8058 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8059 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8060 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8061 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8062 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8063 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8064 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8065 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8066 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8067 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8068 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8069 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8070 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8071 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8072 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8073 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8074
8075 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8076 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8077 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8078 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8079 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8080 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8081 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8082 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8083 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8084 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8085 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8086 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8087 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8088 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8089 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8090 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8091
8092 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8093 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8094 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8095 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8096 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8097 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8098 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8099 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8100 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8101 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8102 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8103 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8104 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8105 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8106 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8107 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8108
8109 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8110 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8111 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8112 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8113 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8114 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8115 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8116 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8117 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8118 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8119 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8120 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8121 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8122 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8123 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8124 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8125
8126 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8127 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8128 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8129 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8130 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8131 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8132 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8133 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8134 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8135 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8136 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8137 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8138 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8139 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8140 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8141 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8142
8143 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8144 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8145 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8146 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8147 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8148 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8149 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8150 /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
8151 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8152 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8153 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8154 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8155 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8156 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8157 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8158 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8159
8160 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8161 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8162 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8163 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8164 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8165 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8166 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8167 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8168 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8169 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8170 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8171 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8172 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8173 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8174 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8175 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8176
8177 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8178 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8179 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8180 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8181 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8182 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8183 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8184 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8185 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8186 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8187 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8188 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8189 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8190 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8191 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8192 /* 0xff */ IEMOP_X4(iemOp_ud0),
8193};
8194AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8195/** @} */
8196
8197
8198/** @name One byte opcodes.
8199 *
8200 * @{
8201 */
8202
8203/** Opcode 0x00. */
8204FNIEMOP_DEF(iemOp_add_Eb_Gb)
8205{
8206 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
8207 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
8208}
8209
8210
8211/** Opcode 0x01. */
8212FNIEMOP_DEF(iemOp_add_Ev_Gv)
8213{
8214 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
8215 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
8216}
8217
8218
8219/** Opcode 0x02. */
8220FNIEMOP_DEF(iemOp_add_Gb_Eb)
8221{
8222 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
8223 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
8224}
8225
8226
8227/** Opcode 0x03. */
8228FNIEMOP_DEF(iemOp_add_Gv_Ev)
8229{
8230 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
8231 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
8232}
8233
8234
8235/** Opcode 0x04. */
8236FNIEMOP_DEF(iemOp_add_Al_Ib)
8237{
8238 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
8239 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
8240}
8241
8242
8243/** Opcode 0x05. */
8244FNIEMOP_DEF(iemOp_add_eAX_Iz)
8245{
8246 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
8247 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
8248}
8249
8250
8251/** Opcode 0x06. */
8252FNIEMOP_DEF(iemOp_push_ES)
8253{
8254 IEMOP_MNEMONIC(push_es, "push es");
8255 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
8256}
8257
8258
8259/** Opcode 0x07. */
8260FNIEMOP_DEF(iemOp_pop_ES)
8261{
8262 IEMOP_MNEMONIC(pop_es, "pop es");
8263 IEMOP_HLP_NO_64BIT();
8264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8265 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
8266}
8267
8268
8269/** Opcode 0x08. */
8270FNIEMOP_DEF(iemOp_or_Eb_Gb)
8271{
8272 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
8273 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8274 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
8275}
8276
8277
8278/** Opcode 0x09. */
8279FNIEMOP_DEF(iemOp_or_Ev_Gv)
8280{
8281 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
8282 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8283 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
8284}
8285
8286
8287/** Opcode 0x0a. */
8288FNIEMOP_DEF(iemOp_or_Gb_Eb)
8289{
8290 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
8291 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8292 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
8293}
8294
8295
8296/** Opcode 0x0b. */
8297FNIEMOP_DEF(iemOp_or_Gv_Ev)
8298{
8299 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
8300 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8301 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
8302}
8303
8304
8305/** Opcode 0x0c. */
8306FNIEMOP_DEF(iemOp_or_Al_Ib)
8307{
8308 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
8309 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8310 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
8311}
8312
8313
8314/** Opcode 0x0d. */
8315FNIEMOP_DEF(iemOp_or_eAX_Iz)
8316{
8317 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
8318 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8319 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
8320}
8321
8322
8323/** Opcode 0x0e. */
8324FNIEMOP_DEF(iemOp_push_CS)
8325{
8326 IEMOP_MNEMONIC(push_cs, "push cs");
8327 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
8328}
8329
8330
8331/** Opcode 0x0f. */
8332FNIEMOP_DEF(iemOp_2byteEscape)
8333{
8334#ifdef VBOX_STRICT
8335 static bool s_fTested = false;
8336 if (RT_LIKELY(s_fTested)) { /* likely */ }
8337 else
8338 {
8339 s_fTested = true;
8340 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
8341 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
8342 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
8343 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
8344 }
8345#endif
8346
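 /*
 * Note: g_apfnTwoByteMap stores four handlers per opcode byte, selected by
 * the last mandatory prefix seen: index 0 = no prefix, 1 = 0x66, 2 = 0xf3,
 * 3 = 0xf2 (hence the AssertCompile of 1024 entries for 256 opcodes, and
 * the tzcnt assertion above checking the 0xf3 column of 0x0f 0xbc). For
 * instance, for 0x0f 0xef index 0 decodes pxor Pq,Qq and index 1 (operand
 * size prefix) decodes pxor Vdq,Wdq.
 */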
8347 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8348
8349 /** @todo POP CS on 8086, undefined on 80186. */
8350 IEMOP_HLP_MIN_286();
8351 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
8352}
8353
8354/** Opcode 0x10. */
8355FNIEMOP_DEF(iemOp_adc_Eb_Gb)
8356{
8357 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
8358 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
8359}
8360
8361
8362/** Opcode 0x11. */
8363FNIEMOP_DEF(iemOp_adc_Ev_Gv)
8364{
8365 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
8366 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
8367}
8368
8369
8370/** Opcode 0x12. */
8371FNIEMOP_DEF(iemOp_adc_Gb_Eb)
8372{
8373 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
8374 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
8375}
8376
8377
8378/** Opcode 0x13. */
8379FNIEMOP_DEF(iemOp_adc_Gv_Ev)
8380{
8381 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
8382 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
8383}
8384
8385
8386/** Opcode 0x14. */
8387FNIEMOP_DEF(iemOp_adc_Al_Ib)
8388{
8389 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
8390 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
8391}
8392
8393
8394/** Opcode 0x15. */
8395FNIEMOP_DEF(iemOp_adc_eAX_Iz)
8396{
8397 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
8398 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
8399}
8400
8401
8402/** Opcode 0x16. */
8403FNIEMOP_DEF(iemOp_push_SS)
8404{
8405 IEMOP_MNEMONIC(push_ss, "push ss");
8406 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
8407}
8408
8409
8410/** Opcode 0x17. */
8411FNIEMOP_DEF(iemOp_pop_SS)
8412{
8413 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
8414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8415 IEMOP_HLP_NO_64BIT();
8416 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
8417}
8418
8419
8420/** Opcode 0x18. */
8421FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
8422{
8423 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
8424 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
8425}
8426
8427
8428/** Opcode 0x19. */
8429FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
8430{
8431 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
8432 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
8433}
8434
8435
8436/** Opcode 0x1a. */
8437FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
8438{
8439 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
8440 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
8441}
8442
8443
8444/** Opcode 0x1b. */
8445FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
8446{
8447 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
8448 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
8449}
8450
8451
8452/** Opcode 0x1c. */
8453FNIEMOP_DEF(iemOp_sbb_Al_Ib)
8454{
8455 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
8456 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
8457}
8458
8459
8460/** Opcode 0x1d. */
8461FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
8462{
8463 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
8464 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
8465}
8466
8467
8468/** Opcode 0x1e. */
8469FNIEMOP_DEF(iemOp_push_DS)
8470{
8471 IEMOP_MNEMONIC(push_ds, "push ds");
8472 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
8473}
8474
8475
8476/** Opcode 0x1f. */
8477FNIEMOP_DEF(iemOp_pop_DS)
8478{
8479 IEMOP_MNEMONIC(pop_ds, "pop ds");
8480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8481 IEMOP_HLP_NO_64BIT();
8482 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
8483}
8484
8485
8486/** Opcode 0x20. */
8487FNIEMOP_DEF(iemOp_and_Eb_Gb)
8488{
8489 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
8490 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8491 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
8492}
8493
8494
8495/** Opcode 0x21. */
8496FNIEMOP_DEF(iemOp_and_Ev_Gv)
8497{
8498 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
8499 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8500 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
8501}
8502
8503
8504/** Opcode 0x22. */
8505FNIEMOP_DEF(iemOp_and_Gb_Eb)
8506{
8507 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
8508 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8509 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
8510}
8511
8512
8513/** Opcode 0x23. */
8514FNIEMOP_DEF(iemOp_and_Gv_Ev)
8515{
8516 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
8517 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8518 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
8519}
8520
8521
8522/** Opcode 0x24. */
8523FNIEMOP_DEF(iemOp_and_Al_Ib)
8524{
8525 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
8526 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8527 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
8528}
8529
8530
8531/** Opcode 0x25. */
8532FNIEMOP_DEF(iemOp_and_eAX_Iz)
8533{
8534 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
8535 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8536 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
8537}
8538
8539
8540/** Opcode 0x26. */
8541FNIEMOP_DEF(iemOp_seg_ES)
8542{
8543 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
8544 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
8545 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
8546
8547 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8548 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8549}
8550
8551
8552/** Opcode 0x27. */
8553FNIEMOP_DEF(iemOp_daa)
8554{
8555 IEMOP_MNEMONIC(daa_AL, "daa AL");
8556 IEMOP_HLP_NO_64BIT();
8557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8558 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8559 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
8560}
8561
8562
8563/** Opcode 0x28. */
8564FNIEMOP_DEF(iemOp_sub_Eb_Gb)
8565{
8566 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
8567 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
8568}
8569
8570
8571/** Opcode 0x29. */
8572FNIEMOP_DEF(iemOp_sub_Ev_Gv)
8573{
8574 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
8575 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
8576}
8577
8578
8579/** Opcode 0x2a. */
8580FNIEMOP_DEF(iemOp_sub_Gb_Eb)
8581{
8582 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
8583 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
8584}
8585
8586
8587/** Opcode 0x2b. */
8588FNIEMOP_DEF(iemOp_sub_Gv_Ev)
8589{
8590 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
8591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
8592}
8593
8594
8595/** Opcode 0x2c. */
8596FNIEMOP_DEF(iemOp_sub_Al_Ib)
8597{
8598 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
8599 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
8600}
8601
8602
8603/** Opcode 0x2d. */
8604FNIEMOP_DEF(iemOp_sub_eAX_Iz)
8605{
8606 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
8607 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
8608}
8609
8610
8611/** Opcode 0x2e. */
8612FNIEMOP_DEF(iemOp_seg_CS)
8613{
8614 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
8615 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
8616 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
8617
8618 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8619 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8620}
8621
8622
8623/** Opcode 0x2f. */
8624FNIEMOP_DEF(iemOp_das)
8625{
8626 IEMOP_MNEMONIC(das_AL, "das AL");
8627 IEMOP_HLP_NO_64BIT();
8628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8629 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8630 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
8631}
8632
8633
8634/** Opcode 0x30. */
8635FNIEMOP_DEF(iemOp_xor_Eb_Gb)
8636{
8637 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
8638 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8639 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
8640}
8641
8642
8643/** Opcode 0x31. */
8644FNIEMOP_DEF(iemOp_xor_Ev_Gv)
8645{
8646 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
8647 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8648 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
8649}
8650
8651
8652/** Opcode 0x32. */
8653FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8654{
8655 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8656 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8657 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8658}
8659
8660
8661/** Opcode 0x33. */
8662FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8663{
8664 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8665 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8666 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8667}
8668
8669
8670/** Opcode 0x34. */
8671FNIEMOP_DEF(iemOp_xor_Al_Ib)
8672{
8673 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8674 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8675 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8676}
8677
8678
8679/** Opcode 0x35. */
8680FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8681{
8682 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8683 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8684 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8685}
8686
8687
8688/** Opcode 0x36. */
8689FNIEMOP_DEF(iemOp_seg_SS)
8690{
8691 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8692 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8693 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8694
8695 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8696 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8697}
8698
8699
8700/** Opcode 0x37. */
8701FNIEMOP_STUB(iemOp_aaa);
8702
8703
8704/** Opcode 0x38. */
8705FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8706{
8707 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8708 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8709}
8710
8711
8712/** Opcode 0x39. */
8713FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8714{
8715 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8716 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8717}
8718
8719
8720/** Opcode 0x3a. */
8721FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8722{
8723 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8724 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8725}
8726
8727
8728/** Opcode 0x3b. */
8729FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8730{
8731 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8732 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8733}
8734
8735
8736/** Opcode 0x3c. */
8737FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8738{
8739 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8740 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8741}
8742
8743
8744/** Opcode 0x3d. */
8745FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8746{
8747 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8748 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8749}
8750
8751
8752/** Opcode 0x3e. */
8753FNIEMOP_DEF(iemOp_seg_DS)
8754{
8755 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8756 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8757 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8758
8759 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8760 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8761}
8762
8763
8764/** Opcode 0x3f. */
8765FNIEMOP_STUB(iemOp_aas);
8766
8767/**
8768 * Common 'inc/dec/not/neg register' helper.
8769 */
8770FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8771{
8772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8773 switch (pVCpu->iem.s.enmEffOpSize)
8774 {
8775 case IEMMODE_16BIT:
8776 IEM_MC_BEGIN(2, 0);
8777 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8778 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8779 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8780 IEM_MC_REF_EFLAGS(pEFlags);
8781 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8782 IEM_MC_ADVANCE_RIP();
8783 IEM_MC_END();
8784 return VINF_SUCCESS;
8785
8786 case IEMMODE_32BIT:
8787 IEM_MC_BEGIN(2, 0);
8788 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8789 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8790 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8791 IEM_MC_REF_EFLAGS(pEFlags);
8792 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
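 /* Writes to a 32-bit register zero the high half of the full 64-bit
 register, hence the explicit clear below. */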
8793 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8794 IEM_MC_ADVANCE_RIP();
8795 IEM_MC_END();
8796 return VINF_SUCCESS;
8797
8798 case IEMMODE_64BIT:
8799 IEM_MC_BEGIN(2, 0);
8800 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8801 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8802 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8803 IEM_MC_REF_EFLAGS(pEFlags);
8804 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8805 IEM_MC_ADVANCE_RIP();
8806 IEM_MC_END();
8807 return VINF_SUCCESS;
8808 }
8809 return VINF_SUCCESS;
8810}
8811
8812
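/*
 * Opcodes 0x40 thru 0x4f decode as inc/dec in 16-bit and 32-bit mode, but are
 * repurposed as REX prefixes in 64-bit mode: bit 0 = REX.B, bit 1 = REX.X,
 * bit 2 = REX.R and bit 3 = REX.W. E.g. 48 ff c0 executes as 'inc rax', the
 * 0x48 byte being consumed as REX.W rather than executed as 'dec eax'.
 */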
8813/** Opcode 0x40. */
8814FNIEMOP_DEF(iemOp_inc_eAX)
8815{
8816 /*
8817 * This is a REX prefix in 64-bit mode.
8818 */
8819 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8820 {
8821 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8822 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8823
8824 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8825 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8826 }
8827
8828 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8829 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8830}
8831
8832
8833/** Opcode 0x41. */
8834FNIEMOP_DEF(iemOp_inc_eCX)
8835{
8836 /*
8837 * This is a REX prefix in 64-bit mode.
8838 */
8839 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8840 {
8841 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8842 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8843 pVCpu->iem.s.uRexB = 1 << 3;
8844
8845 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8846 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8847 }
8848
8849 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8850 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8851}
8852
8853
8854/** Opcode 0x42. */
8855FNIEMOP_DEF(iemOp_inc_eDX)
8856{
8857 /*
8858 * This is a REX prefix in 64-bit mode.
8859 */
8860 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8861 {
8862 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8863 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8864 pVCpu->iem.s.uRexIndex = 1 << 3;
8865
8866 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8867 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8868 }
8869
8870 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8871 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8872}
8873
8874
8876/** Opcode 0x43. */
8877FNIEMOP_DEF(iemOp_inc_eBX)
8878{
8879 /*
8880 * This is a REX prefix in 64-bit mode.
8881 */
8882 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8883 {
8884 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8885 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8886 pVCpu->iem.s.uRexB = 1 << 3;
8887 pVCpu->iem.s.uRexIndex = 1 << 3;
8888
8889 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8890 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8891 }
8892
8893 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8894 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8895}
8896
8897
8898/** Opcode 0x44. */
8899FNIEMOP_DEF(iemOp_inc_eSP)
8900{
8901 /*
8902 * This is a REX prefix in 64-bit mode.
8903 */
8904 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8905 {
8906 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8907 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8908 pVCpu->iem.s.uRexReg = 1 << 3;
8909
8910 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8911 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8912 }
8913
8914 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
8915 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8916}
8917
8918
8919/** Opcode 0x45. */
8920FNIEMOP_DEF(iemOp_inc_eBP)
8921{
8922 /*
8923 * This is a REX prefix in 64-bit mode.
8924 */
8925 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8926 {
8927 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8928 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8929 pVCpu->iem.s.uRexReg = 1 << 3;
8930 pVCpu->iem.s.uRexB = 1 << 3;
8931
8932 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8933 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8934 }
8935
8936 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
8937 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8938}
8939
8940
8941/** Opcode 0x46. */
8942FNIEMOP_DEF(iemOp_inc_eSI)
8943{
8944 /*
8945 * This is a REX prefix in 64-bit mode.
8946 */
8947 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8948 {
8949 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8950 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8951 pVCpu->iem.s.uRexReg = 1 << 3;
8952 pVCpu->iem.s.uRexIndex = 1 << 3;
8953
8954 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8955 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8956 }
8957
8958 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
8959 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8960}
8961
8962
8963/** Opcode 0x47. */
8964FNIEMOP_DEF(iemOp_inc_eDI)
8965{
8966 /*
8967 * This is a REX prefix in 64-bit mode.
8968 */
8969 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8970 {
8971 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8972 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8973 pVCpu->iem.s.uRexReg = 1 << 3;
8974 pVCpu->iem.s.uRexB = 1 << 3;
8975 pVCpu->iem.s.uRexIndex = 1 << 3;
8976
8977 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8978 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8979 }
8980
8981 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
8982 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8983}
8984
8985
8986/** Opcode 0x48. */
8987FNIEMOP_DEF(iemOp_dec_eAX)
8988{
8989 /*
8990 * This is a REX prefix in 64-bit mode.
8991 */
8992 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8993 {
8994 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8995 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
8996 iemRecalEffOpSize(pVCpu);
8997
8998 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8999 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9000 }
9001
9002 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
9003 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
9004}
9005
9006
9007/** Opcode 0x49. */
9008FNIEMOP_DEF(iemOp_dec_eCX)
9009{
9010 /*
9011 * This is a REX prefix in 64-bit mode.
9012 */
9013 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9014 {
9015 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
9016 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9017 pVCpu->iem.s.uRexB = 1 << 3;
9018 iemRecalEffOpSize(pVCpu);
9019
9020 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9021 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9022 }
9023
9024 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
9025 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
9026}
9027
9028
9029/** Opcode 0x4a. */
9030FNIEMOP_DEF(iemOp_dec_eDX)
9031{
9032 /*
9033 * This is a REX prefix in 64-bit mode.
9034 */
9035 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9036 {
9037 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
9038 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9039 pVCpu->iem.s.uRexIndex = 1 << 3;
9040 iemRecalEffOpSize(pVCpu);
9041
9042 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9043 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9044 }
9045
9046 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
9047 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
9048}
9049
9050
9051/** Opcode 0x4b. */
9052FNIEMOP_DEF(iemOp_dec_eBX)
9053{
9054 /*
9055 * This is a REX prefix in 64-bit mode.
9056 */
9057 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9058 {
9059 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
9060 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9061 pVCpu->iem.s.uRexB = 1 << 3;
9062 pVCpu->iem.s.uRexIndex = 1 << 3;
9063 iemRecalEffOpSize(pVCpu);
9064
9065 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9066 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9067 }
9068
9069 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
9070 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
9071}
9072
9073
9074/** Opcode 0x4c. */
9075FNIEMOP_DEF(iemOp_dec_eSP)
9076{
9077 /*
9078 * This is a REX prefix in 64-bit mode.
9079 */
9080 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9081 {
9082 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
9083 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
9084 pVCpu->iem.s.uRexReg = 1 << 3;
9085 iemRecalEffOpSize(pVCpu);
9086
9087 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9088 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9089 }
9090
9091 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
9092 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
9093}
9094
9095
9096/** Opcode 0x4d. */
9097FNIEMOP_DEF(iemOp_dec_eBP)
9098{
9099 /*
9100 * This is a REX prefix in 64-bit mode.
9101 */
9102 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9103 {
9104 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
9105 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9106 pVCpu->iem.s.uRexReg = 1 << 3;
9107 pVCpu->iem.s.uRexB = 1 << 3;
9108 iemRecalEffOpSize(pVCpu);
9109
9110 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9111 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9112 }
9113
9114 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
9115 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
9116}
9117
9118
9119/** Opcode 0x4e. */
9120FNIEMOP_DEF(iemOp_dec_eSI)
9121{
9122 /*
9123 * This is a REX prefix in 64-bit mode.
9124 */
9125 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9126 {
9127 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
9128 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9129 pVCpu->iem.s.uRexReg = 1 << 3;
9130 pVCpu->iem.s.uRexIndex = 1 << 3;
9131 iemRecalEffOpSize(pVCpu);
9132
9133 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9134 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9135 }
9136
9137 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
9138 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
9139}
9140
9141
9142/** Opcode 0x4f. */
9143FNIEMOP_DEF(iemOp_dec_eDI)
9144{
9145 /*
9146 * This is a REX prefix in 64-bit mode.
9147 */
9148 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9149 {
9150 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
9151 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9152 pVCpu->iem.s.uRexReg = 1 << 3;
9153 pVCpu->iem.s.uRexB = 1 << 3;
9154 pVCpu->iem.s.uRexIndex = 1 << 3;
9155 iemRecalEffOpSize(pVCpu);
9156
9157 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9158 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9159 }
9160
9161 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
9162 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
9163}
9164
9165
9166/**
9167 * Common 'push register' helper.
9168 */
9169FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9170{
9171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9172 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9173 {
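 /* In 64-bit mode the default push operand size is 64-bit; the 0x66
 prefix selects 16-bit and a 32-bit push is not encodable. */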
9174 iReg |= pVCpu->iem.s.uRexB;
9175 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9176 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9177 }
9178
9179 switch (pVCpu->iem.s.enmEffOpSize)
9180 {
9181 case IEMMODE_16BIT:
9182 IEM_MC_BEGIN(0, 1);
9183 IEM_MC_LOCAL(uint16_t, u16Value);
9184 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9185 IEM_MC_PUSH_U16(u16Value);
9186 IEM_MC_ADVANCE_RIP();
9187 IEM_MC_END();
9188 break;
9189
9190 case IEMMODE_32BIT:
9191 IEM_MC_BEGIN(0, 1);
9192 IEM_MC_LOCAL(uint32_t, u32Value);
9193 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9194 IEM_MC_PUSH_U32(u32Value);
9195 IEM_MC_ADVANCE_RIP();
9196 IEM_MC_END();
9197 break;
9198
9199 case IEMMODE_64BIT:
9200 IEM_MC_BEGIN(0, 1);
9201 IEM_MC_LOCAL(uint64_t, u64Value);
9202 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9203 IEM_MC_PUSH_U64(u64Value);
9204 IEM_MC_ADVANCE_RIP();
9205 IEM_MC_END();
9206 break;
9207 }
9208
9209 return VINF_SUCCESS;
9210}
9211
9212
9213/** Opcode 0x50. */
9214FNIEMOP_DEF(iemOp_push_eAX)
9215{
9216 IEMOP_MNEMONIC(push_rAX, "push rAX");
9217 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
9218}
9219
9220
9221/** Opcode 0x51. */
9222FNIEMOP_DEF(iemOp_push_eCX)
9223{
9224 IEMOP_MNEMONIC(push_rCX, "push rCX");
9225 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
9226}
9227
9228
9229/** Opcode 0x52. */
9230FNIEMOP_DEF(iemOp_push_eDX)
9231{
9232 IEMOP_MNEMONIC(push_rDX, "push rDX");
9233 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
9234}
9235
9236
9237/** Opcode 0x53. */
9238FNIEMOP_DEF(iemOp_push_eBX)
9239{
9240 IEMOP_MNEMONIC(push_rBX, "push rBX");
9241 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
9242}
9243
9244
9245/** Opcode 0x54. */
9246FNIEMOP_DEF(iemOp_push_eSP)
9247{
9248 IEMOP_MNEMONIC(push_rSP, "push rSP");
9249 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9250 {
9251 IEM_MC_BEGIN(0, 1);
9252 IEM_MC_LOCAL(uint16_t, u16Value);
9253 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9254 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9255 IEM_MC_PUSH_U16(u16Value);
9256 IEM_MC_ADVANCE_RIP();
9257 IEM_MC_END();
9258 }
9259 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9260}
9261
9262
9263/** Opcode 0x55. */
9264FNIEMOP_DEF(iemOp_push_eBP)
9265{
9266 IEMOP_MNEMONIC(push_rBP, "push rBP");
9267 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
9268}
9269
9270
9271/** Opcode 0x56. */
9272FNIEMOP_DEF(iemOp_push_eSI)
9273{
9274 IEMOP_MNEMONIC(push_rSI, "push rSI");
9275 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
9276}
9277
9278
9279/** Opcode 0x57. */
9280FNIEMOP_DEF(iemOp_push_eDI)
9281{
9282 IEMOP_MNEMONIC(push_rDI, "push rDI");
9283 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
9284}
9285
9286
9287/**
9288 * Common 'pop register' helper.
9289 */
9290FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9291{
9292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9293 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9294 {
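 /* REX.B extends the register index so r8..r15 can be popped; pop shares
 the 64-bit default operand size behaviour of push. */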
9295 iReg |= pVCpu->iem.s.uRexB;
9296 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9297 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9298 }
9299
9300 switch (pVCpu->iem.s.enmEffOpSize)
9301 {
9302 case IEMMODE_16BIT:
9303 IEM_MC_BEGIN(0, 1);
9304 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9305 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9306 IEM_MC_POP_U16(pu16Dst);
9307 IEM_MC_ADVANCE_RIP();
9308 IEM_MC_END();
9309 break;
9310
9311 case IEMMODE_32BIT:
9312 IEM_MC_BEGIN(0, 1);
9313 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9314 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9315 IEM_MC_POP_U32(pu32Dst);
9316 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase */
9317 IEM_MC_ADVANCE_RIP();
9318 IEM_MC_END();
9319 break;
9320
9321 case IEMMODE_64BIT:
9322 IEM_MC_BEGIN(0, 1);
9323 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9324 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9325 IEM_MC_POP_U64(pu64Dst);
9326 IEM_MC_ADVANCE_RIP();
9327 IEM_MC_END();
9328 break;
9329 }
9330
9331 return VINF_SUCCESS;
9332}
9333
9334
9335/** Opcode 0x58. */
9336FNIEMOP_DEF(iemOp_pop_eAX)
9337{
9338 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
9339 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
9340}
9341
9342
9343/** Opcode 0x59. */
9344FNIEMOP_DEF(iemOp_pop_eCX)
9345{
9346 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
9347 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
9348}
9349
9350
9351/** Opcode 0x5a. */
9352FNIEMOP_DEF(iemOp_pop_eDX)
9353{
9354 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
9355 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
9356}
9357
9358
9359/** Opcode 0x5b. */
9360FNIEMOP_DEF(iemOp_pop_eBX)
9361{
9362 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
9363 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
9364}
9365
9366
9367/** Opcode 0x5c. */
9368FNIEMOP_DEF(iemOp_pop_eSP)
9369{
9370 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
9371 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9372 {
9373 if (pVCpu->iem.s.uRexB)
9374 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
9375 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9376 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9377 }
9378
9379 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
9380 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
9381 /** @todo add testcase for this instruction. */
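 /* POP rSP is special in that the value must be popped into a temporary
 and only then written to xSP, since the destination is the very stack
 pointer being updated by the pop. */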
9382 switch (pVCpu->iem.s.enmEffOpSize)
9383 {
9384 case IEMMODE_16BIT:
9385 IEM_MC_BEGIN(0, 1);
9386 IEM_MC_LOCAL(uint16_t, u16Dst);
9387 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
9388 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
9389 IEM_MC_ADVANCE_RIP();
9390 IEM_MC_END();
9391 break;
9392
9393 case IEMMODE_32BIT:
9394 IEM_MC_BEGIN(0, 1);
9395 IEM_MC_LOCAL(uint32_t, u32Dst);
9396 IEM_MC_POP_U32(&u32Dst);
9397 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
9398 IEM_MC_ADVANCE_RIP();
9399 IEM_MC_END();
9400 break;
9401
9402 case IEMMODE_64BIT:
9403 IEM_MC_BEGIN(0, 1);
9404 IEM_MC_LOCAL(uint64_t, u64Dst);
9405 IEM_MC_POP_U64(&u64Dst);
9406 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
9407 IEM_MC_ADVANCE_RIP();
9408 IEM_MC_END();
9409 break;
9410 }
9411
9412 return VINF_SUCCESS;
9413}
9414
9415
9416/** Opcode 0x5d. */
9417FNIEMOP_DEF(iemOp_pop_eBP)
9418{
9419 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
9420 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
9421}
9422
9423
9424/** Opcode 0x5e. */
9425FNIEMOP_DEF(iemOp_pop_eSI)
9426{
9427 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
9428 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
9429}
9430
9431
9432/** Opcode 0x5f. */
9433FNIEMOP_DEF(iemOp_pop_eDI)
9434{
9435 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
9436 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
9437}
9438
9439
9440/** Opcode 0x60. */
9441FNIEMOP_DEF(iemOp_pusha)
9442{
9443 IEMOP_MNEMONIC(pusha, "pusha");
9444 IEMOP_HLP_MIN_186();
9445 IEMOP_HLP_NO_64BIT();
9446 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9447 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
9448 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9449 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
9450}
9451
9452
9453/** Opcode 0x61. */
9454FNIEMOP_DEF(iemOp_popa)
9455{
9456 IEMOP_MNEMONIC(popa, "popa");
9457 IEMOP_HLP_MIN_186();
9458 IEMOP_HLP_NO_64BIT();
9459 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9460 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
9461 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9462 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
9463}
9464
9465
9466/** Opcode 0x62. */
9467FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
9468// IEMOP_HLP_MIN_186();
9469
9470
9471/** Opcode 0x63 - non-64-bit modes. */
9472FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
9473{
9474 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
9475 IEMOP_HLP_MIN_286();
9476 IEMOP_HLP_NO_REAL_OR_V86_MODE();
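 /* ARPL raises the RPL field (bits 1:0) of the destination selector to at
 least that of the source, setting ZF when an adjustment was made. */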
9477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9478
9479 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9480 {
9481 /* Register */
9482 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9483 IEM_MC_BEGIN(3, 0);
9484 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9485 IEM_MC_ARG(uint16_t, u16Src, 1);
9486 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9487
9488 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9489 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
9490 IEM_MC_REF_EFLAGS(pEFlags);
9491 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9492
9493 IEM_MC_ADVANCE_RIP();
9494 IEM_MC_END();
9495 }
9496 else
9497 {
9498 /* Memory */
9499 IEM_MC_BEGIN(3, 2);
9500 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9501 IEM_MC_ARG(uint16_t, u16Src, 1);
9502 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9504
9505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9506 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9507 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9508 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9509 IEM_MC_FETCH_EFLAGS(EFlags);
9510 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9511
9512 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9513 IEM_MC_COMMIT_EFLAGS(EFlags);
9514 IEM_MC_ADVANCE_RIP();
9515 IEM_MC_END();
9516 }
9517 return VINF_SUCCESS;
9519}
9520
9521
9522/** Opcode 0x63.
9523 * @note This is a weird one. It works like a regular move instruction if
9524 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
9525 * @todo This definitely needs a testcase to verify the odd cases. */
9526FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9527{
9528 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
9529
9530 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
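 /* Sign-extends the 32-bit source into the 64-bit destination, e.g.
 '48 63 c3' is movsxd rax, ebx. */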
9531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9532
9533 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9534 {
9535 /*
9536 * Register to register.
9537 */
9538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9539 IEM_MC_BEGIN(0, 1);
9540 IEM_MC_LOCAL(uint64_t, u64Value);
9541 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9542 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9543 IEM_MC_ADVANCE_RIP();
9544 IEM_MC_END();
9545 }
9546 else
9547 {
9548 /*
9549 * We're loading a register from memory.
9550 */
9551 IEM_MC_BEGIN(0, 2);
9552 IEM_MC_LOCAL(uint64_t, u64Value);
9553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9556 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9557 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9558 IEM_MC_ADVANCE_RIP();
9559 IEM_MC_END();
9560 }
9561 return VINF_SUCCESS;
9562}
9563
9564
9565/** Opcode 0x64. */
9566FNIEMOP_DEF(iemOp_seg_FS)
9567{
9568 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9569 IEMOP_HLP_MIN_386();
9570
9571 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9572 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9573
9574 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9575 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9576}
9577
9578
9579/** Opcode 0x65. */
9580FNIEMOP_DEF(iemOp_seg_GS)
9581{
9582 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9583 IEMOP_HLP_MIN_386();
9584
9585 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9586 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9587
9588 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9589 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9590}
9591
9592
9593/** Opcode 0x66. */
9594FNIEMOP_DEF(iemOp_op_size)
9595{
9596 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9597 IEMOP_HLP_MIN_386();
9598
9599 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9600 iemRecalEffOpSize(pVCpu);
9601
9602 /* For the 4 entry opcode tables, the operand size prefix doesn't count
9603 when REPZ or REPNZ are present. */
9604 if (pVCpu->iem.s.idxPrefix == 0)
9605 pVCpu->iem.s.idxPrefix = 1;
9606
9607 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9608 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9609}
9610
9611
9612/** Opcode 0x67. */
9613FNIEMOP_DEF(iemOp_addr_size)
9614{
9615 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9616 IEMOP_HLP_MIN_386();
9617
9618 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
9619 switch (pVCpu->iem.s.enmDefAddrMode)
9620 {
9621 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9622 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9623 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9624 default: AssertFailed();
9625 }
9626
9627 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9628 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9629}
9630
9631
9632/** Opcode 0x68. */
9633FNIEMOP_DEF(iemOp_push_Iz)
9634{
9635 IEMOP_MNEMONIC(push_Iz, "push Iz");
9636 IEMOP_HLP_MIN_186();
9637 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9638 switch (pVCpu->iem.s.enmEffOpSize)
9639 {
9640 case IEMMODE_16BIT:
9641 {
9642 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9644 IEM_MC_BEGIN(0,0);
9645 IEM_MC_PUSH_U16(u16Imm);
9646 IEM_MC_ADVANCE_RIP();
9647 IEM_MC_END();
9648 return VINF_SUCCESS;
9649 }
9650
9651 case IEMMODE_32BIT:
9652 {
9653 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9655 IEM_MC_BEGIN(0,0);
9656 IEM_MC_PUSH_U32(u32Imm);
9657 IEM_MC_ADVANCE_RIP();
9658 IEM_MC_END();
9659 return VINF_SUCCESS;
9660 }
9661
9662 case IEMMODE_64BIT:
9663 {
9664 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9666 IEM_MC_BEGIN(0,0);
9667 IEM_MC_PUSH_U64(u64Imm);
9668 IEM_MC_ADVANCE_RIP();
9669 IEM_MC_END();
9670 return VINF_SUCCESS;
9671 }
9672
9673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9674 }
9675}
9676
9677
9678/** Opcode 0x69. */
9679FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9680{
9681 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9682 IEMOP_HLP_MIN_186();
9683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9684 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
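 /* Three-operand imul: the product is truncated to the destination width,
 with CF and OF set when significant bits were lost. */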
9685
9686 switch (pVCpu->iem.s.enmEffOpSize)
9687 {
9688 case IEMMODE_16BIT:
9689 {
9690 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9691 {
9692 /* register operand */
9693 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9695
9696 IEM_MC_BEGIN(3, 1);
9697 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9698 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9699 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9700 IEM_MC_LOCAL(uint16_t, u16Tmp);
9701
9702 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9703 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9704 IEM_MC_REF_EFLAGS(pEFlags);
9705 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9706 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9707
9708 IEM_MC_ADVANCE_RIP();
9709 IEM_MC_END();
9710 }
9711 else
9712 {
9713 /* memory operand */
9714 IEM_MC_BEGIN(3, 2);
9715 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9716 IEM_MC_ARG(uint16_t, u16Src, 1);
9717 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9718 IEM_MC_LOCAL(uint16_t, u16Tmp);
9719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9720
9721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9722 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9723 IEM_MC_ASSIGN(u16Src, u16Imm);
9724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9725 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9726 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9727 IEM_MC_REF_EFLAGS(pEFlags);
9728 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9729 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9730
9731 IEM_MC_ADVANCE_RIP();
9732 IEM_MC_END();
9733 }
9734 return VINF_SUCCESS;
9735 }
9736
9737 case IEMMODE_32BIT:
9738 {
9739 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9740 {
9741 /* register operand */
9742 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9744
9745 IEM_MC_BEGIN(3, 1);
9746 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9747 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9748 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9749 IEM_MC_LOCAL(uint32_t, u32Tmp);
9750
9751 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9752 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9753 IEM_MC_REF_EFLAGS(pEFlags);
9754 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9755 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9756
9757 IEM_MC_ADVANCE_RIP();
9758 IEM_MC_END();
9759 }
9760 else
9761 {
9762 /* memory operand */
9763 IEM_MC_BEGIN(3, 2);
9764 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9765 IEM_MC_ARG(uint32_t, u32Src, 1);
9766 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9767 IEM_MC_LOCAL(uint32_t, u32Tmp);
9768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9769
9770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9771 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9772 IEM_MC_ASSIGN(u32Src, u32Imm);
9773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9774 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9775 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9776 IEM_MC_REF_EFLAGS(pEFlags);
9777 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9778 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9779
9780 IEM_MC_ADVANCE_RIP();
9781 IEM_MC_END();
9782 }
9783 return VINF_SUCCESS;
9784 }
9785
9786 case IEMMODE_64BIT:
9787 {
9788 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9789 {
9790 /* register operand */
9791 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9793
9794 IEM_MC_BEGIN(3, 1);
9795 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9796 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9797 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9798 IEM_MC_LOCAL(uint64_t, u64Tmp);
9799
9800 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9801 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9802 IEM_MC_REF_EFLAGS(pEFlags);
9803 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9804 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9805
9806 IEM_MC_ADVANCE_RIP();
9807 IEM_MC_END();
9808 }
9809 else
9810 {
9811 /* memory operand */
9812 IEM_MC_BEGIN(3, 2);
9813 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9814 IEM_MC_ARG(uint64_t, u64Src, 1);
9815 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9816 IEM_MC_LOCAL(uint64_t, u64Tmp);
9817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9818
9819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9820 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9821 IEM_MC_ASSIGN(u64Src, u64Imm);
9822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9823 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9824 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9825 IEM_MC_REF_EFLAGS(pEFlags);
9826 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9827 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9828
9829 IEM_MC_ADVANCE_RIP();
9830 IEM_MC_END();
9831 }
9832 return VINF_SUCCESS;
9833 }
9834 }
9835 AssertFailedReturn(VERR_IEM_IPE_9);
9836}
9837
9838
9839/** Opcode 0x6a. */
9840FNIEMOP_DEF(iemOp_push_Ib)
9841{
9842 IEMOP_MNEMONIC(push_Ib, "push Ib");
9843 IEMOP_HLP_MIN_186();
9844 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9846 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9847
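 /* The byte immediate is sign-extended to the effective operand size via
 the int8_t local, so e.g. '6a ff' pushes -1 at any operand size. */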
9848 IEM_MC_BEGIN(0,0);
9849 switch (pVCpu->iem.s.enmEffOpSize)
9850 {
9851 case IEMMODE_16BIT:
9852 IEM_MC_PUSH_U16(i8Imm);
9853 break;
9854 case IEMMODE_32BIT:
9855 IEM_MC_PUSH_U32(i8Imm);
9856 break;
9857 case IEMMODE_64BIT:
9858 IEM_MC_PUSH_U64(i8Imm);
9859 break;
9860 }
9861 IEM_MC_ADVANCE_RIP();
9862 IEM_MC_END();
9863 return VINF_SUCCESS;
9864}
9865
9866
9867/** Opcode 0x6b. */
9868FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9869{
9870 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9871 IEMOP_HLP_MIN_186();
9872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9873 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9874
9875 switch (pVCpu->iem.s.enmEffOpSize)
9876 {
9877 case IEMMODE_16BIT:
9878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9879 {
9880 /* register operand */
9881 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9883
9884 IEM_MC_BEGIN(3, 1);
9885 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9886 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
9887 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9888 IEM_MC_LOCAL(uint16_t, u16Tmp);
9889
9890 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9891 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9892 IEM_MC_REF_EFLAGS(pEFlags);
9893 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9894 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9895
9896 IEM_MC_ADVANCE_RIP();
9897 IEM_MC_END();
9898 }
9899 else
9900 {
9901 /* memory operand */
9902 IEM_MC_BEGIN(3, 2);
9903 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9904 IEM_MC_ARG(uint16_t, u16Src, 1);
9905 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9906 IEM_MC_LOCAL(uint16_t, u16Tmp);
9907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9908
9909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9910 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9911 IEM_MC_ASSIGN(u16Src, u16Imm);
9912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9913 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9914 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9915 IEM_MC_REF_EFLAGS(pEFlags);
9916 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9917 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9918
9919 IEM_MC_ADVANCE_RIP();
9920 IEM_MC_END();
9921 }
9922 return VINF_SUCCESS;
9923
9924 case IEMMODE_32BIT:
9925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9926 {
9927 /* register operand */
9928 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9930
9931 IEM_MC_BEGIN(3, 1);
9932 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9933 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
9934 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9935 IEM_MC_LOCAL(uint32_t, u32Tmp);
9936
9937 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9938 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9939 IEM_MC_REF_EFLAGS(pEFlags);
9940 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9941 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9942
9943 IEM_MC_ADVANCE_RIP();
9944 IEM_MC_END();
9945 }
9946 else
9947 {
9948 /* memory operand */
9949 IEM_MC_BEGIN(3, 2);
9950 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9951 IEM_MC_ARG(uint32_t, u32Src, 1);
9952 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9953 IEM_MC_LOCAL(uint32_t, u32Tmp);
9954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9955
9956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9957 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
9958 IEM_MC_ASSIGN(u32Src, u32Imm);
9959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9960 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9961 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9962 IEM_MC_REF_EFLAGS(pEFlags);
9963 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9964 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9965
9966 IEM_MC_ADVANCE_RIP();
9967 IEM_MC_END();
9968 }
9969 return VINF_SUCCESS;
9970
9971 case IEMMODE_64BIT:
9972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9973 {
9974 /* register operand */
9975 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9977
9978 IEM_MC_BEGIN(3, 1);
9979 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9980 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
9981 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9982 IEM_MC_LOCAL(uint64_t, u64Tmp);
9983
9984 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9985 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9986 IEM_MC_REF_EFLAGS(pEFlags);
9987 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9988 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9989
9990 IEM_MC_ADVANCE_RIP();
9991 IEM_MC_END();
9992 }
9993 else
9994 {
9995 /* memory operand */
9996 IEM_MC_BEGIN(3, 2);
9997 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9998 IEM_MC_ARG(uint64_t, u64Src, 1);
9999 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10000 IEM_MC_LOCAL(uint64_t, u64Tmp);
10001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10002
10003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10004 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
10005 IEM_MC_ASSIGN(u64Src, u64Imm);
10006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10007 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10008 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10009 IEM_MC_REF_EFLAGS(pEFlags);
10010 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10011 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10012
10013 IEM_MC_ADVANCE_RIP();
10014 IEM_MC_END();
10015 }
10016 return VINF_SUCCESS;
10017 }
10018 AssertFailedReturn(VERR_IEM_IPE_8);
10019}
10020
10021
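/*
 * Opcodes 0x6c thru 0x6f are the string I/O instructions (ins/outs). These
 * all defer to C implementations, one per operand size / address size
 * combination, with the REP prefixed forms handled separately.
 */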
10022/** Opcode 0x6c. */
10023FNIEMOP_DEF(iemOp_insb_Yb_DX)
10024{
10025 IEMOP_HLP_MIN_186();
10026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10027 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10028 {
10029 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10030 switch (pVCpu->iem.s.enmEffAddrMode)
10031 {
10032 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10033 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10034 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10036 }
10037 }
10038 else
10039 {
10040 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10041 switch (pVCpu->iem.s.enmEffAddrMode)
10042 {
10043 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10044 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10045 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10047 }
10048 }
10049}
10050
10051
10052/** Opcode 0x6d. */
10053FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10054{
10055 IEMOP_HLP_MIN_186();
10056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10057 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10058 {
10059 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10060 switch (pVCpu->iem.s.enmEffOpSize)
10061 {
10062 case IEMMODE_16BIT:
10063 switch (pVCpu->iem.s.enmEffAddrMode)
10064 {
10065 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10066 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10067 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10069 }
10070 break;
10071 case IEMMODE_64BIT:
10072 case IEMMODE_32BIT:
10073 switch (pVCpu->iem.s.enmEffAddrMode)
10074 {
10075 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10076 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10077 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10078 IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x6e. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x6f. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

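
/*
 * A minimal sketch of the architectural semantics behind the rep outs
 * workers deferred to above: REP repeats the port write while rCX is
 * non-zero, reading from the (segment overridable, hence iEffSeg) source
 * and stepping rSI according to EFLAGS.DF. The portWrite callback is a
 * hypothetical stand-in for the actual I/O port access.
 */
#if 0 /* illustrative only */
static void repOutsbSketch(uint16_t uPort, uint16_t *pcReps, uint8_t const **ppbSrc,
                           bool fDF, void (*portWrite)(uint16_t uPort, uint8_t bValue))
{
    while (*pcReps != 0)
    {
        portWrite(uPort, **ppbSrc);     /* out dx, byte [seg:rSI] */
        *ppbSrc += fDF ? -1 : +1;       /* step rSI by the element size */
        *pcReps -= 1;                   /* decrement rCX */
    }
}
#endif
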

/** Opcode 0x70. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x71. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x72. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x73. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x74. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x75. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x76. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x77. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x78. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x79. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7a. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7b. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7c. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7d. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7e. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7f. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}

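
/*
 * A minimal sketch (not used by the decoder) of the EFLAGS tests behind
 * the 0x70..0x7f short jumps above, written as a plain C predicate. The
 * low opcode nibble selects the condition pair and bit 0 negates it; the
 * signed conditions combine SF and OF exactly as the IEM_MC_IF_EFL_BITS_NE
 * and IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE blocks do.
 */
#if 0 /* illustrative only */
static bool jccTakenSketch(uint8_t bOpcode, uint32_t fEFlags)
{
    bool const fCf = RT_BOOL(fEFlags & X86_EFL_CF);
    bool const fZf = RT_BOOL(fEFlags & X86_EFL_ZF);
    bool const fSf = RT_BOOL(fEFlags & X86_EFL_SF);
    bool const fOf = RT_BOOL(fEFlags & X86_EFL_OF);
    bool const fPf = RT_BOOL(fEFlags & X86_EFL_PF);
    bool fTaken;
    switch (bOpcode & 0xe)                           /* condition pair */
    {
        case 0x0: fTaken = fOf;                break; /* jo  / jno */
        case 0x2: fTaken = fCf;                break; /* jc  / jnc */
        case 0x4: fTaken = fZf;                break; /* je  / jne */
        case 0x6: fTaken = fCf || fZf;         break; /* jbe / ja  */
        case 0x8: fTaken = fSf;                break; /* js  / jns */
        case 0xa: fTaken = fPf;                break; /* jp  / jnp */
        case 0xc: fTaken = fSf != fOf;         break; /* jl  / jge */
        default:  fTaken = fZf || fSf != fOf;  break; /* jle / jg  */
    }
    return (bOpcode & 1) ? !fTaken : fTaken;          /* odd opcodes negate */
}
#endif
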

/** Opcode 0x80. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

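
/*
 * A minimal sketch (not used by the decoder) of how the ModRM masks and
 * shifts used throughout these functions pick the byte apart: mod == 3
 * selects the register form, and the reg field doubles as the /0../7
 * opcode extension for group instructions like 0x80 above. The
 * modRmFieldsSketch helper is hypothetical.
 */
#if 0 /* illustrative only */
static void modRmFieldsSketch(uint8_t bRm)
{
    uint8_t const iMod = bRm >> 6;         /* 0..2 memory forms, 3 register form */
    uint8_t const iReg = (bRm >> 3) & 7;   /* register or opcode extension (/r)  */
    uint8_t const iRm  = bRm & 7;          /* register or base/index selection   */
    /* REX.R extends iReg and REX.B extends iRm to 0..15 in 64-bit mode,
       which is what the uRexReg/uRexB ORing above does. */
    NOREF(iMod); NOREF(iReg); NOREF(iRm);
}
#endif
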

/** Opcode 0x81. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x82. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}


/** Opcode 0x83. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! The OR, AND, and XOR forms seem to be present on CPUs prior to
       the 386, even though they are absent from the Intel reference manuals
       and some 3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}

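
/*
 * A minimal sketch (not used by the decoder) of the imm8 sign extension
 * that distinguishes opcode 0x83 from 0x81: the single immediate byte is
 * widened to the effective operand size as a signed value, which is what
 * the (int8_t) casts above rely on. The helper name is hypothetical.
 */
#if 0 /* illustrative only */
static uint64_t signExtendIbSketch(uint8_t u8Imm, unsigned cOpBits)
{
    int64_t const i64 = (int8_t)u8Imm;      /* sign extend 8 -> 64 bits */
    if (cOpBits == 16)
        return (uint16_t)i64;               /* e.g. 0x83 /0: add ax, -1 */
    if (cOpBits == 32)
        return (uint32_t)i64;
    return (uint64_t)i64;
}
#endif
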

/** Opcode 0x84. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}


/** Opcode 0x85. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}


/** Opcode 0x86. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x87. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

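
/*
 * A minimal sketch of the architectural detail behind the memory paths
 * above: xchg with a memory operand is implicitly locked on x86, no LOCK
 * prefix required, which is why the operand is mapped read/write and
 * swapped under the mapping. In portable C11 the closest equivalent of
 * that read-modify-write is an atomic exchange; the helper below is a
 * hypothetical illustration, not how the emulation accesses guest memory.
 */
#if 0 /* illustrative only */
#include <stdatomic.h>
static uint32_t xchgU32Sketch(_Atomic uint32_t *puMem, uint32_t uReg)
{
    /* Returns the old memory value, just as xchg [mem], reg leaves it in reg. */
    return atomic_exchange(puMem, uReg);
}
#endif
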

/** Opcode 0x88. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x89. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x8a. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x8b. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x63. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}

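
/*
 * A minimal sketch (not used by the decoder) of the 64-bit form of opcode
 * 0x63 dispatched to above: movsxd sign extends a 32-bit source into a
 * 64-bit register, unlike the zero extension a plain 32-bit mov performs
 * on the high half. The helper name is hypothetical.
 */
#if 0 /* illustrative only */
static uint64_t movsxdSketch(uint32_t u32Src)
{
    return (uint64_t)(int64_t)(int32_t)u32Src;  /* movsxd rax, ecx */
}
#endif
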

/** Opcode 0x8c. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some Pentium models).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x8d. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}


/** Opcode 0x8e. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
     *        now until tests show it's checked. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}

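
/*
 * A minimal sketch of the rSP ordering subtlety the pop Ev comments above
 * describe, for a hypothetical 32-bit 'pop dword [esp+4]' against a flat
 * stack buffer: the stack pointer seen by the effective address
 * calculation is the already incremented one.
 */
#if 0 /* illustrative only */
static void popEvEspRelativeSketch(uint32_t *pEsp, uint8_t *pbStack)
{
    uint32_t uValue;
    memcpy(&uValue, &pbStack[*pEsp], sizeof(uValue));   /* read from the old top of stack */
    *pEsp += 4;                                         /* rSP is updated first...        */
    memcpy(&pbStack[*pEsp + 4], &uValue, sizeof(uValue)); /* ...then the EA uses new rSP  */
}
#endif
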

/** Opcode 0x8f. */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * Common 'xchg reg,rAX' helper.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x90. */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ) /* pause is F3 90, i.e. a REPZ prefixed nop. */
        IEMOP_MNEMONIC(pause, "pause");
    else
        IEMOP_MNEMONIC(nop, "nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x91. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}


/** Opcode 0x92. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}


/** Opcode 0x93. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}


/** Opcode 0x94. */
FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
{
    IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
}


/** Opcode 0x95. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}


/** Opcode 0x96. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}


/** Opcode 0x97. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}


/** Opcode 0x98. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

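
/*
 * A minimal sketch (not used by the emulation) of the three operand sizes
 * of opcode 0x98 above: each is a plain sign extension within rAX, which
 * the OR/AND masking of the IEM_MC blocks implements. The helper below
 * expresses the same thing with C casts; note that writing a 32-bit GPR
 * in 64-bit mode architecturally zeroes bits 63:32, hence no masking for
 * the cwde case.
 */
#if 0 /* illustrative only */
static uint64_t cbwFamilySketch(uint64_t uRax, unsigned cOpBits)
{
    if (cOpBits == 16)  /* cbw: al -> ax, upper 48 bits untouched */
        return (uRax & ~(uint64_t)0xffff) | (uint16_t)(int16_t)(int8_t)uRax;
    if (cOpBits == 32)  /* cwde: ax -> eax */
        return (uint32_t)(int32_t)(int16_t)uRax;
    return (uint64_t)(int64_t)(int32_t)uRax;    /* cdqe: eax -> rax */
}
#endif
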
11889
11890/** Opcode 0x99. */
11891FNIEMOP_DEF(iemOp_cwd)
11892{
11893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11894 switch (pVCpu->iem.s.enmEffOpSize)
11895 {
11896 case IEMMODE_16BIT:
11897 IEMOP_MNEMONIC(cwd, "cwd");
11898 IEM_MC_BEGIN(0, 1);
11899 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11900 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11901 } IEM_MC_ELSE() {
11902 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11903 } IEM_MC_ENDIF();
11904 IEM_MC_ADVANCE_RIP();
11905 IEM_MC_END();
11906 return VINF_SUCCESS;
11907
11908 case IEMMODE_32BIT:
11909 IEMOP_MNEMONIC(cdq, "cdq");
11910 IEM_MC_BEGIN(0, 1);
11911 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11912 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
11913 } IEM_MC_ELSE() {
11914 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
11915 } IEM_MC_ENDIF();
11916 IEM_MC_ADVANCE_RIP();
11917 IEM_MC_END();
11918 return VINF_SUCCESS;
11919
11920 case IEMMODE_64BIT:
11921 IEMOP_MNEMONIC(cqo, "cqo");
11922 IEM_MC_BEGIN(0, 1);
11923 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
11924 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
11925 } IEM_MC_ELSE() {
11926 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
11927 } IEM_MC_ENDIF();
11928 IEM_MC_ADVANCE_RIP();
11929 IEM_MC_END();
11930 return VINF_SUCCESS;
11931
11932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11933 }
11934}
11935
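/*
 * Informal sketch: CWD/CDQ/CQO broadcast the sign bit of rAX into rDX,
 * which is exactly what the conditional constant stores above do.  With
 * uAx/uDx as stand-ins for the registers:
 *
 *      uDx = (uAx & UINT16_C(0x8000)) ? UINT16_C(0xffff) : UINT16_C(0);  // cwd
 */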
11936
11937/** Opcode 0x9a. */
11938FNIEMOP_DEF(iemOp_call_Ap)
11939{
11940 IEMOP_MNEMONIC(call_Ap, "call Ap");
11941 IEMOP_HLP_NO_64BIT();
11942
11943 /* Decode the far pointer address and pass it on to the far call C implementation. */
11944 uint32_t offSeg;
11945 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11946 IEM_OPCODE_GET_NEXT_U32(&offSeg);
11947 else
11948 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
11949 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
11950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11951 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
11952}
11953
11954
11955/** Opcode 0x9b. (aka fwait) */
11956FNIEMOP_DEF(iemOp_wait)
11957{
11958 IEMOP_MNEMONIC(wait, "wait");
11959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11960
11961 IEM_MC_BEGIN(0, 0);
11962 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
11963 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11964 IEM_MC_ADVANCE_RIP();
11965 IEM_MC_END();
11966 return VINF_SUCCESS;
11967}
11968
11969
11970/** Opcode 0x9c. */
11971FNIEMOP_DEF(iemOp_pushf_Fv)
11972{
 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
11973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11974 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11975 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
11976}
11977
11978
11979/** Opcode 0x9d. */
11980FNIEMOP_DEF(iemOp_popf_Fv)
11981{
 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
11982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11983 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11984 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
11985}
11986
11987
11988/** Opcode 0x9e. */
11989FNIEMOP_DEF(iemOp_sahf)
11990{
11991 IEMOP_MNEMONIC(sahf, "sahf");
11992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11993 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11994 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11995 return IEMOP_RAISE_INVALID_OPCODE();
11996 IEM_MC_BEGIN(0, 2);
11997 IEM_MC_LOCAL(uint32_t, u32Flags);
11998 IEM_MC_LOCAL(uint32_t, EFlags);
11999 IEM_MC_FETCH_EFLAGS(EFlags);
12000 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
12001 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
12002 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
12003 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
12004 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
12005 IEM_MC_COMMIT_EFLAGS(EFlags);
12006 IEM_MC_ADVANCE_RIP();
12007 IEM_MC_END();
12008 return VINF_SUCCESS;
12009}
12010
12011
12012/** Opcode 0x9f. */
12013FNIEMOP_DEF(iemOp_lahf)
12014{
12015 IEMOP_MNEMONIC(lahf, "lahf");
12016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12017 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12018 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12019 return IEMOP_RAISE_INVALID_OPCODE();
12020 IEM_MC_BEGIN(0, 1);
12021 IEM_MC_LOCAL(uint8_t, u8Flags);
12022 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
12023 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
12024 IEM_MC_ADVANCE_RIP();
12025 IEM_MC_END();
12026 return VINF_SUCCESS;
12027}
12028
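/*
 * For reference: SAHF/LAHF move between AH and the low byte of EFLAGS,
 * whose layout is SF:ZF:0:AF:0:PF:1:CF from bit 7 down to bit 0.  That is
 * why sahf above masks AH with SF|ZF|AF|PF|CF and ORs in X86_EFL_1, e.g.
 * for the other direction:
 *
 *      uAh = (uint8_t)uEfl;                        // lahf (bit 1 is always 1)
 */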
12029
12030/**
12031 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
12032 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
12033 * prefixes. Will return on failure.
12034 * @param a_GCPtrMemOff The variable to store the offset in.
12035 */
12036#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
12037 do \
12038 { \
12039 switch (pVCpu->iem.s.enmEffAddrMode) \
12040 { \
12041 case IEMMODE_16BIT: \
12042 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
12043 break; \
12044 case IEMMODE_32BIT: \
12045 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
12046 break; \
12047 case IEMMODE_64BIT: \
12048 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
12049 break; \
12050 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12051 } \
12052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12053 } while (0)
12054
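/*
 * Informal note: opcodes 0xa0..0xa3 take no ModRM byte; the "moffs"
 * displacement follows the opcode directly and its width tracks the
 * address size, which is all the macro above decodes.  In 64-bit code the
 * unprefixed forms therefore carry a full 8-byte offset, while an 0x67
 * prefix shrinks it to 4 bytes.
 */
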
12055/** Opcode 0xa0. */
12056FNIEMOP_DEF(iemOp_mov_Al_Ob)
12057{
12058 /*
12059 * Get the offset and fend off lock prefixes.
12060 */
 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
12061 RTGCPTR GCPtrMemOff;
12062 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12063
12064 /*
12065 * Fetch AL.
12066 */
12067 IEM_MC_BEGIN(0,1);
12068 IEM_MC_LOCAL(uint8_t, u8Tmp);
12069 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12070 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12071 IEM_MC_ADVANCE_RIP();
12072 IEM_MC_END();
12073 return VINF_SUCCESS;
12074}
12075
12076
12077/** Opcode 0xa1. */
12078FNIEMOP_DEF(iemOp_mov_rAX_Ov)
12079{
12080 /*
12081 * Get the offset and fend off lock prefixes.
12082 */
12083 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
12084 RTGCPTR GCPtrMemOff;
12085 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12086
12087 /*
12088 * Fetch rAX.
12089 */
12090 switch (pVCpu->iem.s.enmEffOpSize)
12091 {
12092 case IEMMODE_16BIT:
12093 IEM_MC_BEGIN(0,1);
12094 IEM_MC_LOCAL(uint16_t, u16Tmp);
12095 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12096 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
12097 IEM_MC_ADVANCE_RIP();
12098 IEM_MC_END();
12099 return VINF_SUCCESS;
12100
12101 case IEMMODE_32BIT:
12102 IEM_MC_BEGIN(0,1);
12103 IEM_MC_LOCAL(uint32_t, u32Tmp);
12104 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12105 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
12106 IEM_MC_ADVANCE_RIP();
12107 IEM_MC_END();
12108 return VINF_SUCCESS;
12109
12110 case IEMMODE_64BIT:
12111 IEM_MC_BEGIN(0,1);
12112 IEM_MC_LOCAL(uint64_t, u64Tmp);
12113 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12114 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
12115 IEM_MC_ADVANCE_RIP();
12116 IEM_MC_END();
12117 return VINF_SUCCESS;
12118
12119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12120 }
12121}
12122
12123
12124/** Opcode 0xa2. */
12125FNIEMOP_DEF(iemOp_mov_Ob_AL)
12126{
12127 /*
12128 * Get the offset and fend off lock prefixes.
12129 */
 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
12130 RTGCPTR GCPtrMemOff;
12131 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12132
12133 /*
12134 * Store AL.
12135 */
12136 IEM_MC_BEGIN(0,1);
12137 IEM_MC_LOCAL(uint8_t, u8Tmp);
12138 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12139 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12140 IEM_MC_ADVANCE_RIP();
12141 IEM_MC_END();
12142 return VINF_SUCCESS;
12143}
12144
12145
12146/** Opcode 0xa3. */
12147FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12148{
12149 /*
12150 * Get the offset and fend off lock prefixes.
12151 */
 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
12152 RTGCPTR GCPtrMemOff;
12153 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12154
12155 /*
12156 * Store rAX.
12157 */
12158 switch (pVCpu->iem.s.enmEffOpSize)
12159 {
12160 case IEMMODE_16BIT:
12161 IEM_MC_BEGIN(0,1);
12162 IEM_MC_LOCAL(uint16_t, u16Tmp);
12163 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12164 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12165 IEM_MC_ADVANCE_RIP();
12166 IEM_MC_END();
12167 return VINF_SUCCESS;
12168
12169 case IEMMODE_32BIT:
12170 IEM_MC_BEGIN(0,1);
12171 IEM_MC_LOCAL(uint32_t, u32Tmp);
12172 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12173 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12174 IEM_MC_ADVANCE_RIP();
12175 IEM_MC_END();
12176 return VINF_SUCCESS;
12177
12178 case IEMMODE_64BIT:
12179 IEM_MC_BEGIN(0,1);
12180 IEM_MC_LOCAL(uint64_t, u64Tmp);
12181 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12182 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12183 IEM_MC_ADVANCE_RIP();
12184 IEM_MC_END();
12185 return VINF_SUCCESS;
12186
12187 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12188 }
12189}
12190
12191/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
12192#define IEM_MOVS_CASE(ValBits, AddrBits) \
12193 IEM_MC_BEGIN(0, 2); \
12194 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12195 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12196 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12197 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12198 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12199 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12200 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12201 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12202 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12203 } IEM_MC_ELSE() { \
12204 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12205 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12206 } IEM_MC_ENDIF(); \
12207 IEM_MC_ADVANCE_RIP(); \
12208 IEM_MC_END();
12209
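/*
 * Informal sketch of one MOVS iteration as the macro above lays it out,
 * with cb the operand size in bytes and step = (DF ? -cb : +cb):
 *
 *      write(es:rdi, read(seg:rsi, cb));           // seg defaults to DS
 *      rsi += step;
 *      rdi += step;
 */
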
12210/** Opcode 0xa4. */
12211FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
12212{
12213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12214
12215 /*
12216 * Use the C implementation if a repeat prefix is encountered.
12217 */
12218 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12219 {
12220 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
12221 switch (pVCpu->iem.s.enmEffAddrMode)
12222 {
12223 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
12224 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
12225 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
12226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12227 }
12228 }
12229 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
12230
12231 /*
12232 * Sharing case implementation with movs[wdq] below.
12233 */
12234 switch (pVCpu->iem.s.enmEffAddrMode)
12235 {
12236 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
12237 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
12238 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
12239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12240 }
12241 return VINF_SUCCESS;
12242}
12243
12244
12245/** Opcode 0xa5. */
12246FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12247{
12248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12249
12250 /*
12251 * Use the C implementation if a repeat prefix is encountered.
12252 */
12253 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12254 {
12255 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12256 switch (pVCpu->iem.s.enmEffOpSize)
12257 {
12258 case IEMMODE_16BIT:
12259 switch (pVCpu->iem.s.enmEffAddrMode)
12260 {
12261 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12262 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12263 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12265 }
12266 break;
12267 case IEMMODE_32BIT:
12268 switch (pVCpu->iem.s.enmEffAddrMode)
12269 {
12270 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12271 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12272 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12274 }
12275 case IEMMODE_64BIT:
12276 switch (pVCpu->iem.s.enmEffAddrMode)
12277 {
12278 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12279 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12280 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12282 }
12283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12284 }
12285 }
12286 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12287
12288 /*
12289 * Annoying double switch here.
12290 * Using ugly macro for implementing the cases, sharing it with movsb.
12291 */
12292 switch (pVCpu->iem.s.enmEffOpSize)
12293 {
12294 case IEMMODE_16BIT:
12295 switch (pVCpu->iem.s.enmEffAddrMode)
12296 {
12297 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12298 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12299 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12301 }
12302 break;
12303
12304 case IEMMODE_32BIT:
12305 switch (pVCpu->iem.s.enmEffAddrMode)
12306 {
12307 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12308 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12309 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12311 }
12312 break;
12313
12314 case IEMMODE_64BIT:
12315 switch (pVCpu->iem.s.enmEffAddrMode)
12316 {
12317 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12318 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12319 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12321 }
12322 break;
12323 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12324 }
12325 return VINF_SUCCESS;
12326}
12327
12328#undef IEM_MOVS_CASE
12329
12330/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
12331#define IEM_CMPS_CASE(ValBits, AddrBits) \
12332 IEM_MC_BEGIN(3, 3); \
12333 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
12334 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
12335 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12336 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
12337 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12338 \
12339 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12340 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
12341 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12342 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
12343 IEM_MC_REF_LOCAL(puValue1, uValue1); \
12344 IEM_MC_REF_EFLAGS(pEFlags); \
12345 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
12346 \
12347 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12348 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12349 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12350 } IEM_MC_ELSE() { \
12351 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12352 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12353 } IEM_MC_ENDIF(); \
12354 IEM_MC_ADVANCE_RIP(); \
12355 IEM_MC_END(); \
12356
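/*
 * Informal sketch of one CMPS iteration per the macro above: both operands
 * come from memory, the flags are computed as for "[rsi] - [rdi]", and
 * then both pointers step by the operand size in the DF direction:
 *
 *      set_flags_from_cmp(read(seg:rsi, cb), read(es:rdi, cb));
 *      rsi += step; rdi += step;                   // step = DF ? -cb : +cb
 */
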
12357/** Opcode 0xa6. */
12358FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
12359{
12360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12361
12362 /*
12363 * Use the C implementation if a repeat prefix is encountered.
12364 */
12365 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12366 {
12367 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
12368 switch (pVCpu->iem.s.enmEffAddrMode)
12369 {
12370 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12371 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12372 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12374 }
12375 }
12376 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12377 {
12378 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
12379 switch (pVCpu->iem.s.enmEffAddrMode)
12380 {
12381 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12382 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12383 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12385 }
12386 }
12387 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
12388
12389 /*
12390 * Sharing case implementation with cmps[wdq] below.
12391 */
12392 switch (pVCpu->iem.s.enmEffAddrMode)
12393 {
12394 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
12395 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
12396 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
12397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12398 }
12399 return VINF_SUCCESS;
12400
12401}
12402
12403
12404/** Opcode 0xa7. */
12405FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12406{
12407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12408
12409 /*
12410 * Use the C implementation if a repeat prefix is encountered.
12411 */
12412 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12413 {
12414 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12415 switch (pVCpu->iem.s.enmEffOpSize)
12416 {
12417 case IEMMODE_16BIT:
12418 switch (pVCpu->iem.s.enmEffAddrMode)
12419 {
12420 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12421 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12422 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12423 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12424 }
12425 break;
12426 case IEMMODE_32BIT:
12427 switch (pVCpu->iem.s.enmEffAddrMode)
12428 {
12429 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12430 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12431 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12433 }
12434 case IEMMODE_64BIT:
12435 switch (pVCpu->iem.s.enmEffAddrMode)
12436 {
12437 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12438 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12439 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12441 }
12442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12443 }
12444 }
12445
12446 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12447 {
12448 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12449 switch (pVCpu->iem.s.enmEffOpSize)
12450 {
12451 case IEMMODE_16BIT:
12452 switch (pVCpu->iem.s.enmEffAddrMode)
12453 {
12454 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12455 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12456 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12458 }
12459 break;
12460 case IEMMODE_32BIT:
12461 switch (pVCpu->iem.s.enmEffAddrMode)
12462 {
12463 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12464 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12465 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12467 }
12468 case IEMMODE_64BIT:
12469 switch (pVCpu->iem.s.enmEffAddrMode)
12470 {
12471 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12472 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12473 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12475 }
12476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12477 }
12478 }
12479
12480 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12481
12482 /*
12483 * Annoying double switch here.
12484 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12485 */
12486 switch (pVCpu->iem.s.enmEffOpSize)
12487 {
12488 case IEMMODE_16BIT:
12489 switch (pVCpu->iem.s.enmEffAddrMode)
12490 {
12491 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12492 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12493 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12494 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12495 }
12496 break;
12497
12498 case IEMMODE_32BIT:
12499 switch (pVCpu->iem.s.enmEffAddrMode)
12500 {
12501 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12502 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12503 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12504 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12505 }
12506 break;
12507
12508 case IEMMODE_64BIT:
12509 switch (pVCpu->iem.s.enmEffAddrMode)
12510 {
12511 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12512 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12513 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12515 }
12516 break;
12517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12518 }
12519 return VINF_SUCCESS;
12520
12521}
12522
12523#undef IEM_CMPS_CASE
12524
12525/** Opcode 0xa8. */
12526FNIEMOP_DEF(iemOp_test_AL_Ib)
12527{
12528 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
12529 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12530 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
12531}
12532
12533
12534/** Opcode 0xa9. */
12535FNIEMOP_DEF(iemOp_test_eAX_Iz)
12536{
12537 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
12538 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12539 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
12540}
12541
12542
12543/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
12544#define IEM_STOS_CASE(ValBits, AddrBits) \
12545 IEM_MC_BEGIN(0, 2); \
12546 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12547 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12548 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
12549 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12550 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12551 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12552 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12553 } IEM_MC_ELSE() { \
12554 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12555 } IEM_MC_ENDIF(); \
12556 IEM_MC_ADVANCE_RIP(); \
12557 IEM_MC_END(); \
12558
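/*
 * Informal sketch: with a REP prefix and DF clear, STOS is essentially a
 * memset over ES:rDI, which is why the repeat forms are punted to the C
 * implementations above instead of being expanded per iteration:
 *
 *      while (rcx != 0) { write(es:rdi, rax, cb); rdi += cb; rcx--; }
 */
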
12559/** Opcode 0xaa. */
12560FNIEMOP_DEF(iemOp_stosb_Yb_AL)
12561{
12562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12563
12564 /*
12565 * Use the C implementation if a repeat prefix is encountered.
12566 */
12567 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12568 {
12569 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
12570 switch (pVCpu->iem.s.enmEffAddrMode)
12571 {
12572 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
12573 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
12574 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
12575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12576 }
12577 }
12578 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
12579
12580 /*
12581 * Sharing case implementation with stos[wdq] below.
12582 */
12583 switch (pVCpu->iem.s.enmEffAddrMode)
12584 {
12585 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
12586 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
12587 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
12588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12589 }
12590 return VINF_SUCCESS;
12591}
12592
12593
12594/** Opcode 0xab. */
12595FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12596{
12597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12598
12599 /*
12600 * Use the C implementation if a repeat prefix is encountered.
12601 */
12602 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12603 {
12604 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12605 switch (pVCpu->iem.s.enmEffOpSize)
12606 {
12607 case IEMMODE_16BIT:
12608 switch (pVCpu->iem.s.enmEffAddrMode)
12609 {
12610 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12611 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12612 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12614 }
12615 break;
12616 case IEMMODE_32BIT:
12617 switch (pVCpu->iem.s.enmEffAddrMode)
12618 {
12619 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12620 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12621 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12622 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12623 }
12624 case IEMMODE_64BIT:
12625 switch (pVCpu->iem.s.enmEffAddrMode)
12626 {
12627 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12628 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12629 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12630 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12631 }
12632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12633 }
12634 }
12635 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12636
12637 /*
12638 * Annoying double switch here.
12639 * Using ugly macro for implementing the cases, sharing it with stosb.
12640 */
12641 switch (pVCpu->iem.s.enmEffOpSize)
12642 {
12643 case IEMMODE_16BIT:
12644 switch (pVCpu->iem.s.enmEffAddrMode)
12645 {
12646 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12647 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12648 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12650 }
12651 break;
12652
12653 case IEMMODE_32BIT:
12654 switch (pVCpu->iem.s.enmEffAddrMode)
12655 {
12656 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12657 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12658 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12659 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12660 }
12661 break;
12662
12663 case IEMMODE_64BIT:
12664 switch (pVCpu->iem.s.enmEffAddrMode)
12665 {
12666 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12667 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12668 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12669 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12670 }
12671 break;
12672 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12673 }
12674 return VINF_SUCCESS;
12675}
12676
12677#undef IEM_STOS_CASE
12678
12679/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
12680#define IEM_LODS_CASE(ValBits, AddrBits) \
12681 IEM_MC_BEGIN(0, 2); \
12682 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12683 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12684 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12685 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12686 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
12687 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12688 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12689 } IEM_MC_ELSE() { \
12690 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12691 } IEM_MC_ENDIF(); \
12692 IEM_MC_ADVANCE_RIP(); \
12693 IEM_MC_END();
12694
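/*
 * Informal sketch of one LODS iteration per the macro above; only rSI
 * steps, the low cb bytes of rAX are simply overwritten:
 *
 *      rax = read(seg:rsi, cb);
 *      rsi += DF ? -cb : +cb;
 */
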
12695/** Opcode 0xac. */
12696FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
12697{
12698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12699
12700 /*
12701 * Use the C implementation if a repeat prefix is encountered.
12702 */
12703 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12704 {
12705 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
12706 switch (pVCpu->iem.s.enmEffAddrMode)
12707 {
12708 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
12709 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
12710 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
12711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12712 }
12713 }
12714 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
12715
12716 /*
12717 * Sharing case implementation with lods[wdq] below.
12718 */
12719 switch (pVCpu->iem.s.enmEffAddrMode)
12720 {
12721 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
12722 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
12723 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
12724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12725 }
12726 return VINF_SUCCESS;
12727}
12728
12729
12730/** Opcode 0xad. */
12731FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12732{
12733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12734
12735 /*
12736 * Use the C implementation if a repeat prefix is encountered.
12737 */
12738 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12739 {
12740 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12741 switch (pVCpu->iem.s.enmEffOpSize)
12742 {
12743 case IEMMODE_16BIT:
12744 switch (pVCpu->iem.s.enmEffAddrMode)
12745 {
12746 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12747 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12748 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12749 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12750 }
12751 break;
12752 case IEMMODE_32BIT:
12753 switch (pVCpu->iem.s.enmEffAddrMode)
12754 {
12755 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12756 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12757 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12759 }
12760 case IEMMODE_64BIT:
12761 switch (pVCpu->iem.s.enmEffAddrMode)
12762 {
12763 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12764 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12765 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12767 }
12768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12769 }
12770 }
12771 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12772
12773 /*
12774 * Annoying double switch here.
12775 * Using ugly macro for implementing the cases, sharing it with lodsb.
12776 */
12777 switch (pVCpu->iem.s.enmEffOpSize)
12778 {
12779 case IEMMODE_16BIT:
12780 switch (pVCpu->iem.s.enmEffAddrMode)
12781 {
12782 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12783 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12784 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12786 }
12787 break;
12788
12789 case IEMMODE_32BIT:
12790 switch (pVCpu->iem.s.enmEffAddrMode)
12791 {
12792 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12793 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12794 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12796 }
12797 break;
12798
12799 case IEMMODE_64BIT:
12800 switch (pVCpu->iem.s.enmEffAddrMode)
12801 {
12802 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12803 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12804 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12805 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12806 }
12807 break;
12808 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12809 }
12810 return VINF_SUCCESS;
12811}
12812
12813#undef IEM_LODS_CASE
12814
12815/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12816#define IEM_SCAS_CASE(ValBits, AddrBits) \
12817 IEM_MC_BEGIN(3, 2); \
12818 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12819 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12820 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12821 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12822 \
12823 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12824 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12825 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12826 IEM_MC_REF_EFLAGS(pEFlags); \
12827 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12828 \
12829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12830 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12831 } IEM_MC_ELSE() { \
12832 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12833 } IEM_MC_ENDIF(); \
12834 IEM_MC_ADVANCE_RIP(); \
12835 IEM_MC_END();
12836
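/*
 * Informal sketch: "repne scasb" with AL=0 and DF clear is the classic
 * strlen idiom; rDI scans ES memory until AL matches or rCX runs out (the
 * repeat forms again defer to the C implementations above):
 *
 *      while (rcx != 0) { rcx--; zf = (al == read(es:rdi, 1)); rdi++; if (zf) break; }
 */
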
12837/** Opcode 0xae. */
12838FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12839{
12840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12841
12842 /*
12843 * Use the C implementation if a repeat prefix is encountered.
12844 */
12845 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12846 {
12847 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12848 switch (pVCpu->iem.s.enmEffAddrMode)
12849 {
12850 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12851 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12852 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12853 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12854 }
12855 }
12856 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12857 {
12858 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
12859 switch (pVCpu->iem.s.enmEffAddrMode)
12860 {
12861 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12862 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12863 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12864 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12865 }
12866 }
12867 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12868
12869 /*
12870 * Sharing case implementation with scas[wdq] below.
12871 */
12872 switch (pVCpu->iem.s.enmEffAddrMode)
12873 {
12874 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12875 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12876 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12878 }
12879 return VINF_SUCCESS;
12880}
12881
12882
12883/** Opcode 0xaf. */
12884FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12885{
12886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12887
12888 /*
12889 * Use the C implementation if a repeat prefix is encountered.
12890 */
12891 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12892 {
12893 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12894 switch (pVCpu->iem.s.enmEffOpSize)
12895 {
12896 case IEMMODE_16BIT:
12897 switch (pVCpu->iem.s.enmEffAddrMode)
12898 {
12899 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12900 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12901 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12903 }
12904 break;
12905 case IEMMODE_32BIT:
12906 switch (pVCpu->iem.s.enmEffAddrMode)
12907 {
12908 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12909 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12910 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12912 }
12913 case IEMMODE_64BIT:
12914 switch (pVCpu->iem.s.enmEffAddrMode)
12915 {
12916 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Double-check: 16-bit addressing cannot be encoded in 64-bit mode (the 0x67 prefix gives 32-bit), so asserting here should be correct. */
12917 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12918 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12919 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12920 }
12921 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12922 }
12923 }
12924 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12925 {
12926 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
12927 switch (pVCpu->iem.s.enmEffOpSize)
12928 {
12929 case IEMMODE_16BIT:
12930 switch (pVCpu->iem.s.enmEffAddrMode)
12931 {
12932 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12933 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12934 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12935 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12936 }
12937 break;
12938 case IEMMODE_32BIT:
12939 switch (pVCpu->iem.s.enmEffAddrMode)
12940 {
12941 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12942 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12943 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12944 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12945 }
12946 case IEMMODE_64BIT:
12947 switch (pVCpu->iem.s.enmEffAddrMode)
12948 {
12949 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
12950 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12951 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12953 }
12954 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12955 }
12956 }
12957 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
12958
12959 /*
12960 * Annoying double switch here.
12961 * Using ugly macro for implementing the cases, sharing it with scasb.
12962 */
12963 switch (pVCpu->iem.s.enmEffOpSize)
12964 {
12965 case IEMMODE_16BIT:
12966 switch (pVCpu->iem.s.enmEffAddrMode)
12967 {
12968 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12969 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12970 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12972 }
12973 break;
12974
12975 case IEMMODE_32BIT:
12976 switch (pVCpu->iem.s.enmEffAddrMode)
12977 {
12978 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12979 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12980 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12982 }
12983 break;
12984
12985 case IEMMODE_64BIT:
12986 switch (pVCpu->iem.s.enmEffAddrMode)
12987 {
12988 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12989 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12990 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12992 }
12993 break;
12994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12995 }
12996 return VINF_SUCCESS;
12997}
12998
12999#undef IEM_SCAS_CASE
13000
13001/**
13002 * Common 'mov r8, imm8' helper.
13003 */
13004FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
13005{
13006 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13008
13009 IEM_MC_BEGIN(0, 1);
13010 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
13011 IEM_MC_STORE_GREG_U8(iReg, u8Value);
13012 IEM_MC_ADVANCE_RIP();
13013 IEM_MC_END();
13014
13015 return VINF_SUCCESS;
13016}
13017
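/*
 * Note for the 0xb4..0xb7 handlers below: without a REX prefix, register
 * numbers 4..7 in a byte-sized context select the legacy high-byte
 * registers AH/CH/DH/BH (the high bytes of rAX..rBX), which is why those
 * handlers pass X86_GREG_xSP..X86_GREG_xDI.  With any REX prefix present
 * the same numbers mean SPL/BPL/SIL/DIL, and REX.B (uRexB) bumps them up
 * to R12B..R15B.
 */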
13018
13019/** Opcode 0xb0. */
13020FNIEMOP_DEF(iemOp_mov_AL_Ib)
13021{
13022 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
13023 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13024}
13025
13026
13027/** Opcode 0xb1. */
13028FNIEMOP_DEF(iemOp_CL_Ib)
13029{
13030 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
13031 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13032}
13033
13034
13035/** Opcode 0xb2. */
13036FNIEMOP_DEF(iemOp_DL_Ib)
13037{
13038 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
13039 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13040}
13041
13042
13043/** Opcode 0xb3. */
13044FNIEMOP_DEF(iemOp_BL_Ib)
13045{
13046 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
13047 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13048}
13049
13050
13051/** Opcode 0xb4. */
13052FNIEMOP_DEF(iemOp_mov_AH_Ib)
13053{
13054 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
13055 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13056}
13057
13058
13059/** Opcode 0xb5. */
13060FNIEMOP_DEF(iemOp_CH_Ib)
13061{
13062 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
13063 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13064}
13065
13066
13067/** Opcode 0xb6. */
13068FNIEMOP_DEF(iemOp_DH_Ib)
13069{
13070 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
13071 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13072}
13073
13074
13075/** Opcode 0xb7. */
13076FNIEMOP_DEF(iemOp_BH_Ib)
13077{
13078 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
13079 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13080}
13081
13082
13083/**
13084 * Common 'mov regX,immX' helper.
13085 */
13086FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13087{
13088 switch (pVCpu->iem.s.enmEffOpSize)
13089 {
13090 case IEMMODE_16BIT:
13091 {
13092 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13094
13095 IEM_MC_BEGIN(0, 1);
13096 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13097 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13098 IEM_MC_ADVANCE_RIP();
13099 IEM_MC_END();
13100 break;
13101 }
13102
13103 case IEMMODE_32BIT:
13104 {
13105 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13107
13108 IEM_MC_BEGIN(0, 1);
13109 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13110 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13111 IEM_MC_ADVANCE_RIP();
13112 IEM_MC_END();
13113 break;
13114 }
13115 case IEMMODE_64BIT:
13116 {
13117 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13119
13120 IEM_MC_BEGIN(0, 1);
13121 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13122 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13123 IEM_MC_ADVANCE_RIP();
13124 IEM_MC_END();
13125 break;
13126 }
13127 }
13128
13129 return VINF_SUCCESS;
13130}
13131
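/*
 * Informal note: 0xb8+r is the only general mov form that can carry a full
 * 8-byte immediate (with REX.W; "movabs" in AT&T syntax), as the 64-bit
 * case above shows; the 16/32-bit cases simply use the effective operand
 * size.  Example encoding (immediate stored little endian):
 *
 *      48 b8 88 77 66 55 44 33 22 11   mov rax, 0x1122334455667788
 */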
13132
13133/** Opcode 0xb8. */
13134FNIEMOP_DEF(iemOp_eAX_Iv)
13135{
13136 IEMOP_MNEMONIC(mov_rAX_Iv, "mov rAX,Iv");
13137 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13138}
13139
13140
13141/** Opcode 0xb9. */
13142FNIEMOP_DEF(iemOp_eCX_Iv)
13143{
13144 IEMOP_MNEMONIC(mov_rCX_Iv, "mov rCX,Iv");
13145 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13146}
13147
13148
13149/** Opcode 0xba. */
13150FNIEMOP_DEF(iemOp_eDX_Iv)
13151{
13152 IEMOP_MNEMONIC(mov_rDX_Iv, "mov rDX,Iv");
13153 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13154}
13155
13156
13157/** Opcode 0xbb. */
13158FNIEMOP_DEF(iemOp_eBX_Iv)
13159{
13160 IEMOP_MNEMONIC(mov_rBX_Iv, "mov rBX,Iv");
13161 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13162}
13163
13164
13165/** Opcode 0xbc. */
13166FNIEMOP_DEF(iemOp_eSP_Iv)
13167{
13168 IEMOP_MNEMONIC(mov_rSP_Iv, "mov rSP,Iv");
13169 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13170}
13171
13172
13173/** Opcode 0xbd. */
13174FNIEMOP_DEF(iemOp_eBP_Iv)
13175{
13176 IEMOP_MNEMONIC(mov_rBP_Iv, "mov rBP,Iv");
13177 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13178}
13179
13180
13181/** Opcode 0xbe. */
13182FNIEMOP_DEF(iemOp_eSI_Iv)
13183{
13184 IEMOP_MNEMONIC(mov_rSI_Iv, "mov rSI,Iv");
13185 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13186}
13187
13188
13189/** Opcode 0xbf. */
13190FNIEMOP_DEF(iemOp_eDI_Iv)
13191{
13192 IEMOP_MNEMONIC(mov_rDI_Iv, "mov rDI,Iv");
13193 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13194}
13195
13196
13197/** Opcode 0xc0. */
13198FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
13199{
13200 IEMOP_HLP_MIN_186();
13201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13202 PCIEMOPSHIFTSIZES pImpl;
13203 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13204 {
13205 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
13206 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
13207 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
13208 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
13209 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
13210 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
13211 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
13212 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13213 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* silences a bogus gcc maybe-uninitialized warning for pImpl */
13214 }
13215 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13216
13217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13218 {
13219 /* register */
13220 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13222 IEM_MC_BEGIN(3, 0);
13223 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13224 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13225 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13226 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13227 IEM_MC_REF_EFLAGS(pEFlags);
13228 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13229 IEM_MC_ADVANCE_RIP();
13230 IEM_MC_END();
13231 }
13232 else
13233 {
13234 /* memory */
13235 IEM_MC_BEGIN(3, 2);
13236 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13237 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13238 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13240
13241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13242 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13243 IEM_MC_ASSIGN(cShiftArg, cShift);
13244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13245 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13246 IEM_MC_FETCH_EFLAGS(EFlags);
13247 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13248
13249 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13250 IEM_MC_COMMIT_EFLAGS(EFlags);
13251 IEM_MC_ADVANCE_RIP();
13252 IEM_MC_END();
13253 }
13254 return VINF_SUCCESS;
13255}
13256
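/*
 * Informal decode examples for the group-2 handlers: the ModRM reg field
 * selects the operation, so "c0 e0 04" is /4 = shl al,4 and "c0 c8 02" is
 * /1 = ror al,2, while /6 has no assignment here and raises #UD.
 */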
13257
13258/** Opcode 0xc1. */
13259FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
13260{
13261 IEMOP_HLP_MIN_186();
13262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13263 PCIEMOPSHIFTSIZES pImpl;
13264 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13265 {
13266 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
13267 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
13268 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
13269 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
13270 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
13271 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
13272 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
13273 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13274 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* silences a bogus gcc maybe-uninitialized warning for pImpl */
13275 }
13276 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13277
13278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13279 {
13280 /* register */
13281 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13283 switch (pVCpu->iem.s.enmEffOpSize)
13284 {
13285 case IEMMODE_16BIT:
13286 IEM_MC_BEGIN(3, 0);
13287 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13288 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13289 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13290 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13291 IEM_MC_REF_EFLAGS(pEFlags);
13292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13293 IEM_MC_ADVANCE_RIP();
13294 IEM_MC_END();
13295 return VINF_SUCCESS;
13296
13297 case IEMMODE_32BIT:
13298 IEM_MC_BEGIN(3, 0);
13299 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13300 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13301 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13302 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13303 IEM_MC_REF_EFLAGS(pEFlags);
13304 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13305 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13306 IEM_MC_ADVANCE_RIP();
13307 IEM_MC_END();
13308 return VINF_SUCCESS;
13309
13310 case IEMMODE_64BIT:
13311 IEM_MC_BEGIN(3, 0);
13312 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13313 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13314 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13315 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13316 IEM_MC_REF_EFLAGS(pEFlags);
13317 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13318 IEM_MC_ADVANCE_RIP();
13319 IEM_MC_END();
13320 return VINF_SUCCESS;
13321
13322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13323 }
13324 }
13325 else
13326 {
13327 /* memory */
13328 switch (pVCpu->iem.s.enmEffOpSize)
13329 {
13330 case IEMMODE_16BIT:
13331 IEM_MC_BEGIN(3, 2);
13332 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13333 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13334 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13336
13337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13338 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13339 IEM_MC_ASSIGN(cShiftArg, cShift);
13340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13341 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13342 IEM_MC_FETCH_EFLAGS(EFlags);
13343 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13344
13345 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13346 IEM_MC_COMMIT_EFLAGS(EFlags);
13347 IEM_MC_ADVANCE_RIP();
13348 IEM_MC_END();
13349 return VINF_SUCCESS;
13350
13351 case IEMMODE_32BIT:
13352 IEM_MC_BEGIN(3, 2);
13353 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13354 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13355 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13357
13358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13359 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13360 IEM_MC_ASSIGN(cShiftArg, cShift);
13361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13362 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13363 IEM_MC_FETCH_EFLAGS(EFlags);
13364 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13365
13366 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13367 IEM_MC_COMMIT_EFLAGS(EFlags);
13368 IEM_MC_ADVANCE_RIP();
13369 IEM_MC_END();
13370 return VINF_SUCCESS;
13371
13372 case IEMMODE_64BIT:
13373 IEM_MC_BEGIN(3, 2);
13374 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13375 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13376 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13378
13379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13380 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13381 IEM_MC_ASSIGN(cShiftArg, cShift);
13382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13383 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13384 IEM_MC_FETCH_EFLAGS(EFlags);
13385 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13386
13387 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13388 IEM_MC_COMMIT_EFLAGS(EFlags);
13389 IEM_MC_ADVANCE_RIP();
13390 IEM_MC_END();
13391 return VINF_SUCCESS;
13392
13393 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13394 }
13395 }
13396}
13397
13398
13399/** Opcode 0xc2. */
13400FNIEMOP_DEF(iemOp_retn_Iw)
13401{
13402 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
13403 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13405 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13406 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
13407}
13408
13409
13410/** Opcode 0xc3. */
13411FNIEMOP_DEF(iemOp_retn)
13412{
13413 IEMOP_MNEMONIC(retn, "retn");
13414 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13416 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
13417}
13418
13419
13420/** Opcode 0xc4. */
13421FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
13422{
13423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13424 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
13425 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13426 {
13427 IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
13428 /* The LES instruction is invalid in 64-bit mode. In legacy and
13429 compatibility mode it is invalid with MOD=3.
13430 The use as a VEX prefix is made possible by assigning the inverted
13431 REX.R to the top MOD bit, and the top bit in the inverted register
13432 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
13433 code to accessing registers 0..7 in this VEX form. */
13434 /** @todo VEX: Just use new tables for it. */
13435 return IEMOP_RAISE_INVALID_OPCODE();
13436 }
13437 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
13438 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
13439}
13440
13441
13442/** Opcode 0xc5. */
13443FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
13444{
13445 /* The LDS instruction is invalid in 64-bit mode. In legacy and
13446 compatibility mode it is invalid with MOD=3.
13447 The use as a VEX prefix is made possible by assigning the inverted
13448 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
13449 outside of 64-bit mode. VEX is not available in real or v86 mode. */
13450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13451 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
13452 {
13453 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
13454 {
13455 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
13456 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
13457 }
13458 IEMOP_HLP_NO_REAL_OR_V86_MODE();
13459 }
13460
13461 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
13462 /** @todo Test when exactly the VEX conformance checks kick in during
13463 * instruction decoding and fetching (using \#PF). */
13464 uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
13465 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
13466 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
13467#if 0 /* will make sense of this next week... */
13468 if ( !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
13469 &&
13470 )
13471 {
13472
13473 }
13474#endif
13475
13476 /** @todo VEX: Just use new tables for it. */
13477 return IEMOP_RAISE_INVALID_OPCODE();
13478}
13479
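/*
 * For reference, the VEX layouts these stubs will eventually decode per
 * the Intel SDM: the two-byte form is C5 [R'vvvvLpp] and the three-byte
 * form is C4 [R'X'B'mmmmm] [WvvvvLpp], with R/X/B stored inverted.
 * Outside 64-bit mode the prefix is only recognised when the would-be
 * ModRM has MOD=3, the encoding LES/LDS leave unused; everything here
 * still ends in #UD pending the new decoder tables.
 */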
13480
13481/** Opcode 0xc6. */
13482FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
13483{
13484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13485 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
13486 return IEMOP_RAISE_INVALID_OPCODE();
13487 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
13488
13489 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13490 {
13491 /* register access */
13492 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13494 IEM_MC_BEGIN(0, 0);
13495 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
13496 IEM_MC_ADVANCE_RIP();
13497 IEM_MC_END();
13498 }
13499 else
13500 {
13501 /* memory access. */
13502 IEM_MC_BEGIN(0, 1);
13503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13505 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13507 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
13508 IEM_MC_ADVANCE_RIP();
13509 IEM_MC_END();
13510 }
13511 return VINF_SUCCESS;
13512}
13513
13514
13515/** Opcode 0xc7. */
13516FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
13517{
13518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13519 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
13520 return IEMOP_RAISE_INVALID_OPCODE();
13521 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
13522
13523 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13524 {
13525 /* register access */
13526 switch (pVCpu->iem.s.enmEffOpSize)
13527 {
13528 case IEMMODE_16BIT:
13529 IEM_MC_BEGIN(0, 0);
13530 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13532 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
13533 IEM_MC_ADVANCE_RIP();
13534 IEM_MC_END();
13535 return VINF_SUCCESS;
13536
13537 case IEMMODE_32BIT:
13538 IEM_MC_BEGIN(0, 0);
13539 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13541 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
13542 IEM_MC_ADVANCE_RIP();
13543 IEM_MC_END();
13544 return VINF_SUCCESS;
13545
13546 case IEMMODE_64BIT:
13547 IEM_MC_BEGIN(0, 0);
13548 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13550 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
13551 IEM_MC_ADVANCE_RIP();
13552 IEM_MC_END();
13553 return VINF_SUCCESS;
13554
13555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13556 }
13557 }
13558 else
13559 {
13560 /* memory access. */
13561 switch (pVCpu->iem.s.enmEffOpSize)
13562 {
13563 case IEMMODE_16BIT:
13564 IEM_MC_BEGIN(0, 1);
13565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13567 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13569 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
13570 IEM_MC_ADVANCE_RIP();
13571 IEM_MC_END();
13572 return VINF_SUCCESS;
13573
13574 case IEMMODE_32BIT:
13575 IEM_MC_BEGIN(0, 1);
13576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13578 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13580 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
13581 IEM_MC_ADVANCE_RIP();
13582 IEM_MC_END();
13583 return VINF_SUCCESS;
13584
13585 case IEMMODE_64BIT:
13586 IEM_MC_BEGIN(0, 1);
13587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13589 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13591 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
13592 IEM_MC_ADVANCE_RIP();
13593 IEM_MC_END();
13594 return VINF_SUCCESS;
13595
13596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13597 }
13598 }
13599}
13600
13601
13602
13603
13604/** Opcode 0xc8. */
13605FNIEMOP_DEF(iemOp_enter_Iw_Ib)
13606{
13607 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
13608 IEMOP_HLP_MIN_186();
13609 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13610 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
13611 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
13612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
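 /* Architecturally ENTER pushes rBP, saves rSP, copies u8NestingLevel-1 outer
    frame pointers when nesting is used, sets rBP to the saved rSP and finally
    subtracts cbFrame from rSP; all of that is deferred to iemCImpl_enter. */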
13613 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
13614}
13615
13616
13617/** Opcode 0xc9. */
13618FNIEMOP_DEF(iemOp_leave)
13619{
13620 IEMOP_MNEMONIC(leave, "leave");
13621 IEMOP_HLP_MIN_186();
13622 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
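 /* LEAVE is the matching epilogue: rSP = rBP, then pop rBP; iemCImpl_leave
    handles the stack address size details. */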
13624 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
13625}
13626
13627
13628/** Opcode 0xca. */
13629FNIEMOP_DEF(iemOp_retf_Iw)
13630{
13631 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
13632 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13634 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13635 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
13636}
13637
13638
13639/** Opcode 0xcb. */
13640FNIEMOP_DEF(iemOp_retf)
13641{
13642 IEMOP_MNEMONIC(retf, "retf");
13643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13644 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13645 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
13646}
13647
13648
13649/** Opcode 0xcc. */
13650FNIEMOP_DEF(iemOp_int_3)
13651{
 IEMOP_MNEMONIC(int3, "int3");
13652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13653 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13654}
13655
13656
13657/** Opcode 0xcd. */
13658FNIEMOP_DEF(iemOp_int_Ib)
13659{
 IEMOP_MNEMONIC(int_Ib, "int Ib");
13660 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13662 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13663}
13664
13665
13666/** Opcode 0xce. */
13667FNIEMOP_DEF(iemOp_into)
13668{
13669 IEMOP_MNEMONIC(into, "into");
13670 IEMOP_HLP_NO_64BIT();
13671
13672 IEM_MC_BEGIN(2, 0);
13673 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13674 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13675 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13676 IEM_MC_END();
13677 return VINF_SUCCESS;
13678}
13679
13680
13681/** Opcode 0xcf. */
13682FNIEMOP_DEF(iemOp_iret)
13683{
13684 IEMOP_MNEMONIC(iret, "iret");
13685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13686 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13687}
13688
13689
13690/** Opcode 0xd0. */
13691FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13692{
13693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13694 PCIEMOPSHIFTSIZES pImpl;
13695 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13696 {
13697 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13698 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13699 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13700 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13701 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13702 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13703 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13704 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13705 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc can't tell the above cases are exhaustive. */
13706 }
13707 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13708
13709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13710 {
13711 /* register */
13712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13713 IEM_MC_BEGIN(3, 0);
13714 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13715 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13716 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13717 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13718 IEM_MC_REF_EFLAGS(pEFlags);
13719 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13720 IEM_MC_ADVANCE_RIP();
13721 IEM_MC_END();
13722 }
13723 else
13724 {
13725 /* memory */
13726 IEM_MC_BEGIN(3, 2);
13727 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13728 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13729 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13731
13732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13734 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13735 IEM_MC_FETCH_EFLAGS(EFlags);
13736 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13737
13738 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13739 IEM_MC_COMMIT_EFLAGS(EFlags);
13740 IEM_MC_ADVANCE_RIP();
13741 IEM_MC_END();
13742 }
13743 return VINF_SUCCESS;
13744}
13745
13746
13747
13748/** Opcode 0xd1. */
13749FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13750{
13751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13752 PCIEMOPSHIFTSIZES pImpl;
13753 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13754 {
13755 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13756 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13757 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13758 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13759 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13760 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13761 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13762 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13763 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc can't tell the above cases are exhaustive. */
13764 }
13765 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13766
13767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13768 {
13769 /* register */
13770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13771 switch (pVCpu->iem.s.enmEffOpSize)
13772 {
13773 case IEMMODE_16BIT:
13774 IEM_MC_BEGIN(3, 0);
13775 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13776 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13777 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13778 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13779 IEM_MC_REF_EFLAGS(pEFlags);
13780 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13781 IEM_MC_ADVANCE_RIP();
13782 IEM_MC_END();
13783 return VINF_SUCCESS;
13784
13785 case IEMMODE_32BIT:
13786 IEM_MC_BEGIN(3, 0);
13787 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13788 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13789 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13790 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13791 IEM_MC_REF_EFLAGS(pEFlags);
13792 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13793 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13794 IEM_MC_ADVANCE_RIP();
13795 IEM_MC_END();
13796 return VINF_SUCCESS;
13797
13798 case IEMMODE_64BIT:
13799 IEM_MC_BEGIN(3, 0);
13800 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13801 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13802 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13803 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13804 IEM_MC_REF_EFLAGS(pEFlags);
13805 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13806 IEM_MC_ADVANCE_RIP();
13807 IEM_MC_END();
13808 return VINF_SUCCESS;
13809
13810 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13811 }
13812 }
13813 else
13814 {
13815 /* memory */
13816 switch (pVCpu->iem.s.enmEffOpSize)
13817 {
13818 case IEMMODE_16BIT:
13819 IEM_MC_BEGIN(3, 2);
13820 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13821 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13822 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13824
13825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13827 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13828 IEM_MC_FETCH_EFLAGS(EFlags);
13829 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13830
13831 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13832 IEM_MC_COMMIT_EFLAGS(EFlags);
13833 IEM_MC_ADVANCE_RIP();
13834 IEM_MC_END();
13835 return VINF_SUCCESS;
13836
13837 case IEMMODE_32BIT:
13838 IEM_MC_BEGIN(3, 2);
13839 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13840 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13841 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13843
13844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13846 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13847 IEM_MC_FETCH_EFLAGS(EFlags);
13848 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13849
13850 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13851 IEM_MC_COMMIT_EFLAGS(EFlags);
13852 IEM_MC_ADVANCE_RIP();
13853 IEM_MC_END();
13854 return VINF_SUCCESS;
13855
13856 case IEMMODE_64BIT:
13857 IEM_MC_BEGIN(3, 2);
13858 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13859 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13860 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13862
13863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13865 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13866 IEM_MC_FETCH_EFLAGS(EFlags);
13867 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13868
13869 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13870 IEM_MC_COMMIT_EFLAGS(EFlags);
13871 IEM_MC_ADVANCE_RIP();
13872 IEM_MC_END();
13873 return VINF_SUCCESS;
13874
13875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13876 }
13877 }
13878}
13879
13880
13881/** Opcode 0xd2. */
13882FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13883{
13884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13885 PCIEMOPSHIFTSIZES pImpl;
13886 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13887 {
13888 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13889 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13890 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13891 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13892 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13893 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13894 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13895 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13896 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13897 }
13898 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13899
13900 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13901 {
13902 /* register */
13903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13904 IEM_MC_BEGIN(3, 0);
13905 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13906 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13907 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13908 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13909 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13910 IEM_MC_REF_EFLAGS(pEFlags);
13911 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13912 IEM_MC_ADVANCE_RIP();
13913 IEM_MC_END();
13914 }
13915 else
13916 {
13917 /* memory */
13918 IEM_MC_BEGIN(3, 2);
13919 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13920 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13921 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13923
13924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13926 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13927 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13928 IEM_MC_FETCH_EFLAGS(EFlags);
13929 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13930
13931 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13932 IEM_MC_COMMIT_EFLAGS(EFlags);
13933 IEM_MC_ADVANCE_RIP();
13934 IEM_MC_END();
13935 }
13936 return VINF_SUCCESS;
13937}
13938
13939
13940/** Opcode 0xd3. */
13941FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
13942{
13943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13944 PCIEMOPSHIFTSIZES pImpl;
13945 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13946 {
13947 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
13948 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
13949 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
13950 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
13951 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
13952 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
13953 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
13954 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13955 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc can't tell the above cases are exhaustive. */
13956 }
13957 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13958
13959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13960 {
13961 /* register */
13962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13963 switch (pVCpu->iem.s.enmEffOpSize)
13964 {
13965 case IEMMODE_16BIT:
13966 IEM_MC_BEGIN(3, 0);
13967 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13968 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13969 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13970 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13971 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13972 IEM_MC_REF_EFLAGS(pEFlags);
13973 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13974 IEM_MC_ADVANCE_RIP();
13975 IEM_MC_END();
13976 return VINF_SUCCESS;
13977
13978 case IEMMODE_32BIT:
13979 IEM_MC_BEGIN(3, 0);
13980 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13981 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13982 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13983 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13984 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13985 IEM_MC_REF_EFLAGS(pEFlags);
13986 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13987 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13988 IEM_MC_ADVANCE_RIP();
13989 IEM_MC_END();
13990 return VINF_SUCCESS;
13991
13992 case IEMMODE_64BIT:
13993 IEM_MC_BEGIN(3, 0);
13994 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13995 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13996 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13997 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13998 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13999 IEM_MC_REF_EFLAGS(pEFlags);
14000 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14001 IEM_MC_ADVANCE_RIP();
14002 IEM_MC_END();
14003 return VINF_SUCCESS;
14004
14005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14006 }
14007 }
14008 else
14009 {
14010 /* memory */
14011 switch (pVCpu->iem.s.enmEffOpSize)
14012 {
14013 case IEMMODE_16BIT:
14014 IEM_MC_BEGIN(3, 2);
14015 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14016 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14017 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14019
14020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14022 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14023 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14024 IEM_MC_FETCH_EFLAGS(EFlags);
14025 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14026
14027 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14028 IEM_MC_COMMIT_EFLAGS(EFlags);
14029 IEM_MC_ADVANCE_RIP();
14030 IEM_MC_END();
14031 return VINF_SUCCESS;
14032
14033 case IEMMODE_32BIT:
14034 IEM_MC_BEGIN(3, 2);
14035 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14036 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14037 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14039
14040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14042 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14043 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14044 IEM_MC_FETCH_EFLAGS(EFlags);
14045 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14046
14047 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14048 IEM_MC_COMMIT_EFLAGS(EFlags);
14049 IEM_MC_ADVANCE_RIP();
14050 IEM_MC_END();
14051 return VINF_SUCCESS;
14052
14053 case IEMMODE_64BIT:
14054 IEM_MC_BEGIN(3, 2);
14055 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14056 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14057 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14059
14060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14062 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14063 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14064 IEM_MC_FETCH_EFLAGS(EFlags);
14065 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14066
14067 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14068 IEM_MC_COMMIT_EFLAGS(EFlags);
14069 IEM_MC_ADVANCE_RIP();
14070 IEM_MC_END();
14071 return VINF_SUCCESS;
14072
14073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14074 }
14075 }
14076}
14077
14078/** Opcode 0xd4. */
14079FNIEMOP_DEF(iemOp_aam_Ib)
14080{
14081 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
14082 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14084 IEMOP_HLP_NO_64BIT();
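 /* AAM: AH = AL / bImm, AL = AL % bImm; a zero immediate raises \#DE just
    like an explicit division by zero, hence the check below. */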
14085 if (!bImm)
14086 return IEMOP_RAISE_DIVIDE_ERROR();
14087 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
14088}
14089
14090
14091/** Opcode 0xd5. */
14092FNIEMOP_DEF(iemOp_aad_Ib)
14093{
14094 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
14095 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14097 IEMOP_HLP_NO_64BIT();
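 /* AAD: AL = (AL + AH * bImm) & 0xff, AH = 0. */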
14098 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
14099}
14100
14101
14102/** Opcode 0xd6. */
14103FNIEMOP_DEF(iemOp_salc)
14104{
14105 IEMOP_MNEMONIC(salc, "salc");
14106 IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
14108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14109 IEMOP_HLP_NO_64BIT();
14110
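 /* SALC sets AL from the carry flag: AL = CF ? 0xff : 0x00; no flags or
    other registers are modified. */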
14111 IEM_MC_BEGIN(0, 0);
14112 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14113 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14114 } IEM_MC_ELSE() {
14115 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14116 } IEM_MC_ENDIF();
14117 IEM_MC_ADVANCE_RIP();
14118 IEM_MC_END();
14119 return VINF_SUCCESS;
14120}
14121
14122
14123/** Opcode 0xd7. */
14124FNIEMOP_DEF(iemOp_xlat)
14125{
14126 IEMOP_MNEMONIC(xlat, "xlat");
14127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
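 /* XLAT: AL = [seg:rBX + zero-extended AL]; the effective address size
    selects which width of the base register is used below. */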
14128 switch (pVCpu->iem.s.enmEffAddrMode)
14129 {
14130 case IEMMODE_16BIT:
14131 IEM_MC_BEGIN(2, 0);
14132 IEM_MC_LOCAL(uint8_t, u8Tmp);
14133 IEM_MC_LOCAL(uint16_t, u16Addr);
14134 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
14135 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
14136 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
14137 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14138 IEM_MC_ADVANCE_RIP();
14139 IEM_MC_END();
14140 return VINF_SUCCESS;
14141
14142 case IEMMODE_32BIT:
14143 IEM_MC_BEGIN(2, 0);
14144 IEM_MC_LOCAL(uint8_t, u8Tmp);
14145 IEM_MC_LOCAL(uint32_t, u32Addr);
14146 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
14147 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
14148 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
14149 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14150 IEM_MC_ADVANCE_RIP();
14151 IEM_MC_END();
14152 return VINF_SUCCESS;
14153
14154 case IEMMODE_64BIT:
14155 IEM_MC_BEGIN(2, 0);
14156 IEM_MC_LOCAL(uint8_t, u8Tmp);
14157 IEM_MC_LOCAL(uint64_t, u64Addr);
14158 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
14159 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
14160 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
14161 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14162 IEM_MC_ADVANCE_RIP();
14163 IEM_MC_END();
14164 return VINF_SUCCESS;
14165
14166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14167 }
14168}
14169
14170
14171/**
14172 * Common worker for FPU instructions working on ST0 and STn, and storing the
14173 * result in ST0.
14174 *
14175 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14176 */
14177FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14178{
14179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14180
14181 IEM_MC_BEGIN(3, 1);
14182 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14183 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14184 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14185 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14186
14187 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14188 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14189 IEM_MC_PREPARE_FPU_USAGE();
14190 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14191 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14192 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14193 IEM_MC_ELSE()
14194 IEM_MC_FPU_STACK_UNDERFLOW(0);
14195 IEM_MC_ENDIF();
14196 IEM_MC_ADVANCE_RIP();
14197
14198 IEM_MC_END();
14199 return VINF_SUCCESS;
14200}
14201
14202
14203/**
14204 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14205 * flags.
14206 *
14207 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14208 */
14209FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14210{
14211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14212
14213 IEM_MC_BEGIN(3, 1);
14214 IEM_MC_LOCAL(uint16_t, u16Fsw);
14215 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14216 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14217 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14218
14219 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14220 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14221 IEM_MC_PREPARE_FPU_USAGE();
14222 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14223 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14224 IEM_MC_UPDATE_FSW(u16Fsw);
14225 IEM_MC_ELSE()
14226 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14227 IEM_MC_ENDIF();
14228 IEM_MC_ADVANCE_RIP();
14229
14230 IEM_MC_END();
14231 return VINF_SUCCESS;
14232}
14233
14234
14235/**
14236 * Common worker for FPU instructions working on ST0 and STn, only affecting
14237 * flags, and popping when done.
14238 *
14239 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14240 */
14241FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14242{
14243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14244
14245 IEM_MC_BEGIN(3, 1);
14246 IEM_MC_LOCAL(uint16_t, u16Fsw);
14247 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14248 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14249 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14250
14251 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14252 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14253 IEM_MC_PREPARE_FPU_USAGE();
14254 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14255 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14256 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14257 IEM_MC_ELSE()
14258 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14259 IEM_MC_ENDIF();
14260 IEM_MC_ADVANCE_RIP();
14261
14262 IEM_MC_END();
14263 return VINF_SUCCESS;
14264}
14265
14266
14267/** Opcode 0xd8 11/0. */
14268FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
14269{
14270 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
14271 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
14272}
14273
14274
14275/** Opcode 0xd8 11/1. */
14276FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
14277{
14278 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
14279 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
14280}
14281
14282
14283/** Opcode 0xd8 11/2. */
14284FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
14285{
14286 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
14287 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
14288}
14289
14290
14291/** Opcode 0xd8 11/3. */
14292FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
14293{
14294 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
14295 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
14296}
14297
14298
14299/** Opcode 0xd8 11/4. */
14300FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
14301{
14302 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
14303 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
14304}
14305
14306
14307/** Opcode 0xd8 11/5. */
14308FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
14309{
14310 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
14311 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
14312}
14313
14314
14315/** Opcode 0xd8 11/6. */
14316FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
14317{
14318 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
14319 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
14320}
14321
14322
14323/** Opcode 0xd8 11/7. */
14324FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
14325{
14326 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
14327 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
14328}
14329
14330
14331/**
14332 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14333 * the result in ST0.
14334 *
14335 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14336 */
14337FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14338{
14339 IEM_MC_BEGIN(3, 3);
14340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14341 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14342 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14343 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14344 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14345 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14346
14347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14349
14350 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14351 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14352 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14353
14354 IEM_MC_PREPARE_FPU_USAGE();
14355 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14356 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14357 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14358 IEM_MC_ELSE()
14359 IEM_MC_FPU_STACK_UNDERFLOW(0);
14360 IEM_MC_ENDIF();
14361 IEM_MC_ADVANCE_RIP();
14362
14363 IEM_MC_END();
14364 return VINF_SUCCESS;
14365}
14366
14367
14368/** Opcode 0xd8 !11/0. */
14369FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
14370{
14371 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
14372 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
14373}
14374
14375
14376/** Opcode 0xd8 !11/1. */
14377FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
14378{
14379 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
14380 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
14381}
14382
14383
14384/** Opcode 0xd8 !11/2. */
14385FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
14386{
14387 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
14388
14389 IEM_MC_BEGIN(3, 3);
14390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14391 IEM_MC_LOCAL(uint16_t, u16Fsw);
14392 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14393 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14394 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14395 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14396
14397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14399
14400 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14401 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14402 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14403
14404 IEM_MC_PREPARE_FPU_USAGE();
14405 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14406 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14407 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14408 IEM_MC_ELSE()
14409 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14410 IEM_MC_ENDIF();
14411 IEM_MC_ADVANCE_RIP();
14412
14413 IEM_MC_END();
14414 return VINF_SUCCESS;
14415}
14416
14417
14418/** Opcode 0xd8 !11/3. */
14419FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
14420{
14421 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
14422
14423 IEM_MC_BEGIN(3, 3);
14424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14425 IEM_MC_LOCAL(uint16_t, u16Fsw);
14426 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14427 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14428 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14429 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14430
14431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14433
14434 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14435 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14436 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14437
14438 IEM_MC_PREPARE_FPU_USAGE();
14439 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14440 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14441 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14442 IEM_MC_ELSE()
14443 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14444 IEM_MC_ENDIF();
14445 IEM_MC_ADVANCE_RIP();
14446
14447 IEM_MC_END();
14448 return VINF_SUCCESS;
14449}
14450
14451
14452/** Opcode 0xd8 !11/4. */
14453FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
14454{
14455 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
14456 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
14457}
14458
14459
14460/** Opcode 0xd8 !11/5. */
14461FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
14462{
14463 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
14464 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
14465}
14466
14467
14468/** Opcode 0xd8 !11/6. */
14469FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
14470{
14471 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
14472 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
14473}
14474
14475
14476/** Opcode 0xd8 !11/7. */
14477FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
14478{
14479 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
14480 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
14481}
14482
14483
14484/** Opcode 0xd8. */
14485FNIEMOP_DEF(iemOp_EscF0)
14486{
14487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14488 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
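 /* FOP is an 11-bit value: the low 3 bits of the ESC opcode byte in the high
    byte and the whole ModR/M byte in the low byte, assembled by RT_MAKE_U16. */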
14489
14490 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14491 {
14492 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14493 {
14494 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
14495 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
14496 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
14497 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
14498 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
14499 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
14500 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
14501 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
14502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14503 }
14504 }
14505 else
14506 {
14507 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14508 {
14509 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
14510 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
14511 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
14512 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
14513 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
14514 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
14515 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
14516 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
14517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14518 }
14519 }
14520}
14521
14522
14523/** Opcode 0xd9 /0 mem32real
14524 * @sa iemOp_fld_m64r */
14525FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
14526{
14527 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
14528
14529 IEM_MC_BEGIN(2, 3);
14530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14531 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14532 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
14533 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14534 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
14535
14536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14538
14539 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14540 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14541 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14542
14543 IEM_MC_PREPARE_FPU_USAGE();
14544 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14545 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
14546 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14547 IEM_MC_ELSE()
14548 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14549 IEM_MC_ENDIF();
14550 IEM_MC_ADVANCE_RIP();
14551
14552 IEM_MC_END();
14553 return VINF_SUCCESS;
14554}
14555
14556
14557/** Opcode 0xd9 !11/2 mem32real */
14558FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
14559{
14560 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
14561 IEM_MC_BEGIN(3, 2);
14562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14563 IEM_MC_LOCAL(uint16_t, u16Fsw);
14564 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14565 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14566 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14567
14568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14570 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14571 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14572
14573 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14574 IEM_MC_PREPARE_FPU_USAGE();
14575 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14576 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14577 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14578 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14579 IEM_MC_ELSE()
14580 IEM_MC_IF_FCW_IM()
14581 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14582 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14583 IEM_MC_ENDIF();
14584 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14585 IEM_MC_ENDIF();
14586 IEM_MC_ADVANCE_RIP();
14587
14588 IEM_MC_END();
14589 return VINF_SUCCESS;
14590}
14591
14592
14593/** Opcode 0xd9 !11/3 */
14594FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
14595{
14596 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
14597 IEM_MC_BEGIN(3, 2);
14598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14599 IEM_MC_LOCAL(uint16_t, u16Fsw);
14600 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14601 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14602 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14603
14604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14606 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14607 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14608
14609 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14610 IEM_MC_PREPARE_FPU_USAGE();
14611 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14612 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14613 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14614 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14615 IEM_MC_ELSE()
14616 IEM_MC_IF_FCW_IM()
14617 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14618 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14619 IEM_MC_ENDIF();
14620 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14621 IEM_MC_ENDIF();
14622 IEM_MC_ADVANCE_RIP();
14623
14624 IEM_MC_END();
14625 return VINF_SUCCESS;
14626}
14627
14628
14629/** Opcode 0xd9 !11/4 */
14630FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
14631{
14632 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
14633 IEM_MC_BEGIN(3, 0);
14634 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14635 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14636 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
14637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14639 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14640 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14641 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14642 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
14643 IEM_MC_END();
14644 return VINF_SUCCESS;
14645}
14646
14647
14648/** Opcode 0xd9 !11/5 */
14649FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14650{
14651 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14652 IEM_MC_BEGIN(1, 1);
14653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14654 IEM_MC_ARG(uint16_t, u16Fcw, 0);
14655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14657 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14658 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14659 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14660 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14661 IEM_MC_END();
14662 return VINF_SUCCESS;
14663}
14664
14665
14666/** Opcode 0xd9 !11/6 */
14667FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14668{
14669 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14670 IEM_MC_BEGIN(3, 0);
14671 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14672 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14673 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14676 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14677 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14678 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14679 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14680 IEM_MC_END();
14681 return VINF_SUCCESS;
14682}
14683
14684
14685/** Opcode 0xd9 !11/7 */
14686FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14687{
14688 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14689 IEM_MC_BEGIN(2, 0);
14690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14691 IEM_MC_LOCAL(uint16_t, u16Fcw);
14692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14694 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14695 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14696 IEM_MC_FETCH_FCW(u16Fcw);
14697 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14698 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14699 IEM_MC_END();
14700 return VINF_SUCCESS;
14701}
14702
14703
14704/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
14705FNIEMOP_DEF(iemOp_fnop)
14706{
14707 IEMOP_MNEMONIC(fnop, "fnop");
14708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14709
14710 IEM_MC_BEGIN(0, 0);
14711 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14712 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14713 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14714 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
14715 * an Intel optimization. Investigate. */
14716 IEM_MC_UPDATE_FPU_OPCODE_IP();
14717 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14718 IEM_MC_END();
14719 return VINF_SUCCESS;
14720}
14721
14722
14723/** Opcode 0xd9 11/0 stN */
14724FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14725{
14726 IEMOP_MNEMONIC(fld_stN, "fld stN");
14727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14728
14729 /** @todo Testcase: Check whether this raises \#MF? Intel does not mention it;
14730 * AMD indicates that it does. */
14731 IEM_MC_BEGIN(0, 2);
14732 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14733 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14734 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14735 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14736
14737 IEM_MC_PREPARE_FPU_USAGE();
14738 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14739 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14740 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14741 IEM_MC_ELSE()
14742 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14743 IEM_MC_ENDIF();
14744
14745 IEM_MC_ADVANCE_RIP();
14746 IEM_MC_END();
14747
14748 return VINF_SUCCESS;
14749}
14750
14751
14752/** Opcode 0xd9 11/3 stN */
14753FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14754{
14755 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14757
14758 /** @todo Testcase: Check whether this raises \#MF? Intel does not mention it;
14759 * AMD indicates that it does. */
14760 IEM_MC_BEGIN(1, 3);
14761 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14762 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14763 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14764 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14765 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14766 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14767
14768 IEM_MC_PREPARE_FPU_USAGE();
14769 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14770 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14771 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14772 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14773 IEM_MC_ELSE()
14774 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14775 IEM_MC_ENDIF();
14776
14777 IEM_MC_ADVANCE_RIP();
14778 IEM_MC_END();
14779
14780 return VINF_SUCCESS;
14781}
14782
14783
14784/** Opcode 0xd9 11/4, 0xdd 11/2. */
14785FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14786{
14787 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14789
14790 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
14791 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14792 if (!iDstReg)
14793 {
14794 IEM_MC_BEGIN(0, 1);
14795 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14796 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14797 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14798
14799 IEM_MC_PREPARE_FPU_USAGE();
14800 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14801 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14802 IEM_MC_ELSE()
14803 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14804 IEM_MC_ENDIF();
14805
14806 IEM_MC_ADVANCE_RIP();
14807 IEM_MC_END();
14808 }
14809 else
14810 {
14811 IEM_MC_BEGIN(0, 2);
14812 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14813 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14814 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14815 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14816
14817 IEM_MC_PREPARE_FPU_USAGE();
14818 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14819 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14820 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14821 IEM_MC_ELSE()
14822 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14823 IEM_MC_ENDIF();
14824
14825 IEM_MC_ADVANCE_RIP();
14826 IEM_MC_END();
14827 }
14828 return VINF_SUCCESS;
14829}
14830
14831
14832/**
14833 * Common worker for FPU instructions working on ST0 and replaces it with the
14834 * result, i.e. unary operators.
14835 *
14836 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14837 */
14838FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14839{
14840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14841
14842 IEM_MC_BEGIN(2, 1);
14843 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14844 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14845 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14846
14847 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14848 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14849 IEM_MC_PREPARE_FPU_USAGE();
14850 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14851 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14852 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14853 IEM_MC_ELSE()
14854 IEM_MC_FPU_STACK_UNDERFLOW(0);
14855 IEM_MC_ENDIF();
14856 IEM_MC_ADVANCE_RIP();
14857
14858 IEM_MC_END();
14859 return VINF_SUCCESS;
14860}
14861
14862
14863/** Opcode 0xd9 0xe0. */
14864FNIEMOP_DEF(iemOp_fchs)
14865{
14866 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14867 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14868}
14869
14870
14871/** Opcode 0xd9 0xe1. */
14872FNIEMOP_DEF(iemOp_fabs)
14873{
14874 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14875 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14876}
14877
14878
14879/**
14880 * Common worker for FPU instructions working on ST0 and only returns FSW.
14881 *
14882 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14883 */
14884FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14885{
14886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14887
14888 IEM_MC_BEGIN(2, 1);
14889 IEM_MC_LOCAL(uint16_t, u16Fsw);
14890 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14891 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14892
14893 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14894 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14895 IEM_MC_PREPARE_FPU_USAGE();
14896 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14897 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14898 IEM_MC_UPDATE_FSW(u16Fsw);
14899 IEM_MC_ELSE()
14900 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14901 IEM_MC_ENDIF();
14902 IEM_MC_ADVANCE_RIP();
14903
14904 IEM_MC_END();
14905 return VINF_SUCCESS;
14906}
14907
14908
14909/** Opcode 0xd9 0xe4. */
14910FNIEMOP_DEF(iemOp_ftst)
14911{
14912 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
14913 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
14914}
14915
14916
14917/** Opcode 0xd9 0xe5. */
14918FNIEMOP_DEF(iemOp_fxam)
14919{
14920 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
14921 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
14922}
14923
14924
14925/**
14926 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14927 *
14928 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14929 */
14930FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14931{
14932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14933
14934 IEM_MC_BEGIN(1, 1);
14935 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14936 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14937
14938 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14939 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14940 IEM_MC_PREPARE_FPU_USAGE();
14941 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14942 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14943 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14944 IEM_MC_ELSE()
14945 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14946 IEM_MC_ENDIF();
14947 IEM_MC_ADVANCE_RIP();
14948
14949 IEM_MC_END();
14950 return VINF_SUCCESS;
14951}
14952
14953
14954/** Opcode 0xd9 0xe8. */
14955FNIEMOP_DEF(iemOp_fld1)
14956{
14957 IEMOP_MNEMONIC(fld1, "fld1");
14958 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
14959}
14960
14961
14962/** Opcode 0xd9 0xe9. */
14963FNIEMOP_DEF(iemOp_fldl2t)
14964{
14965 IEMOP_MNEMONIC(fldl2t, "fldl2t");
14966 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
14967}
14968
14969
14970/** Opcode 0xd9 0xea. */
14971FNIEMOP_DEF(iemOp_fldl2e)
14972{
14973 IEMOP_MNEMONIC(fldl2e, "fldl2e");
14974 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
14975}
14976
14977/** Opcode 0xd9 0xeb. */
14978FNIEMOP_DEF(iemOp_fldpi)
14979{
14980 IEMOP_MNEMONIC(fldpi, "fldpi");
14981 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
14982}
14983
14984
14985/** Opcode 0xd9 0xec. */
14986FNIEMOP_DEF(iemOp_fldlg2)
14987{
14988 IEMOP_MNEMONIC(fldlg2, "fldlg2");
14989 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
14990}
14991
14992/** Opcode 0xd9 0xed. */
14993FNIEMOP_DEF(iemOp_fldln2)
14994{
14995 IEMOP_MNEMONIC(fldln2, "fldln2");
14996 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
14997}
14998
14999
15000/** Opcode 0xd9 0xee. */
15001FNIEMOP_DEF(iemOp_fldz)
15002{
15003 IEMOP_MNEMONIC(fldz, "fldz");
15004 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
15005}
15006
15007
15008/** Opcode 0xd9 0xf0. */
15009FNIEMOP_DEF(iemOp_f2xm1)
15010{
15011 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
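 /* Computes 2^ST(0) - 1; architecturally only defined for -1.0 <= ST(0) <= +1.0. */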
15012 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
15013}
15014
15015
15016/**
15017 * Common worker for FPU instructions working on STn and ST0, storing the result
15018 * in STn, and popping the stack unless IE, DE or ZE was raised.
15019 *
15020 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15021 */
15022FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15023{
15024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15025
15026 IEM_MC_BEGIN(3, 1);
15027 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15028 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15029 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15030 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15031
15032 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15033 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15034
15035 IEM_MC_PREPARE_FPU_USAGE();
15036 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15037 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15038 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15039 IEM_MC_ELSE()
15040 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15041 IEM_MC_ENDIF();
15042 IEM_MC_ADVANCE_RIP();
15043
15044 IEM_MC_END();
15045 return VINF_SUCCESS;
15046}
15047
15048
15049/** Opcode 0xd9 0xf1. */
15050FNIEMOP_DEF(iemOp_fyl2x)
15051{
15052 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
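 /* ST(1) = ST(1) * log2(ST(0)), then pop; hence the store-to-STn-and-pop worker. */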
15053 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15054}
15055
15056
15057/**
15058 * Common worker for FPU instructions working on ST0 and having two outputs, one
15059 * replacing ST0 and one pushed onto the stack.
15060 *
15061 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15062 */
15063FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15064{
15065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15066
15067 IEM_MC_BEGIN(2, 1);
15068 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15069 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15070 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15071
15072 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15073 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15074 IEM_MC_PREPARE_FPU_USAGE();
15075 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15076 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15077 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15078 IEM_MC_ELSE()
15079 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15080 IEM_MC_ENDIF();
15081 IEM_MC_ADVANCE_RIP();
15082
15083 IEM_MC_END();
15084 return VINF_SUCCESS;
15085}
15086
15087
15088/** Opcode 0xd9 0xf2. */
15089FNIEMOP_DEF(iemOp_fptan)
15090{
15091 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
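 /* Replaces ST(0) with tan(ST(0)) and pushes 1.0 on top. */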
15092 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
15093}
15094
15095
15096/** Opcode 0xd9 0xf3. */
15097FNIEMOP_DEF(iemOp_fpatan)
15098{
15099 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
15100 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
15101}
15102
15103
15104/** Opcode 0xd9 0xf4. */
15105FNIEMOP_DEF(iemOp_fxtract)
15106{
15107 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
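 /* Splits ST(0): the exponent replaces ST(0) and the significand is pushed,
    leaving the significand in ST(0) and the exponent in ST(1). */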
15108 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
15109}
15110
15111
15112/** Opcode 0xd9 0xf5. */
15113FNIEMOP_DEF(iemOp_fprem1)
15114{
15115 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
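 /* IEEE 754 remainder (round-to-nearest quotient), unlike the truncating FPREM;
    C2 is set when only a partial reduction was done. */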
15116 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
15117}
15118
15119
15120/** Opcode 0xd9 0xf6. */
15121FNIEMOP_DEF(iemOp_fdecstp)
15122{
15123 IEMOP_MNEMONIC(fdecstp, "fdecstp");
15124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15125 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15126 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15127 * FINCSTP and FDECSTP. */
15128
15129 IEM_MC_BEGIN(0,0);
15130
15131 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15132 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15133
15134 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15135 IEM_MC_FPU_STACK_DEC_TOP();
15136 IEM_MC_UPDATE_FSW_CONST(0);
15137
15138 IEM_MC_ADVANCE_RIP();
15139 IEM_MC_END();
15140 return VINF_SUCCESS;
15141}
15142
15143
15144/** Opcode 0xd9 0xf7. */
15145FNIEMOP_DEF(iemOp_fincstp)
15146{
15147 IEMOP_MNEMONIC(fincstp, "fincstp");
15148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15149 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15150 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15151 * FINCSTP and FDECSTP. */
15152
15153 IEM_MC_BEGIN(0,0);
15154
15155 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15156 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15157
15158 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15159 IEM_MC_FPU_STACK_INC_TOP();
15160 IEM_MC_UPDATE_FSW_CONST(0);
15161
15162 IEM_MC_ADVANCE_RIP();
15163 IEM_MC_END();
15164 return VINF_SUCCESS;
15165}
15166
15167
15168/** Opcode 0xd9 0xf8. */
15169FNIEMOP_DEF(iemOp_fprem)
15170{
15171 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
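 /* Truncating (8087-compatible) partial remainder; C2 is set when the
    reduction is incomplete and the instruction should be repeated. */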
15172 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
15173}
15174
15175
15176/** Opcode 0xd9 0xf9. */
15177FNIEMOP_DEF(iemOp_fyl2xp1)
15178{
15179 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
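 /* ST(1) = ST(1) * log2(ST(0) + 1.0), then pop; more accurate than FYL2X for
    ST(0) values near zero. */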
15180 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
15181}
15182
15183
15184/** Opcode 0xd9 0xfa. */
15185FNIEMOP_DEF(iemOp_fsqrt)
15186{
15187 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
15188 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
15189}
15190
15191
15192/** Opcode 0xd9 0xfb. */
15193FNIEMOP_DEF(iemOp_fsincos)
15194{
15195 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
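 /* Replaces ST(0) with sin(ST(0)) and pushes cos(ST(0)). */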
15196 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
15197}
15198
15199
15200/** Opcode 0xd9 0xfc. */
15201FNIEMOP_DEF(iemOp_frndint)
15202{
15203 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
15204 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
15205}
15206
15207
15208/** Opcode 0xd9 0xfd. */
15209FNIEMOP_DEF(iemOp_fscale)
15210{
15211 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
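 /* ST(0) = ST(0) * 2^trunc(ST(1)); the counterpart to FXTRACT. */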
15212 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
15213}
15214
15215
15216/** Opcode 0xd9 0xfe. */
15217FNIEMOP_DEF(iemOp_fsin)
15218{
15219 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
15220 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
15221}
15222
15223
15224/** Opcode 0xd9 0xff. */
15225FNIEMOP_DEF(iemOp_fcos)
15226{
15227 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
15228 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
15229}
15230
15231
15232/** Used by iemOp_EscF1. */
15233IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
15234{
15235 /* 0xe0 */ iemOp_fchs,
15236 /* 0xe1 */ iemOp_fabs,
15237 /* 0xe2 */ iemOp_Invalid,
15238 /* 0xe3 */ iemOp_Invalid,
15239 /* 0xe4 */ iemOp_ftst,
15240 /* 0xe5 */ iemOp_fxam,
15241 /* 0xe6 */ iemOp_Invalid,
15242 /* 0xe7 */ iemOp_Invalid,
15243 /* 0xe8 */ iemOp_fld1,
15244 /* 0xe9 */ iemOp_fldl2t,
15245 /* 0xea */ iemOp_fldl2e,
15246 /* 0xeb */ iemOp_fldpi,
15247 /* 0xec */ iemOp_fldlg2,
15248 /* 0xed */ iemOp_fldln2,
15249 /* 0xee */ iemOp_fldz,
15250 /* 0xef */ iemOp_Invalid,
15251 /* 0xf0 */ iemOp_f2xm1,
15252 /* 0xf1 */ iemOp_fyl2x,
15253 /* 0xf2 */ iemOp_fptan,
15254 /* 0xf3 */ iemOp_fpatan,
15255 /* 0xf4 */ iemOp_fxtract,
15256 /* 0xf5 */ iemOp_fprem1,
15257 /* 0xf6 */ iemOp_fdecstp,
15258 /* 0xf7 */ iemOp_fincstp,
15259 /* 0xf8 */ iemOp_fprem,
15260 /* 0xf9 */ iemOp_fyl2xp1,
15261 /* 0xfa */ iemOp_fsqrt,
15262 /* 0xfb */ iemOp_fsincos,
15263 /* 0xfc */ iemOp_frndint,
15264 /* 0xfd */ iemOp_fscale,
15265 /* 0xfe */ iemOp_fsin,
15266 /* 0xff */ iemOp_fcos
15267};
15268
15269
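/*
 * The table above is only consulted for register forms with reg=4..7,
 * i.e. modrm bytes 0xe0 thru 0xff, so the (bRm - 0xe0) index used by
 * iemOp_EscF1 below is always in range; the assertion there documents
 * that invariant.
 */
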
/** Opcode 0xd9. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

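/*
 * Note: The uFpuOpcode value recorded by the escape decoders is the 11-bit
 * FOP: the low three bits of the escape byte (here 0xd9 & 0x7) in the high
 * byte and the whole modrm byte in the low byte, i.e. the layout found in
 * FNSTENV/FNSAVE images.
 */
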
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

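/*
 * Note: The FCMOVcc forms above test the integer EFLAGS bits (CF/ZF/PF)
 * rather than the FPU condition codes.  They arrived with the P6 family
 * together with FCOMI/FUCOMI, so comparison results can be consumed
 * without the classic FNSTSW AX + SAHF dance.
 */
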
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}


/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

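/*
 * Note: The memory-operand workers fetch the integer operand before
 * IEM_MC_PREPARE_FPU_USAGE, the intention being that a #PF or #GP on the
 * access is raised before any FPU state gets modified.
 */
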
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}


/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}


/** Opcode 0xda. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

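/*
 * Note: FISTTP (added with SSE3) always converts with truncation toward
 * zero, ignoring FCW.RC.  When the conversion is invalid and the IM mask
 * bit is set, the "integer indefinite" value is stored instead, which for
 * a 32-bit destination is INT32_MIN (0x80000000) as seen above.
 */
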
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

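/*
 * Note: The m80r format is the native register format, so FLD/FSTP m80r
 * are exact; unlike the m32r/m64r stores there is no rounding and thus no
 * precision, overflow or underflow exception to deal with here.
 */
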
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdb 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}

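/*
 * Note: FENI and FDISI controlled the interrupt mask of the 8087; from the
 * 80287 on they are defined to do nothing, which is exactly what the two
 * stubs above implement (decode, check CR0.EM/TS, advance RIP).
 */
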
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}


/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}

/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}

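/*
 * Note: FCOMI/FUCOMI deliver the comparison result straight into EFLAGS
 * (ZF=equal, CF=below, PF=unordered) instead of FSW.C3/C2/C0; FUCOMI only
 * raises #IA for signalling NaNs, whereas FCOMI faults on any NaN operand.
 */
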
/** Opcode 0xdb. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
            case 4:
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}

/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}


/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}

/** Opcode 0xdc. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, Intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, Intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}

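/*
 * Note: The m94/108byte operand of FRSTOR/FNSAVE is the 14 or 28 byte
 * FNSTENV environment (16-bit vs. 32-bit operand size) followed by the
 * eight 10-byte data registers, giving 94 or 108 bytes in total.
 */
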
/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}

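/*
 * Note: FNSTSW is a no-wait instruction, so no pending-exception check is
 * done before storing FSW; only CR0.EM/TS (#NM) can fault here, which is
 * why the worker above omits the usual IEM_MC_MAYBE_RAISE_FPU_XCPT.
 */
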
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd 11/1. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd 11/3. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, Intel behavior is that of FXCH ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xde 0xd9. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}

16736/** Opcode 0xde 11/4. */
16737FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16738{
16739 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16740 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16741}
16742
16743
16744/** Opcode 0xde 11/5. */
16745FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16746{
16747 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16748 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16749}
16750
16751
16752/** Opcode 0xde 11/6. */
16753FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16754{
16755 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16756 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16757}
16758
16759
16760/** Opcode 0xde 11/7. */
16761FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16762{
16763 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16764 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16765}
16766
16767
16768/**
16769 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16770 * the result in ST0.
16771 *
16772 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16773 */
16774FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16775{
16776 IEM_MC_BEGIN(3, 3);
16777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16778 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16779 IEM_MC_LOCAL(int16_t, i16Val2);
16780 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16781 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16782 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16783
16784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16786
16787 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16788 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16789 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16790
16791 IEM_MC_PREPARE_FPU_USAGE();
16792 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16793 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16794 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16795 IEM_MC_ELSE()
16796 IEM_MC_FPU_STACK_UNDERFLOW(0);
16797 IEM_MC_ENDIF();
16798 IEM_MC_ADVANCE_RIP();
16799
16800 IEM_MC_END();
16801 return VINF_SUCCESS;
16802}
16803
16804
16805/** Opcode 0xde !11/0. */
16806FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16807{
16808 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
16809 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16810}
16811
16812
16813/** Opcode 0xde !11/1. */
16814FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16815{
16816 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
16817 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16818}
16819
16820
16821/** Opcode 0xde !11/2. */
16822FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16823{
16824 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
16825
16826 IEM_MC_BEGIN(3, 3);
16827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16828 IEM_MC_LOCAL(uint16_t, u16Fsw);
16829 IEM_MC_LOCAL(int16_t, i16Val2);
16830 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16831 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16832 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16833
16834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16836
16837 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16838 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16839 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16840
16841 IEM_MC_PREPARE_FPU_USAGE();
16842 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16843 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16844 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16845 IEM_MC_ELSE()
16846 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16847 IEM_MC_ENDIF();
16848 IEM_MC_ADVANCE_RIP();
16849
16850 IEM_MC_END();
16851 return VINF_SUCCESS;
16852}
16853
16854
16855/** Opcode 0xde !11/3. */
16856FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16857{
16858 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
16859
16860 IEM_MC_BEGIN(3, 3);
16861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16862 IEM_MC_LOCAL(uint16_t, u16Fsw);
16863 IEM_MC_LOCAL(int16_t, i16Val2);
16864 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16865 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16866 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16867
16868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16870
16871 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16872 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16873 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16874
16875 IEM_MC_PREPARE_FPU_USAGE();
16876 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16877 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16878 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16879 IEM_MC_ELSE()
16880 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16881 IEM_MC_ENDIF();
16882 IEM_MC_ADVANCE_RIP();
16883
16884 IEM_MC_END();
16885 return VINF_SUCCESS;
16886}
16887
16888
16889/** Opcode 0xde !11/4. */
16890FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16891{
16892 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
16893 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16894}
16895
16896
16897/** Opcode 0xde !11/5. */
16898FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16899{
16900 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
16901 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16902}
16903
16904
16905/** Opcode 0xde !11/6. */
16906FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16907{
16908 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
16909 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16910}
16911
16912
16913/** Opcode 0xde !11/7. */
16914FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16915{
16916 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
16917 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16918}
16919
16920
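/*
 * Note: the escape-byte dispatchers below split the ModR/M byte into mod
 * (bits 7:6), reg (bits 5:3) and r/m (bits 2:0).  A minimal standalone
 * sketch of that split (illustrative only, not part of the build; the
 * variable names are made up):
 *
 * @code
 *  #include <stdint.h>
 *  #include <stdio.h>
 *
 *  int main(void)
 *  {
 *      uint8_t const  bRm  = 0xd9;           // 0xde 0xd9 encodes fcompp.
 *      unsigned const uMod = bRm >> 6;       // 3 -> register operand form.
 *      unsigned const uReg = (bRm >> 3) & 7; // 3 -> the /3 sub-opcode.
 *      unsigned const uRm  = bRm & 7;        // 1 -> ST(1).
 *      printf("mod=%u reg=%u rm=%u\n", uMod, uReg, uRm);
 *      return 0;
 *  }
 * @endcode
 */
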
16921/** Opcode 0xde. */
16922FNIEMOP_DEF(iemOp_EscF6)
16923{
16924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16925 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16927 {
16928 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16929 {
16930 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16931 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16932 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
16933 case 3: if (bRm == 0xd9)
16934 return FNIEMOP_CALL(iemOp_fcompp);
16935 return IEMOP_RAISE_INVALID_OPCODE();
16936 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16937 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16938 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16939 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16940 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16941 }
16942 }
16943 else
16944 {
16945 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16946 {
16947 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
16948 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
16949 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
16950 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
16951 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
16952 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
16953 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
16954 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
16955 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16956 }
16957 }
16958}
16959
16960
16961/** Opcode 0xdf 11/0.
16962 * Undocumented instruction, assumed to work like ffree + fincstp. */
16963FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
16964{
16965 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
16966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16967
16968 IEM_MC_BEGIN(0, 0);
16969
16970 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16971 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16972
16973 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16974 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16975 IEM_MC_FPU_STACK_INC_TOP();
16976 IEM_MC_UPDATE_FPU_OPCODE_IP();
16977
16978 IEM_MC_ADVANCE_RIP();
16979 IEM_MC_END();
16980 return VINF_SUCCESS;
16981}
16982
16983
16984/** Opcode 0xdf 0xe0. */
16985FNIEMOP_DEF(iemOp_fnstsw_ax)
16986{
16987 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
16988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16989
16990 IEM_MC_BEGIN(0, 1);
16991 IEM_MC_LOCAL(uint16_t, u16Tmp);
16992 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16993 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16994 IEM_MC_FETCH_FSW(u16Tmp);
16995 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
16996 IEM_MC_ADVANCE_RIP();
16997 IEM_MC_END();
16998 return VINF_SUCCESS;
16999}
17000
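/*
 * Note: guest code usually consumes FNSTSW AX by testing the condition code
 * bits.  A hedged sketch of where C0/C2/C3 sit in the status word after an
 * FCOM-style compare (illustrative helper, not IEM code; the bit positions
 * match the X86_FSW_* layout):
 *
 * @code
 *  #include <stdint.h>
 *  // Maps an FSW to -1 (below), 0 (equal), 1 (above) or 2 (unordered).
 *  static int FswToCompareResult(uint16_t fFsw)
 *  {
 *      if (fFsw & (1u << 10)) return 2;     // C2 - unordered.
 *      if (fFsw & (1u << 14)) return 0;     // C3 - equal.
 *      return fFsw & (1u << 8) ? -1 : 1;    // C0 - below, else above.
 *  }
 * @endcode
 */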
17001
17002/** Opcode 0xdf 11/5. */
17003FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
17004{
17005 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
17006 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17007}
17008
17009
17010/** Opcode 0xdf 11/6. */
17011FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
17012{
17013 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
17014 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17015}
17016
17017
17018/** Opcode 0xdf !11/0. */
17019FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
17020{
17021 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
17022
17023 IEM_MC_BEGIN(2, 3);
17024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17025 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17026 IEM_MC_LOCAL(int16_t, i16Val);
17027 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17028 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
17029
17030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17032
17033 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17034 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17035 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17036
17037 IEM_MC_PREPARE_FPU_USAGE();
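    /* A load pushes the stack: TOP is decremented first, so the value lands
       in what is currently addressed as ST(7); that register must be empty. */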
17038 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17039 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
17040 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17041 IEM_MC_ELSE()
17042 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17043 IEM_MC_ENDIF();
17044 IEM_MC_ADVANCE_RIP();
17045
17046 IEM_MC_END();
17047 return VINF_SUCCESS;
17048}
17049
17050
17051/** Opcode 0xdf !11/1. */
17052FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
17053{
17054 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
17055 IEM_MC_BEGIN(3, 2);
17056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17057 IEM_MC_LOCAL(uint16_t, u16Fsw);
17058 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17059 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17060 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17061
17062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17064 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17065 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17066
17067 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17068 IEM_MC_PREPARE_FPU_USAGE();
17069 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17070 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17071 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17072 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17073 IEM_MC_ELSE()
17074 IEM_MC_IF_FCW_IM()
17075 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17076 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17077 IEM_MC_ENDIF();
17078 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17079 IEM_MC_ENDIF();
17080 IEM_MC_ADVANCE_RIP();
17081
17082 IEM_MC_END();
17083 return VINF_SUCCESS;
17084}
17085
17086
17087/** Opcode 0xdf !11/2. */
17088FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
17089{
17090 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
17091 IEM_MC_BEGIN(3, 2);
17092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17093 IEM_MC_LOCAL(uint16_t, u16Fsw);
17094 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17095 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17096 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17097
17098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17100 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17101 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17102
17103 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17104 IEM_MC_PREPARE_FPU_USAGE();
17105 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17106 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17107 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17108 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17109 IEM_MC_ELSE()
17110 IEM_MC_IF_FCW_IM()
17111 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17112 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17113 IEM_MC_ENDIF();
17114 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17115 IEM_MC_ENDIF();
17116 IEM_MC_ADVANCE_RIP();
17117
17118 IEM_MC_END();
17119 return VINF_SUCCESS;
17120}
17121
17122
17123/** Opcode 0xdf !11/3. */
17124FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
17125{
17126 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
17127 IEM_MC_BEGIN(3, 2);
17128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17129 IEM_MC_LOCAL(uint16_t, u16Fsw);
17130 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17131 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17132 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17133
17134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17136 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17137 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17138
17139 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17140 IEM_MC_PREPARE_FPU_USAGE();
17141 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17142 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17143 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17144 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17145 IEM_MC_ELSE()
17146 IEM_MC_IF_FCW_IM()
17147 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17148 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17149 IEM_MC_ENDIF();
17150 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17151 IEM_MC_ENDIF();
17152 IEM_MC_ADVANCE_RIP();
17153
17154 IEM_MC_END();
17155 return VINF_SUCCESS;
17156}
17157
17158
17159/** Opcode 0xdf !11/4. */
17160FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17161
17162
17163/** Opcode 0xdf !11/5. */
17164FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
17165{
17166 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
17167
17168 IEM_MC_BEGIN(2, 3);
17169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17170 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17171 IEM_MC_LOCAL(int64_t, i64Val);
17172 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17173 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
17174
17175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17177
17178 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17179 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17180 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17181
17182 IEM_MC_PREPARE_FPU_USAGE();
17183 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17184 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
17185 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17186 IEM_MC_ELSE()
17187 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17188 IEM_MC_ENDIF();
17189 IEM_MC_ADVANCE_RIP();
17190
17191 IEM_MC_END();
17192 return VINF_SUCCESS;
17193}
17194
17195
17196/** Opcode 0xdf !11/6. */
17197FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17198
17199
17200/** Opcode 0xdf !11/7. */
17201FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
17202{
17203 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
17204 IEM_MC_BEGIN(3, 2);
17205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17206 IEM_MC_LOCAL(uint16_t, u16Fsw);
17207 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17208 IEM_MC_ARG(int64_t *, pi64Dst, 1);
17209 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17210
17211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17213 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17214 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17215
17216 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17217 IEM_MC_PREPARE_FPU_USAGE();
17218 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17219 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
17220 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
17221 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17222 IEM_MC_ELSE()
17223 IEM_MC_IF_FCW_IM()
17224 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
17225 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
17226 IEM_MC_ENDIF();
17227 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17228 IEM_MC_ENDIF();
17229 IEM_MC_ADVANCE_RIP();
17230
17231 IEM_MC_END();
17232 return VINF_SUCCESS;
17233}
17234
17235
17236/** Opcode 0xdf. */
17237FNIEMOP_DEF(iemOp_EscF7)
17238{
17239    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7); /* Mirror iemOp_EscF6; keeps FOP current for 0xdf instructions. */
17240 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17241 {
17242 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17243 {
17244 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
17245 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on Intel. */
17246 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on Intel. */
17247 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on Intel. */
17248 case 4: if (bRm == 0xe0)
17249 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17250 return IEMOP_RAISE_INVALID_OPCODE();
17251 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17252 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17253 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17255 }
17256 }
17257 else
17258 {
17259 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17260 {
17261 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17262 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17263 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17264 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17265 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17266 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17267 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17268 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17269 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17270 }
17271 }
17272}
17273
17274
17275/** Opcode 0xe0. */
17276FNIEMOP_DEF(iemOp_loopne_Jb)
17277{
17278 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
17279 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17281 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17282
17283 switch (pVCpu->iem.s.enmEffAddrMode)
17284 {
17285 case IEMMODE_16BIT:
17286 IEM_MC_BEGIN(0,0);
17287 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17288 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17289 IEM_MC_REL_JMP_S8(i8Imm);
17290 } IEM_MC_ELSE() {
17291 IEM_MC_ADVANCE_RIP();
17292 } IEM_MC_ENDIF();
17293 IEM_MC_END();
17294 return VINF_SUCCESS;
17295
17296 case IEMMODE_32BIT:
17297 IEM_MC_BEGIN(0,0);
17298 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17299 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17300 IEM_MC_REL_JMP_S8(i8Imm);
17301 } IEM_MC_ELSE() {
17302 IEM_MC_ADVANCE_RIP();
17303 } IEM_MC_ENDIF();
17304 IEM_MC_END();
17305 return VINF_SUCCESS;
17306
17307 case IEMMODE_64BIT:
17308 IEM_MC_BEGIN(0,0);
17309 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17310 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17311 IEM_MC_REL_JMP_S8(i8Imm);
17312 } IEM_MC_ELSE() {
17313 IEM_MC_ADVANCE_RIP();
17314 } IEM_MC_ENDIF();
17315 IEM_MC_END();
17316 return VINF_SUCCESS;
17317
17318 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17319 }
17320}
17321
17322
17323/** Opcode 0xe1. */
17324FNIEMOP_DEF(iemOp_loope_Jb)
17325{
17326 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
17327 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17329 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17330
17331 switch (pVCpu->iem.s.enmEffAddrMode)
17332 {
17333 case IEMMODE_16BIT:
17334 IEM_MC_BEGIN(0,0);
17335 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17336 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17337 IEM_MC_REL_JMP_S8(i8Imm);
17338 } IEM_MC_ELSE() {
17339 IEM_MC_ADVANCE_RIP();
17340 } IEM_MC_ENDIF();
17341 IEM_MC_END();
17342 return VINF_SUCCESS;
17343
17344 case IEMMODE_32BIT:
17345 IEM_MC_BEGIN(0,0);
17346 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17347 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17348 IEM_MC_REL_JMP_S8(i8Imm);
17349 } IEM_MC_ELSE() {
17350 IEM_MC_ADVANCE_RIP();
17351 } IEM_MC_ENDIF();
17352 IEM_MC_END();
17353 return VINF_SUCCESS;
17354
17355 case IEMMODE_64BIT:
17356 IEM_MC_BEGIN(0,0);
17357 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17358 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17359 IEM_MC_REL_JMP_S8(i8Imm);
17360 } IEM_MC_ELSE() {
17361 IEM_MC_ADVANCE_RIP();
17362 } IEM_MC_ENDIF();
17363 IEM_MC_END();
17364 return VINF_SUCCESS;
17365
17366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17367 }
17368}
17369
17370
17371/** Opcode 0xe2. */
17372FNIEMOP_DEF(iemOp_loop_Jb)
17373{
17374 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
17375 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17377 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17378
17379 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
17380 * using the 32-bit operand size override. How can that be restarted? See
17381 * weird pseudo code in the Intel manual. */
17382 switch (pVCpu->iem.s.enmEffAddrMode)
17383 {
17384 case IEMMODE_16BIT:
17385 IEM_MC_BEGIN(0,0);
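            /* A LOOP branching to itself would merely count CX down to zero;
               detect that and do it in one go. */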
17386 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17387 {
17388 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17389 IEM_MC_IF_CX_IS_NZ() {
17390 IEM_MC_REL_JMP_S8(i8Imm);
17391 } IEM_MC_ELSE() {
17392 IEM_MC_ADVANCE_RIP();
17393 } IEM_MC_ENDIF();
17394 }
17395 else
17396 {
17397 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
17398 IEM_MC_ADVANCE_RIP();
17399 }
17400 IEM_MC_END();
17401 return VINF_SUCCESS;
17402
17403 case IEMMODE_32BIT:
17404 IEM_MC_BEGIN(0,0);
17405 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17406 {
17407 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17408 IEM_MC_IF_ECX_IS_NZ() {
17409 IEM_MC_REL_JMP_S8(i8Imm);
17410 } IEM_MC_ELSE() {
17411 IEM_MC_ADVANCE_RIP();
17412 } IEM_MC_ENDIF();
17413 }
17414 else
17415 {
17416 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
17417 IEM_MC_ADVANCE_RIP();
17418 }
17419 IEM_MC_END();
17420 return VINF_SUCCESS;
17421
17422 case IEMMODE_64BIT:
17423 IEM_MC_BEGIN(0,0);
17424 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17425 {
17426 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17427 IEM_MC_IF_RCX_IS_NZ() {
17428 IEM_MC_REL_JMP_S8(i8Imm);
17429 } IEM_MC_ELSE() {
17430 IEM_MC_ADVANCE_RIP();
17431 } IEM_MC_ENDIF();
17432 }
17433 else
17434 {
17435 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
17436 IEM_MC_ADVANCE_RIP();
17437 }
17438 IEM_MC_END();
17439 return VINF_SUCCESS;
17440
17441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17442 }
17443}
17444
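/*
 * Note: for LOOPcc and JCXZ the address size prefix (0x67) selects the
 * CX/ECX/RCX counter, not the operand size prefix - hence the switch on
 * enmEffAddrMode above.  A hedged C model of the plain 64-bit LOOP
 * (illustrative only, not IEM code):
 *
 * @code
 *  #include <stdint.h>
 *  // Returns the new RIP given the next-instruction RIP and the displacement.
 *  static uint64_t SimulateLoop64(uint64_t *puRcx, uint64_t uRipNext, int8_t i8Disp)
 *  {
 *      *puRcx -= 1;                          // LOOP leaves EFLAGS untouched.
 *      return *puRcx != 0 ? uRipNext + (int64_t)i8Disp : uRipNext;
 *  }
 * @endcode
 */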
17445
17446/** Opcode 0xe3. */
17447FNIEMOP_DEF(iemOp_jecxz_Jb)
17448{
17449 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
17450 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17452 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17453
17454 switch (pVCpu->iem.s.enmEffAddrMode)
17455 {
17456 case IEMMODE_16BIT:
17457 IEM_MC_BEGIN(0,0);
17458 IEM_MC_IF_CX_IS_NZ() {
17459 IEM_MC_ADVANCE_RIP();
17460 } IEM_MC_ELSE() {
17461 IEM_MC_REL_JMP_S8(i8Imm);
17462 } IEM_MC_ENDIF();
17463 IEM_MC_END();
17464 return VINF_SUCCESS;
17465
17466 case IEMMODE_32BIT:
17467 IEM_MC_BEGIN(0,0);
17468 IEM_MC_IF_ECX_IS_NZ() {
17469 IEM_MC_ADVANCE_RIP();
17470 } IEM_MC_ELSE() {
17471 IEM_MC_REL_JMP_S8(i8Imm);
17472 } IEM_MC_ENDIF();
17473 IEM_MC_END();
17474 return VINF_SUCCESS;
17475
17476 case IEMMODE_64BIT:
17477 IEM_MC_BEGIN(0,0);
17478 IEM_MC_IF_RCX_IS_NZ() {
17479 IEM_MC_ADVANCE_RIP();
17480 } IEM_MC_ELSE() {
17481 IEM_MC_REL_JMP_S8(i8Imm);
17482 } IEM_MC_ENDIF();
17483 IEM_MC_END();
17484 return VINF_SUCCESS;
17485
17486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17487 }
17488}
17489
17490
17491/** Opcode 0xe4. */
17492FNIEMOP_DEF(iemOp_in_AL_Ib)
17493{
17494 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
17495 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17497 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
17498}
17499
17500
17501/** Opcode 0xe5. */
17502FNIEMOP_DEF(iemOp_in_eAX_Ib)
17503{
17504 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
17505 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17507 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17508}
17509
17510
17511/** Opcode 0xe6. */
17512FNIEMOP_DEF(iemOp_out_Ib_AL)
17513{
17514 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
17515 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17517 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
17518}
17519
17520
17521/** Opcode 0xe7. */
17522FNIEMOP_DEF(iemOp_out_Ib_eAX)
17523{
17524 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
17525 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17527 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17528}
17529
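/*
 * Note: port I/O has no 64-bit form, so the access size above is only ever
 * 1, 2 or 4 bytes.  Illustrative encodings (made-up examples, 32-bit code):
 *
 * @code
 *  // in  al,  60h  ->  E4 60      ; 1 byte from port 0x60 into AL
 *  // in  ax,  71h  ->  66 E5 71   ; 2 bytes, operand size prefix
 *  // in  eax, 71h  ->  E5 71      ; 4 bytes
 * @endcode
 */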
17530
17531/** Opcode 0xe8. */
17532FNIEMOP_DEF(iemOp_call_Jv)
17533{
17534 IEMOP_MNEMONIC(call_Jv, "call Jv");
17535 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17536 switch (pVCpu->iem.s.enmEffOpSize)
17537 {
17538 case IEMMODE_16BIT:
17539 {
17540 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17541 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
17542 }
17543
17544 case IEMMODE_32BIT:
17545 {
17546 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17547 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
17548 }
17549
17550 case IEMMODE_64BIT:
17551 {
17552 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17553 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
17554 }
17555
17556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17557 }
17558}
17559
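/*
 * Note: the relative displacement is applied to the address of the *next*
 * instruction.  Worked example for the 32-bit form (values made up):
 *
 * @code
 *  // e8 fb 0f 00 00 at 0x00001000: length 5, next RIP = 0x00001005,
 *  // target = 0x00001005 + 0x00000ffb = 0x00002000.
 * @endcode
 */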
17560
17561/** Opcode 0xe9. */
17562FNIEMOP_DEF(iemOp_jmp_Jv)
17563{
17564 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
17565 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17566 switch (pVCpu->iem.s.enmEffOpSize)
17567 {
17568 case IEMMODE_16BIT:
17569 {
17570 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
17571 IEM_MC_BEGIN(0, 0);
17572 IEM_MC_REL_JMP_S16(i16Imm);
17573 IEM_MC_END();
17574 return VINF_SUCCESS;
17575 }
17576
17577 case IEMMODE_64BIT:
17578 case IEMMODE_32BIT:
17579 {
17580 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
17581 IEM_MC_BEGIN(0, 0);
17582 IEM_MC_REL_JMP_S32(i32Imm);
17583 IEM_MC_END();
17584 return VINF_SUCCESS;
17585 }
17586
17587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17588 }
17589}
17590
17591
17592/** Opcode 0xea. */
17593FNIEMOP_DEF(iemOp_jmp_Ap)
17594{
17595 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
17596 IEMOP_HLP_NO_64BIT();
17597
17598 /* Decode the far pointer address and pass it on to the far call C implementation. */
17599 uint32_t offSeg;
17600 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
17601 IEM_OPCODE_GET_NEXT_U32(&offSeg);
17602 else
17603 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
17604 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
17605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17606 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
17607}
17608
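/*
 * Note: the far pointer immediate is encoded offset first, selector last,
 * matching the decode order above.  Illustrative bytes (made-up values):
 *
 * @code
 *  // jmp 0x0008:0x00401000 with 32-bit operand size:
 *  //   EA 00 10 40 00 08 00
 *  //      \-offset (LE)-/ \sel/
 * @endcode
 */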
17609
17610/** Opcode 0xeb. */
17611FNIEMOP_DEF(iemOp_jmp_Jb)
17612{
17613 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17614 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17616 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17617
17618 IEM_MC_BEGIN(0, 0);
17619 IEM_MC_REL_JMP_S8(i8Imm);
17620 IEM_MC_END();
17621 return VINF_SUCCESS;
17622}
17623
17624
17625/** Opcode 0xec. */
17626FNIEMOP_DEF(iemOp_in_AL_DX)
17627{
17628 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17630 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17631}
17632
17633
17634/** Opcode 0xed. */
17635FNIEMOP_DEF(iemOp_eAX_DX)
17636{
17637 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17639 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17640}
17641
17642
17643/** Opcode 0xee. */
17644FNIEMOP_DEF(iemOp_out_DX_AL)
17645{
17646 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17648 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17649}
17650
17651
17652/** Opcode 0xef. */
17653FNIEMOP_DEF(iemOp_out_DX_eAX)
17654{
17655 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17657 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17658}
17659
17660
17661/** Opcode 0xf0. */
17662FNIEMOP_DEF(iemOp_lock)
17663{
17664 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17665 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17666
17667 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17668 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17669}
17670
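/*
 * Note: LOCK legality is not checked here; each instruction decoder either
 * rejects the prefix via IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() (raising
 * #UD) or dispatches to a pfnLockedUxx worker, as seen throughout this file.
 */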
17671
17672/** Opcode 0xf1. */
17673FNIEMOP_DEF(iemOp_int_1)
17674{
17675 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17676 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17677 /** @todo testcase! */
17678 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17679}
17680
17681
17682/** Opcode 0xf2. */
17683FNIEMOP_DEF(iemOp_repne)
17684{
17685 /* This overrides any previous REPE prefix. */
17686 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17687 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17688 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17689
17690 /* For the 4 entry opcode tables, REPNZ overrides any previous
17691 REPZ and operand size prefixes. */
17692 pVCpu->iem.s.idxPrefix = 3;
17693
17694 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17695 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17696}
17697
17698
17699/** Opcode 0xf3. */
17700FNIEMOP_DEF(iemOp_repe)
17701{
17702 /* This overrides any previous REPNE prefix. */
17703 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17704 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17705 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17706
17707 /* For the 4 entry opcode tables, REPZ overrides any previous
17708 REPNZ and operand size prefixes. */
17709 pVCpu->iem.s.idxPrefix = 2;
17710
17711 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17712 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17713}
17714
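/*
 * Note: idxPrefix selects the column in the 4-entry two-byte opcode tables.
 * The mapping assumed here: 0 = no prefix, 1 = 0x66 (set elsewhere in this
 * file), 2 = 0xf3 (repe above) and 3 = 0xf2 (repne); the last such prefix
 * decoded wins.
 */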
17715
17716/** Opcode 0xf4. */
17717FNIEMOP_DEF(iemOp_hlt)
17718{
17719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17720 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17721}
17722
17723
17724/** Opcode 0xf5. */
17725FNIEMOP_DEF(iemOp_cmc)
17726{
17727 IEMOP_MNEMONIC(cmc, "cmc");
17728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17729 IEM_MC_BEGIN(0, 0);
17730 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
17731 IEM_MC_ADVANCE_RIP();
17732 IEM_MC_END();
17733 return VINF_SUCCESS;
17734}
17735
17736
17737/**
17738 * Common implementation of 'inc/dec/not/neg Eb'.
17739 *
17740 * @param bRm The RM byte.
17741 * @param pImpl The instruction implementation.
17742 */
17743FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17744{
17745 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17746 {
17747 /* register access */
17748 IEM_MC_BEGIN(2, 0);
17749 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17750 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17751 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17752 IEM_MC_REF_EFLAGS(pEFlags);
17753 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17754 IEM_MC_ADVANCE_RIP();
17755 IEM_MC_END();
17756 }
17757 else
17758 {
17759 /* memory access. */
17760 IEM_MC_BEGIN(2, 2);
17761 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17762 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17764
17765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17766 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17767 IEM_MC_FETCH_EFLAGS(EFlags);
17768 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17769 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17770 else
17771 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17772
17773 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17774 IEM_MC_COMMIT_EFLAGS(EFlags);
17775 IEM_MC_ADVANCE_RIP();
17776 IEM_MC_END();
17777 }
17778 return VINF_SUCCESS;
17779}
17780
17781
17782/**
17783 * Common implementation of 'inc/dec/not/neg Ev'.
17784 *
17785 * @param bRm The RM byte.
17786 * @param pImpl The instruction implementation.
17787 */
17788FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17789{
17790 /* Registers are handled by a common worker. */
17791 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17792 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17793
17794 /* Memory we do here. */
17795 switch (pVCpu->iem.s.enmEffOpSize)
17796 {
17797 case IEMMODE_16BIT:
17798 IEM_MC_BEGIN(2, 2);
17799 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17800 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17802
17803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17804 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17805 IEM_MC_FETCH_EFLAGS(EFlags);
17806 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17807 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17808 else
17809 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17810
17811 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17812 IEM_MC_COMMIT_EFLAGS(EFlags);
17813 IEM_MC_ADVANCE_RIP();
17814 IEM_MC_END();
17815 return VINF_SUCCESS;
17816
17817 case IEMMODE_32BIT:
17818 IEM_MC_BEGIN(2, 2);
17819 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17820 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17822
17823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17824 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17825 IEM_MC_FETCH_EFLAGS(EFlags);
17826 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17827 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17828 else
17829 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17830
17831 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17832 IEM_MC_COMMIT_EFLAGS(EFlags);
17833 IEM_MC_ADVANCE_RIP();
17834 IEM_MC_END();
17835 return VINF_SUCCESS;
17836
17837 case IEMMODE_64BIT:
17838 IEM_MC_BEGIN(2, 2);
17839 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17840 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17842
17843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17844 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17845 IEM_MC_FETCH_EFLAGS(EFlags);
17846 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17847 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17848 else
17849 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17850
17851 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17852 IEM_MC_COMMIT_EFLAGS(EFlags);
17853 IEM_MC_ADVANCE_RIP();
17854 IEM_MC_END();
17855 return VINF_SUCCESS;
17856
17857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17858 }
17859}
17860
17861
17862/** Opcode 0xf6 /0. */
17863FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17864{
17865 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
17866 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17867
17868 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17869 {
17870 /* register access */
17871 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17873
17874 IEM_MC_BEGIN(3, 0);
17875 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17876 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17877 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17878 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17879 IEM_MC_REF_EFLAGS(pEFlags);
17880 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17881 IEM_MC_ADVANCE_RIP();
17882 IEM_MC_END();
17883 }
17884 else
17885 {
17886 /* memory access. */
17887 IEM_MC_BEGIN(3, 2);
17888 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17889 IEM_MC_ARG(uint8_t, u8Src, 1);
17890 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17892
17893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17894 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17895 IEM_MC_ASSIGN(u8Src, u8Imm);
17896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17897 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17898 IEM_MC_FETCH_EFLAGS(EFlags);
17899 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17900
17901 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17902 IEM_MC_COMMIT_EFLAGS(EFlags);
17903 IEM_MC_ADVANCE_RIP();
17904 IEM_MC_END();
17905 }
17906 return VINF_SUCCESS;
17907}
17908
17909
17910/** Opcode 0xf7 /0. */
17911FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
17912{
17913 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
17914 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17915
17916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17917 {
17918 /* register access */
17919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17920 switch (pVCpu->iem.s.enmEffOpSize)
17921 {
17922 case IEMMODE_16BIT:
17923 {
17924 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17925 IEM_MC_BEGIN(3, 0);
17926 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17927 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
17928 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17929 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17930 IEM_MC_REF_EFLAGS(pEFlags);
17931 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17932 IEM_MC_ADVANCE_RIP();
17933 IEM_MC_END();
17934 return VINF_SUCCESS;
17935 }
17936
17937 case IEMMODE_32BIT:
17938 {
17939 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17940 IEM_MC_BEGIN(3, 0);
17941 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17942 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
17943 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17944 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17945 IEM_MC_REF_EFLAGS(pEFlags);
17946 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17947 /* No clearing of the high dword here; test doesn't write back the result. */
17948 IEM_MC_ADVANCE_RIP();
17949 IEM_MC_END();
17950 return VINF_SUCCESS;
17951 }
17952
17953 case IEMMODE_64BIT:
17954 {
17955 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17956 IEM_MC_BEGIN(3, 0);
17957 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17958 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
17959 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17960 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17961 IEM_MC_REF_EFLAGS(pEFlags);
17962 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17963 IEM_MC_ADVANCE_RIP();
17964 IEM_MC_END();
17965 return VINF_SUCCESS;
17966 }
17967
17968 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17969 }
17970 }
17971 else
17972 {
17973 /* memory access. */
17974 switch (pVCpu->iem.s.enmEffOpSize)
17975 {
17976 case IEMMODE_16BIT:
17977 {
17978 IEM_MC_BEGIN(3, 2);
17979 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17980 IEM_MC_ARG(uint16_t, u16Src, 1);
17981 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17983
17984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
17985 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17986 IEM_MC_ASSIGN(u16Src, u16Imm);
17987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17988 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17989 IEM_MC_FETCH_EFLAGS(EFlags);
17990 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17991
17992 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
17993 IEM_MC_COMMIT_EFLAGS(EFlags);
17994 IEM_MC_ADVANCE_RIP();
17995 IEM_MC_END();
17996 return VINF_SUCCESS;
17997 }
17998
17999 case IEMMODE_32BIT:
18000 {
18001 IEM_MC_BEGIN(3, 2);
18002 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18003 IEM_MC_ARG(uint32_t, u32Src, 1);
18004 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18006
18007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18008 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18009 IEM_MC_ASSIGN(u32Src, u32Imm);
18010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18011 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18012 IEM_MC_FETCH_EFLAGS(EFlags);
18013 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18014
18015 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
18016 IEM_MC_COMMIT_EFLAGS(EFlags);
18017 IEM_MC_ADVANCE_RIP();
18018 IEM_MC_END();
18019 return VINF_SUCCESS;
18020 }
18021
18022 case IEMMODE_64BIT:
18023 {
18024 IEM_MC_BEGIN(3, 2);
18025 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18026 IEM_MC_ARG(uint64_t, u64Src, 1);
18027 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18029
18030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18031 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18032 IEM_MC_ASSIGN(u64Src, u64Imm);
18033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18034 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18035 IEM_MC_FETCH_EFLAGS(EFlags);
18036 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18037
18038 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18039 IEM_MC_COMMIT_EFLAGS(EFlags);
18040 IEM_MC_ADVANCE_RIP();
18041 IEM_MC_END();
18042 return VINF_SUCCESS;
18043 }
18044
18045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18046 }
18047 }
18048}
18049
18050
18051/** Opcode 0xf6 /4, /5, /6 and /7. */
18052FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
18053{
18054 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18055 {
18056 /* register access */
18057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18058 IEM_MC_BEGIN(3, 1);
18059 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18060 IEM_MC_ARG(uint8_t, u8Value, 1);
18061 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18062 IEM_MC_LOCAL(int32_t, rc);
18063
18064 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18065 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18066 IEM_MC_REF_EFLAGS(pEFlags);
18067 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18068 IEM_MC_IF_LOCAL_IS_Z(rc) {
18069 IEM_MC_ADVANCE_RIP();
18070 } IEM_MC_ELSE() {
18071 IEM_MC_RAISE_DIVIDE_ERROR();
18072 } IEM_MC_ENDIF();
18073
18074 IEM_MC_END();
18075 }
18076 else
18077 {
18078 /* memory access. */
18079 IEM_MC_BEGIN(3, 2);
18080 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18081 IEM_MC_ARG(uint8_t, u8Value, 1);
18082 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18084 IEM_MC_LOCAL(int32_t, rc);
18085
18086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18088 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18089 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18090 IEM_MC_REF_EFLAGS(pEFlags);
18091 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18092 IEM_MC_IF_LOCAL_IS_Z(rc) {
18093 IEM_MC_ADVANCE_RIP();
18094 } IEM_MC_ELSE() {
18095 IEM_MC_RAISE_DIVIDE_ERROR();
18096 } IEM_MC_ENDIF();
18097
18098 IEM_MC_END();
18099 }
18100 return VINF_SUCCESS;
18101}
18102
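/*
 * Note: for this byte-sized group the implicit operands are AX in and AL/AH
 * out.  Worked example of the divide-error path guarded by 'rc' above
 * (register values made up):
 *
 * @code
 *  // div bl with AX=0x0200, BL=0x02: the quotient 0x100 does not fit in AL,
 *  // so the worker returns non-zero and #DE is raised instead of advancing
 *  // RIP.  div bl with AX=0x0033, BL=0x10 yields AL=0x03, AH=0x03.
 * @endcode
 */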
18103
18104/** Opcode 0xf7 /4, /5, /6 and /7. */
18105FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18106{
18107 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18108
18109 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18110 {
18111 /* register access */
18113 switch (pVCpu->iem.s.enmEffOpSize)
18114 {
18115 case IEMMODE_16BIT:
18116 {
18117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18118 IEM_MC_BEGIN(4, 1);
18119 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18120 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18121 IEM_MC_ARG(uint16_t, u16Value, 2);
18122 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18123 IEM_MC_LOCAL(int32_t, rc);
18124
18125 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18126 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18127 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18128 IEM_MC_REF_EFLAGS(pEFlags);
18129 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18130 IEM_MC_IF_LOCAL_IS_Z(rc) {
18131 IEM_MC_ADVANCE_RIP();
18132 } IEM_MC_ELSE() {
18133 IEM_MC_RAISE_DIVIDE_ERROR();
18134 } IEM_MC_ENDIF();
18135
18136 IEM_MC_END();
18137 return VINF_SUCCESS;
18138 }
18139
18140 case IEMMODE_32BIT:
18141 {
18142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18143 IEM_MC_BEGIN(4, 1);
18144 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18145 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18146 IEM_MC_ARG(uint32_t, u32Value, 2);
18147 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18148 IEM_MC_LOCAL(int32_t, rc);
18149
18150 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18151 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18152 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18153 IEM_MC_REF_EFLAGS(pEFlags);
18154 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18155 IEM_MC_IF_LOCAL_IS_Z(rc) {
18156 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18157 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18158 IEM_MC_ADVANCE_RIP();
18159 } IEM_MC_ELSE() {
18160 IEM_MC_RAISE_DIVIDE_ERROR();
18161 } IEM_MC_ENDIF();
18162
18163 IEM_MC_END();
18164 return VINF_SUCCESS;
18165 }
18166
18167 case IEMMODE_64BIT:
18168 {
18169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18170 IEM_MC_BEGIN(4, 1);
18171 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18172 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18173 IEM_MC_ARG(uint64_t, u64Value, 2);
18174 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18175 IEM_MC_LOCAL(int32_t, rc);
18176
18177 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18178 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18179 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18180 IEM_MC_REF_EFLAGS(pEFlags);
18181 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18182 IEM_MC_IF_LOCAL_IS_Z(rc) {
18183 IEM_MC_ADVANCE_RIP();
18184 } IEM_MC_ELSE() {
18185 IEM_MC_RAISE_DIVIDE_ERROR();
18186 } IEM_MC_ENDIF();
18187
18188 IEM_MC_END();
18189 return VINF_SUCCESS;
18190 }
18191
18192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18193 }
18194 }
18195 else
18196 {
18197 /* memory access. */
18198 switch (pVCpu->iem.s.enmEffOpSize)
18199 {
18200 case IEMMODE_16BIT:
18201 {
18202 IEM_MC_BEGIN(4, 2);
18203 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18204 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18205 IEM_MC_ARG(uint16_t, u16Value, 2);
18206 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18208 IEM_MC_LOCAL(int32_t, rc);
18209
18210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18212 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18213 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18214 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18215 IEM_MC_REF_EFLAGS(pEFlags);
18216 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18217 IEM_MC_IF_LOCAL_IS_Z(rc) {
18218 IEM_MC_ADVANCE_RIP();
18219 } IEM_MC_ELSE() {
18220 IEM_MC_RAISE_DIVIDE_ERROR();
18221 } IEM_MC_ENDIF();
18222
18223 IEM_MC_END();
18224 return VINF_SUCCESS;
18225 }
18226
18227 case IEMMODE_32BIT:
18228 {
18229 IEM_MC_BEGIN(4, 2);
18230 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18231 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18232 IEM_MC_ARG(uint32_t, u32Value, 2);
18233 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18235 IEM_MC_LOCAL(int32_t, rc);
18236
18237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18239 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18240 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18241 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18242 IEM_MC_REF_EFLAGS(pEFlags);
18243 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18244 IEM_MC_IF_LOCAL_IS_Z(rc) {
18245 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18246 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18247 IEM_MC_ADVANCE_RIP();
18248 } IEM_MC_ELSE() {
18249 IEM_MC_RAISE_DIVIDE_ERROR();
18250 } IEM_MC_ENDIF();
18251
18252 IEM_MC_END();
18253 return VINF_SUCCESS;
18254 }
18255
18256 case IEMMODE_64BIT:
18257 {
18258 IEM_MC_BEGIN(4, 2);
18259 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18260 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18261 IEM_MC_ARG(uint64_t, u64Value, 2);
18262 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18264 IEM_MC_LOCAL(int32_t, rc);
18265
18266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18268 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18269 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18270 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18271 IEM_MC_REF_EFLAGS(pEFlags);
18272 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18273 IEM_MC_IF_LOCAL_IS_Z(rc) {
18274 IEM_MC_ADVANCE_RIP();
18275 } IEM_MC_ELSE() {
18276 IEM_MC_RAISE_DIVIDE_ERROR();
18277 } IEM_MC_ENDIF();
18278
18279 IEM_MC_END();
18280 return VINF_SUCCESS;
18281 }
18282
18283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18284 }
18285 }
18286}
18287
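/*
 * Note: the wider forms pair xDX:xAX as the implicit double-width operand.
 * E.g. 16-bit 'mul cx' with AX=0x1234 and CX=0x0100 yields DX:AX =
 * 0x0012:0x3400, with CF/OF set because DX is non-zero.  (Worked example;
 * values made up.)
 */
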
18288/** Opcode 0xf6. */
18289FNIEMOP_DEF(iemOp_Grp3_Eb)
18290{
18291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18292 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18293 {
18294 case 0:
18295 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
18296 case 1:
18297/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18298 return IEMOP_RAISE_INVALID_OPCODE();
18299 case 2:
18300 IEMOP_MNEMONIC(not_Eb, "not Eb");
18301 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
18302 case 3:
18303 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
18304 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
18305 case 4:
18306 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
18307 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18308 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
18309 case 5:
18310 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
18311 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18312 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
18313 case 6:
18314 IEMOP_MNEMONIC(div_Eb, "div Eb");
18315 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18316 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
18317 case 7:
18318 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
18319 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18320 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
18321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18322 }
18323}
18324
18325
18326/** Opcode 0xf7. */
18327FNIEMOP_DEF(iemOp_Grp3_Ev)
18328{
18329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18330 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18331 {
18332 case 0:
18333 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
18334 case 1:
18335/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18336 return IEMOP_RAISE_INVALID_OPCODE();
18337 case 2:
18338 IEMOP_MNEMONIC(not_Ev, "not Ev");
18339 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
18340 case 3:
18341 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
18342 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
18343 case 4:
18344 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
18345 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18346 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
18347 case 5:
18348 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
18349 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18350 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
18351 case 6:
18352 IEMOP_MNEMONIC(div_Ev, "div Ev");
18353 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18354 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
18355 case 7:
18356 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
18357 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18358 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
18359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18360 }
18361}
18362
18363
18364/** Opcode 0xf8. */
18365FNIEMOP_DEF(iemOp_clc)
18366{
18367 IEMOP_MNEMONIC(clc, "clc");
18368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18369 IEM_MC_BEGIN(0, 0);
18370 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
18371 IEM_MC_ADVANCE_RIP();
18372 IEM_MC_END();
18373 return VINF_SUCCESS;
18374}
18375
18376
18377/** Opcode 0xf9. */
18378FNIEMOP_DEF(iemOp_stc)
18379{
18380 IEMOP_MNEMONIC(stc, "stc");
18381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18382 IEM_MC_BEGIN(0, 0);
18383 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
18384 IEM_MC_ADVANCE_RIP();
18385 IEM_MC_END();
18386 return VINF_SUCCESS;
18387}
18388
18389
18390/** Opcode 0xfa. */
18391FNIEMOP_DEF(iemOp_cli)
18392{
18393 IEMOP_MNEMONIC(cli, "cli");
18394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18395 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
18396}
18397
18398
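/** Opcode 0xfb. */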
18399FNIEMOP_DEF(iemOp_sti)
18400{
18401 IEMOP_MNEMONIC(sti, "sti");
18402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18403 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
18404}
18405
18406
18407/** Opcode 0xfc. */
18408FNIEMOP_DEF(iemOp_cld)
18409{
18410 IEMOP_MNEMONIC(cld, "cld");
18411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18412 IEM_MC_BEGIN(0, 0);
18413 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
18414 IEM_MC_ADVANCE_RIP();
18415 IEM_MC_END();
18416 return VINF_SUCCESS;
18417}
18418
18419
18420/** Opcode 0xfd. */
18421FNIEMOP_DEF(iemOp_std)
18422{
18423 IEMOP_MNEMONIC(std, "std");
18424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18425 IEM_MC_BEGIN(0, 0);
18426 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
18427 IEM_MC_ADVANCE_RIP();
18428 IEM_MC_END();
18429 return VINF_SUCCESS;
18430}
18431
18432
18433/** Opcode 0xfe. */
18434FNIEMOP_DEF(iemOp_Grp4)
18435{
18436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18437 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18438 {
18439 case 0:
18440 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
18441 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
18442 case 1:
18443 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
18444 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
18445 default:
18446 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
18447 return IEMOP_RAISE_INVALID_OPCODE();
18448 }
18449}
18450
18451
18452/**
18453 * Opcode 0xff /2.
18454 * @param bRm The RM byte.
18455 */
18456FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
18457{
18458 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
18459 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18460
18461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18462 {
18463 /* The new RIP is taken from a register. */
18464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18465 switch (pVCpu->iem.s.enmEffOpSize)
18466 {
18467 case IEMMODE_16BIT:
18468 IEM_MC_BEGIN(1, 0);
18469 IEM_MC_ARG(uint16_t, u16Target, 0);
18470 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18471 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18472 IEM_MC_END();
18473 return VINF_SUCCESS;
18474
18475 case IEMMODE_32BIT:
18476 IEM_MC_BEGIN(1, 0);
18477 IEM_MC_ARG(uint32_t, u32Target, 0);
18478 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18479 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18480 IEM_MC_END();
18481 return VINF_SUCCESS;
18482
18483 case IEMMODE_64BIT:
18484 IEM_MC_BEGIN(1, 0);
18485 IEM_MC_ARG(uint64_t, u64Target, 0);
18486 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18487 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18488 IEM_MC_END();
18489 return VINF_SUCCESS;
18490
18491 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18492 }
18493 }
18494 else
18495 {
18496 /* The new RIP is taken from a memory location. */
18497 switch (pVCpu->iem.s.enmEffOpSize)
18498 {
18499 case IEMMODE_16BIT:
18500 IEM_MC_BEGIN(1, 1);
18501 IEM_MC_ARG(uint16_t, u16Target, 0);
18502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18505 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18506 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18507 IEM_MC_END();
18508 return VINF_SUCCESS;
18509
18510 case IEMMODE_32BIT:
18511 IEM_MC_BEGIN(1, 1);
18512 IEM_MC_ARG(uint32_t, u32Target, 0);
18513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18516 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18517 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18518 IEM_MC_END();
18519 return VINF_SUCCESS;
18520
18521 case IEMMODE_64BIT:
18522 IEM_MC_BEGIN(1, 1);
18523 IEM_MC_ARG(uint64_t, u64Target, 0);
18524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18527 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18528 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18529 IEM_MC_END();
18530 return VINF_SUCCESS;
18531
18532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18533 }
18534 }
18535}
18536
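/** C implementation worker signature for far branches (callf / jmpf). */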
18537typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18538
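/**
 * Common worker for grp5 far call/jump on a far pointer in memory (Ep).
 *
 * @param   bRm         The RM byte.
 * @param   pfnCImpl    The C implementation doing the actual branching.
 */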
18539FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18540{
18541 /* A register cannot hold a far pointer, so mod=3 is invalid. */
18542 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18543 { /* likely */ }
18544 else
18545 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18546
18547 /* Far pointer loaded from memory. */
18548 switch (pVCpu->iem.s.enmEffOpSize)
18549 {
18550 case IEMMODE_16BIT:
18551 IEM_MC_BEGIN(3, 1);
18552 IEM_MC_ARG(uint16_t, u16Sel, 0);
18553 IEM_MC_ARG(uint16_t, offSeg, 1);
18554 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18558 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18559 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18560 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18561 IEM_MC_END();
18562 return VINF_SUCCESS;
18563
18564 case IEMMODE_64BIT:
18565 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18566 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18567 * and call far qword [rsp] encodings. */
18568 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18569 {
18570 IEM_MC_BEGIN(3, 1);
18571 IEM_MC_ARG(uint16_t, u16Sel, 0);
18572 IEM_MC_ARG(uint64_t, offSeg, 1);
18573 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
18574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18577 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18578 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18579 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18580 IEM_MC_END();
18581 return VINF_SUCCESS;
18582 }
18583 /* AMD falls thru, reusing the 32-bit code path. */

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
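/* Note: with a 16-bit operand size the memory operand is an m16:16 far
   pointer (4 bytes), with 32-bit an m16:32 (6 bytes), and with REX.W on
   Intel an m16:64 (10 bytes) - hence the U16/U32/U64 offset fetches and
   the selector displacements of 2, 4 and 8 above. */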


/**
 * Opcode 0xff /3.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}


/**
 * Opcode 0xff /4.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
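    /* In 64-bit mode near branches default to a 64-bit operand size (no REX.W needed). */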
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
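/* Illustrative examples: mod=3 gives e.g. 'jmp rax' (target taken from a
   register), while the other mod values give e.g. 'jmp qword [rax]'
   (target loaded from memory). */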


/**
 * Opcode 0xff /5.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}


/**
 * Opcode 0xff /6.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xff. */
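/* The ModR/M reg field selects the Grp5 sub-instruction:
        /0: inc Ev      /1: dec Ev      /2: call Ev     /3: callf Ep
        /4: jmp Ev      /5: jmpf Ep     /6: push Ev     /7: invalid (#UD) */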
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}



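/** The one byte opcode decoder dispatch table, indexed by the opcode byte. */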
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};


/** @} */

#ifdef _MSC_VER
# pragma warning(pop)
#endif