VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 66007

Last change on this file since 66007 was 66007, checked in by vboxsync, 8 years ago

nit

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 307.2 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66007 2017-03-08 22:08:15Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.215389.xyz. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/** @name ..... opcodes.
26 *
27 * @{
28 */
29
30/** @} */
31
32
33/** @name Two byte opcodes (first byte 0x0f).
34 *
35 * @{
36 */
37
38/** Opcode 0x0f 0x00 /0. */
39FNIEMOPRM_DEF(iemOp_Grp6_sldt)
40{
41 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
42 IEMOP_HLP_MIN_286();
43 IEMOP_HLP_NO_REAL_OR_V86_MODE();
44
45 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
46 {
47 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
48 switch (pVCpu->iem.s.enmEffOpSize)
49 {
50 case IEMMODE_16BIT:
51 IEM_MC_BEGIN(0, 1);
52 IEM_MC_LOCAL(uint16_t, u16Ldtr);
53 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
54 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 break;
58
59 case IEMMODE_32BIT:
60 IEM_MC_BEGIN(0, 1);
61 IEM_MC_LOCAL(uint32_t, u32Ldtr);
62 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
63 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
64 IEM_MC_ADVANCE_RIP();
65 IEM_MC_END();
66 break;
67
68 case IEMMODE_64BIT:
69 IEM_MC_BEGIN(0, 1);
70 IEM_MC_LOCAL(uint64_t, u64Ldtr);
71 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
72 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
73 IEM_MC_ADVANCE_RIP();
74 IEM_MC_END();
75 break;
76
77 IEM_NOT_REACHED_DEFAULT_CASE_RET();
78 }
79 }
80 else
81 {
82 IEM_MC_BEGIN(0, 2);
83 IEM_MC_LOCAL(uint16_t, u16Ldtr);
84 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
85 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
86 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
87 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
88 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
89 IEM_MC_ADVANCE_RIP();
90 IEM_MC_END();
91 }
92 return VINF_SUCCESS;
93}
94
95
96/** Opcode 0x0f 0x00 /1. */
97FNIEMOPRM_DEF(iemOp_Grp6_str)
98{
99 IEMOP_MNEMONIC(str, "str Rv/Mw");
100 IEMOP_HLP_MIN_286();
101 IEMOP_HLP_NO_REAL_OR_V86_MODE();
102
103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
104 {
105 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
106 switch (pVCpu->iem.s.enmEffOpSize)
107 {
108 case IEMMODE_16BIT:
109 IEM_MC_BEGIN(0, 1);
110 IEM_MC_LOCAL(uint16_t, u16Tr);
111 IEM_MC_FETCH_TR_U16(u16Tr);
112 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 break;
116
117 case IEMMODE_32BIT:
118 IEM_MC_BEGIN(0, 1);
119 IEM_MC_LOCAL(uint32_t, u32Tr);
120 IEM_MC_FETCH_TR_U32(u32Tr);
121 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
122 IEM_MC_ADVANCE_RIP();
123 IEM_MC_END();
124 break;
125
126 case IEMMODE_64BIT:
127 IEM_MC_BEGIN(0, 1);
128 IEM_MC_LOCAL(uint64_t, u64Tr);
129 IEM_MC_FETCH_TR_U64(u64Tr);
130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 break;
134
135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
136 }
137 }
138 else
139 {
140 IEM_MC_BEGIN(0, 2);
141 IEM_MC_LOCAL(uint16_t, u16Tr);
142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
144 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
145 IEM_MC_FETCH_TR_U16(u16Tr);
146 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 }
150 return VINF_SUCCESS;
151}
152
153
154/** Opcode 0x0f 0x00 /2. */
155FNIEMOPRM_DEF(iemOp_Grp6_lldt)
156{
157 IEMOP_MNEMONIC(lldt, "lldt Ew");
158 IEMOP_HLP_MIN_286();
159 IEMOP_HLP_NO_REAL_OR_V86_MODE();
160
161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
162 {
163 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
164 IEM_MC_BEGIN(1, 0);
165 IEM_MC_ARG(uint16_t, u16Sel, 0);
166 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
167 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
168 IEM_MC_END();
169 }
170 else
171 {
172 IEM_MC_BEGIN(1, 1);
173 IEM_MC_ARG(uint16_t, u16Sel, 0);
174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
176 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
177 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
178 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
179 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
180 IEM_MC_END();
181 }
182 return VINF_SUCCESS;
183}
184
185
186/** Opcode 0x0f 0x00 /3. */
187FNIEMOPRM_DEF(iemOp_Grp6_ltr)
188{
189 IEMOP_MNEMONIC(ltr, "ltr Ew");
190 IEMOP_HLP_MIN_286();
191 IEMOP_HLP_NO_REAL_OR_V86_MODE();
192
193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
194 {
195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
196 IEM_MC_BEGIN(1, 0);
197 IEM_MC_ARG(uint16_t, u16Sel, 0);
198 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
199 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
200 IEM_MC_END();
201 }
202 else
203 {
204 IEM_MC_BEGIN(1, 1);
205 IEM_MC_ARG(uint16_t, u16Sel, 0);
206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
209 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test ordre */
210 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
211 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
212 IEM_MC_END();
213 }
214 return VINF_SUCCESS;
215}
216
217
218/** Opcode 0x0f 0x00 /3. */
219FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
220{
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_NO_REAL_OR_V86_MODE();
223
224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
225 {
226 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
227 IEM_MC_BEGIN(2, 0);
228 IEM_MC_ARG(uint16_t, u16Sel, 0);
229 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
230 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
231 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(2, 1);
237 IEM_MC_ARG(uint16_t, u16Sel, 0);
238 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
241 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
242 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
243 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
244 IEM_MC_END();
245 }
246 return VINF_SUCCESS;
247}
248
249
250/** Opcode 0x0f 0x00 /4. */
251FNIEMOPRM_DEF(iemOp_Grp6_verr)
252{
253 IEMOP_MNEMONIC(verr, "verr Ew");
254 IEMOP_HLP_MIN_286();
255 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
256}
257
258
259/** Opcode 0x0f 0x00 /5. */
260FNIEMOPRM_DEF(iemOp_Grp6_verw)
261{
262 IEMOP_MNEMONIC(verw, "verw Ew");
263 IEMOP_HLP_MIN_286();
264 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
265}
266
267
268/**
269 * Group 6 jump table.
270 */
271IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
272{
273 iemOp_Grp6_sldt,
274 iemOp_Grp6_str,
275 iemOp_Grp6_lldt,
276 iemOp_Grp6_ltr,
277 iemOp_Grp6_verr,
278 iemOp_Grp6_verw,
279 iemOp_InvalidWithRM,
280 iemOp_InvalidWithRM
281};
282
283/** Opcode 0x0f 0x00. */
284FNIEMOP_DEF(iemOp_Grp6)
285{
286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
287 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
288}
289
290
291/** Opcode 0x0f 0x01 /0. */
292FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
293{
294 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
295 IEMOP_HLP_MIN_286();
296 IEMOP_HLP_64BIT_OP_SIZE();
297 IEM_MC_BEGIN(2, 1);
298 IEM_MC_ARG(uint8_t, iEffSeg, 0);
299 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
303 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
304 IEM_MC_END();
305 return VINF_SUCCESS;
306}
307
308
309/** Opcode 0x0f 0x01 /0. */
310FNIEMOP_DEF(iemOp_Grp7_vmcall)
311{
312 IEMOP_BITCH_ABOUT_STUB();
313 return IEMOP_RAISE_INVALID_OPCODE();
314}
315
316
317/** Opcode 0x0f 0x01 /0. */
318FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
319{
320 IEMOP_BITCH_ABOUT_STUB();
321 return IEMOP_RAISE_INVALID_OPCODE();
322}
323
324
325/** Opcode 0x0f 0x01 /0. */
326FNIEMOP_DEF(iemOp_Grp7_vmresume)
327{
328 IEMOP_BITCH_ABOUT_STUB();
329 return IEMOP_RAISE_INVALID_OPCODE();
330}
331
332
333/** Opcode 0x0f 0x01 /0. */
334FNIEMOP_DEF(iemOp_Grp7_vmxoff)
335{
336 IEMOP_BITCH_ABOUT_STUB();
337 return IEMOP_RAISE_INVALID_OPCODE();
338}
339
340
341/** Opcode 0x0f 0x01 /1. */
342FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
343{
344 IEMOP_MNEMONIC(sidt, "sidt Ms");
345 IEMOP_HLP_MIN_286();
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(2, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
352 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
353 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
354 IEM_MC_END();
355 return VINF_SUCCESS;
356}
357
358
359/** Opcode 0x0f 0x01 /1. */
360FNIEMOP_DEF(iemOp_Grp7_monitor)
361{
362 IEMOP_MNEMONIC(monitor, "monitor");
363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
364 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
365}
366
367
368/** Opcode 0x0f 0x01 /1. */
369FNIEMOP_DEF(iemOp_Grp7_mwait)
370{
371 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
373 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
374}
375
376
377/** Opcode 0x0f 0x01 /2. */
378FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
379{
380 IEMOP_MNEMONIC(lgdt, "lgdt");
381 IEMOP_HLP_64BIT_OP_SIZE();
382 IEM_MC_BEGIN(3, 1);
383 IEM_MC_ARG(uint8_t, iEffSeg, 0);
384 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
389 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
390 IEM_MC_END();
391 return VINF_SUCCESS;
392}
393
394
395/** Opcode 0x0f 0x01 0xd0. */
396FNIEMOP_DEF(iemOp_Grp7_xgetbv)
397{
398 IEMOP_MNEMONIC(xgetbv, "xgetbv");
399 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
400 {
401 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
403 }
404 return IEMOP_RAISE_INVALID_OPCODE();
405}
406
407
408/** Opcode 0x0f 0x01 0xd1. */
409FNIEMOP_DEF(iemOp_Grp7_xsetbv)
410{
411 IEMOP_MNEMONIC(xsetbv, "xsetbv");
412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
413 {
414 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
416 }
417 return IEMOP_RAISE_INVALID_OPCODE();
418}
419
420
421/** Opcode 0x0f 0x01 /3. */
422FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
423{
424 IEMOP_MNEMONIC(lidt, "lidt");
425 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
426 ? IEMMODE_64BIT
427 : pVCpu->iem.s.enmEffOpSize;
428 IEM_MC_BEGIN(3, 1);
429 IEM_MC_ARG(uint8_t, iEffSeg, 0);
430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
431 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
434 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
435 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
436 IEM_MC_END();
437 return VINF_SUCCESS;
438}
439
440
441#ifdef VBOX_WITH_NESTED_HWVIRT
442/** Opcode 0x0f 0x01 0xd8. */
443FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
444{
445 IEMOP_MNEMONIC(vmrun, "vmrun");
446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
447}
448
449/** Opcode 0x0f 0x01 0xd9. */
450FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
451{
452 IEMOP_MNEMONIC(vmmcall, "vmmcall");
453 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
454}
455
456
457/** Opcode 0x0f 0x01 0xda. */
458FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
459{
460 IEMOP_MNEMONIC(vmload, "vmload");
461 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
462}
463
464
465/** Opcode 0x0f 0x01 0xdb. */
466FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
467{
468 IEMOP_MNEMONIC(vmsave, "vmsave");
469 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
470}
471
472
473/** Opcode 0x0f 0x01 0xdc. */
474FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
475{
476 IEMOP_MNEMONIC(stgi, "stgi");
477 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
478}
479
480
481/** Opcode 0x0f 0x01 0xdd. */
482FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
483{
484 IEMOP_MNEMONIC(clgi, "clgi");
485 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
486}
487
488
489/** Opcode 0x0f 0x01 0xdf. */
490FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
491{
492 IEMOP_MNEMONIC(invlpga, "invlpga");
493 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
494}
495#else
496/** Opcode 0x0f 0x01 0xd8. */
497FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
498
499/** Opcode 0x0f 0x01 0xd9. */
500FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
501
502/** Opcode 0x0f 0x01 0xda. */
503FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
504
505/** Opcode 0x0f 0x01 0xdb. */
506FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
507
508/** Opcode 0x0f 0x01 0xdc. */
509FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
510
511/** Opcode 0x0f 0x01 0xdd. */
512FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
513
514/** Opcode 0x0f 0x01 0xdf. */
515FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
516#endif /* VBOX_WITH_NESTED_HWVIRT */
517
518/** Opcode 0x0f 0x01 0xde. */
519FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
520
521/** Opcode 0x0f 0x01 /4. */
522FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
523{
524 IEMOP_MNEMONIC(smsw, "smsw");
525 IEMOP_HLP_MIN_286();
526 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
527 {
528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
529 switch (pVCpu->iem.s.enmEffOpSize)
530 {
531 case IEMMODE_16BIT:
532 IEM_MC_BEGIN(0, 1);
533 IEM_MC_LOCAL(uint16_t, u16Tmp);
534 IEM_MC_FETCH_CR0_U16(u16Tmp);
535 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
536 { /* likely */ }
537 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
538 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
539 else
540 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
541 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
542 IEM_MC_ADVANCE_RIP();
543 IEM_MC_END();
544 return VINF_SUCCESS;
545
546 case IEMMODE_32BIT:
547 IEM_MC_BEGIN(0, 1);
548 IEM_MC_LOCAL(uint32_t, u32Tmp);
549 IEM_MC_FETCH_CR0_U32(u32Tmp);
550 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
551 IEM_MC_ADVANCE_RIP();
552 IEM_MC_END();
553 return VINF_SUCCESS;
554
555 case IEMMODE_64BIT:
556 IEM_MC_BEGIN(0, 1);
557 IEM_MC_LOCAL(uint64_t, u64Tmp);
558 IEM_MC_FETCH_CR0_U64(u64Tmp);
559 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
560 IEM_MC_ADVANCE_RIP();
561 IEM_MC_END();
562 return VINF_SUCCESS;
563
564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
565 }
566 }
567 else
568 {
569 /* Ignore operand size here, memory refs are always 16-bit. */
570 IEM_MC_BEGIN(0, 2);
571 IEM_MC_LOCAL(uint16_t, u16Tmp);
572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
575 IEM_MC_FETCH_CR0_U16(u16Tmp);
576 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
577 { /* likely */ }
578 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
579 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
580 else
581 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
582 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
583 IEM_MC_ADVANCE_RIP();
584 IEM_MC_END();
585 return VINF_SUCCESS;
586 }
587}
588
589
590/** Opcode 0x0f 0x01 /6. */
591FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
592{
593 /* The operand size is effectively ignored, all is 16-bit and only the
594 lower 3-bits are used. */
595 IEMOP_MNEMONIC(lmsw, "lmsw");
596 IEMOP_HLP_MIN_286();
597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
598 {
599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
600 IEM_MC_BEGIN(1, 0);
601 IEM_MC_ARG(uint16_t, u16Tmp, 0);
602 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
603 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
604 IEM_MC_END();
605 }
606 else
607 {
608 IEM_MC_BEGIN(1, 1);
609 IEM_MC_ARG(uint16_t, u16Tmp, 0);
610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
613 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
614 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
615 IEM_MC_END();
616 }
617 return VINF_SUCCESS;
618}
619
620
621/** Opcode 0x0f 0x01 /7. */
622FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
623{
624 IEMOP_MNEMONIC(invlpg, "invlpg");
625 IEMOP_HLP_MIN_486();
626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
627 IEM_MC_BEGIN(1, 1);
628 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
630 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
631 IEM_MC_END();
632 return VINF_SUCCESS;
633}
634
635
636/** Opcode 0x0f 0x01 /7. */
637FNIEMOP_DEF(iemOp_Grp7_swapgs)
638{
639 IEMOP_MNEMONIC(swapgs, "swapgs");
640 IEMOP_HLP_ONLY_64BIT();
641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
642 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
643}
644
645
646/** Opcode 0x0f 0x01 /7. */
647FNIEMOP_DEF(iemOp_Grp7_rdtscp)
648{
649 NOREF(pVCpu);
650 IEMOP_BITCH_ABOUT_STUB();
651 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
652}
653
654
655/**
656 * Group 7 jump table, memory variant.
657 */
658IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
659{
660 iemOp_Grp7_sgdt,
661 iemOp_Grp7_sidt,
662 iemOp_Grp7_lgdt,
663 iemOp_Grp7_lidt,
664 iemOp_Grp7_smsw,
665 iemOp_InvalidWithRM,
666 iemOp_Grp7_lmsw,
667 iemOp_Grp7_invlpg
668};
669
670
671/** Opcode 0x0f 0x01. */
672FNIEMOP_DEF(iemOp_Grp7)
673{
674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
675 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
676 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
677
678 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
679 {
680 case 0:
681 switch (bRm & X86_MODRM_RM_MASK)
682 {
683 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
684 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
685 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
686 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
687 }
688 return IEMOP_RAISE_INVALID_OPCODE();
689
690 case 1:
691 switch (bRm & X86_MODRM_RM_MASK)
692 {
693 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
694 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
695 }
696 return IEMOP_RAISE_INVALID_OPCODE();
697
698 case 2:
699 switch (bRm & X86_MODRM_RM_MASK)
700 {
701 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
702 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
703 }
704 return IEMOP_RAISE_INVALID_OPCODE();
705
706 case 3:
707 switch (bRm & X86_MODRM_RM_MASK)
708 {
709 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
710 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
711 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
712 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
713 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
714 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
715 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
716 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
718 }
719
720 case 4:
721 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
722
723 case 5:
724 return IEMOP_RAISE_INVALID_OPCODE();
725
726 case 6:
727 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
728
729 case 7:
730 switch (bRm & X86_MODRM_RM_MASK)
731 {
732 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
733 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
734 }
735 return IEMOP_RAISE_INVALID_OPCODE();
736
737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
738 }
739}
740
741/** Opcode 0x0f 0x00 /3. */
742FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
743{
744 IEMOP_HLP_NO_REAL_OR_V86_MODE();
745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
746
747 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
748 {
749 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
750 switch (pVCpu->iem.s.enmEffOpSize)
751 {
752 case IEMMODE_16BIT:
753 {
754 IEM_MC_BEGIN(3, 0);
755 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
756 IEM_MC_ARG(uint16_t, u16Sel, 1);
757 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
758
759 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
760 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
761 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
762
763 IEM_MC_END();
764 return VINF_SUCCESS;
765 }
766
767 case IEMMODE_32BIT:
768 case IEMMODE_64BIT:
769 {
770 IEM_MC_BEGIN(3, 0);
771 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
772 IEM_MC_ARG(uint16_t, u16Sel, 1);
773 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
774
775 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
776 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
777 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
778
779 IEM_MC_END();
780 return VINF_SUCCESS;
781 }
782
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785 }
786 else
787 {
788 switch (pVCpu->iem.s.enmEffOpSize)
789 {
790 case IEMMODE_16BIT:
791 {
792 IEM_MC_BEGIN(3, 1);
793 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
794 IEM_MC_ARG(uint16_t, u16Sel, 1);
795 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
797
798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
799 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
800
801 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
802 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
803 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
804
805 IEM_MC_END();
806 return VINF_SUCCESS;
807 }
808
809 case IEMMODE_32BIT:
810 case IEMMODE_64BIT:
811 {
812 IEM_MC_BEGIN(3, 1);
813 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
814 IEM_MC_ARG(uint16_t, u16Sel, 1);
815 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
817
818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
819 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
820/** @todo testcase: make sure it's a 16-bit read. */
821
822 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
823 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
824 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
825
826 IEM_MC_END();
827 return VINF_SUCCESS;
828 }
829
830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
831 }
832 }
833}
834
835
836
837/** Opcode 0x0f 0x02. */
838FNIEMOP_DEF(iemOp_lar_Gv_Ew)
839{
840 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
841 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
842}
843
844
845/** Opcode 0x0f 0x03. */
846FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
847{
848 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
849 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
850}
851
852
853/** Opcode 0x0f 0x05. */
854FNIEMOP_DEF(iemOp_syscall)
855{
856 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
858 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
859}
860
861
862/** Opcode 0x0f 0x06. */
863FNIEMOP_DEF(iemOp_clts)
864{
865 IEMOP_MNEMONIC(clts, "clts");
866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
867 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
868}
869
870
871/** Opcode 0x0f 0x07. */
872FNIEMOP_DEF(iemOp_sysret)
873{
874 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
876 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
877}
878
879
880/** Opcode 0x0f 0x08. */
881FNIEMOP_STUB(iemOp_invd);
882// IEMOP_HLP_MIN_486();
883
884
885/** Opcode 0x0f 0x09. */
886FNIEMOP_DEF(iemOp_wbinvd)
887{
888 IEMOP_MNEMONIC(wbinvd, "wbinvd");
889 IEMOP_HLP_MIN_486();
890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
891 IEM_MC_BEGIN(0, 0);
892 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
893 IEM_MC_ADVANCE_RIP();
894 IEM_MC_END();
895 return VINF_SUCCESS; /* ignore for now */
896}
897
898
899/** Opcode 0x0f 0x0b. */
900FNIEMOP_DEF(iemOp_ud2)
901{
902 IEMOP_MNEMONIC(ud2, "ud2");
903 return IEMOP_RAISE_INVALID_OPCODE();
904}
905
906/** Opcode 0x0f 0x0d. */
907FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
908{
909 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
910 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
911 {
912 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
913 return IEMOP_RAISE_INVALID_OPCODE();
914 }
915
916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
917 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
918 {
919 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
920 return IEMOP_RAISE_INVALID_OPCODE();
921 }
922
923 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
924 {
925 case 2: /* Aliased to /0 for the time being. */
926 case 4: /* Aliased to /0 for the time being. */
927 case 5: /* Aliased to /0 for the time being. */
928 case 6: /* Aliased to /0 for the time being. */
929 case 7: /* Aliased to /0 for the time being. */
930 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
931 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
932 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
933 IEM_NOT_REACHED_DEFAULT_CASE_RET();
934 }
935
936 IEM_MC_BEGIN(0, 1);
937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
940 /* Currently a NOP. */
941 NOREF(GCPtrEffSrc);
942 IEM_MC_ADVANCE_RIP();
943 IEM_MC_END();
944 return VINF_SUCCESS;
945}
946
947
948/** Opcode 0x0f 0x0e. */
949FNIEMOP_STUB(iemOp_femms);
950
951
952/** Opcode 0x0f 0x0f 0x0c. */
953FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
954
955/** Opcode 0x0f 0x0f 0x0d. */
956FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
957
958/** Opcode 0x0f 0x0f 0x1c. */
959FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
960
961/** Opcode 0x0f 0x0f 0x1d. */
962FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
963
964/** Opcode 0x0f 0x0f 0x8a. */
965FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
966
967/** Opcode 0x0f 0x0f 0x8e. */
968FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
969
970/** Opcode 0x0f 0x0f 0x90. */
971FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
972
973/** Opcode 0x0f 0x0f 0x94. */
974FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
975
976/** Opcode 0x0f 0x0f 0x96. */
977FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
978
979/** Opcode 0x0f 0x0f 0x97. */
980FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
981
982/** Opcode 0x0f 0x0f 0x9a. */
983FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
984
985/** Opcode 0x0f 0x0f 0x9e. */
986FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
987
988/** Opcode 0x0f 0x0f 0xa0. */
989FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
990
991/** Opcode 0x0f 0x0f 0xa4. */
992FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
993
994/** Opcode 0x0f 0x0f 0xa6. */
995FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
996
997/** Opcode 0x0f 0x0f 0xa7. */
998FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
999
1000/** Opcode 0x0f 0x0f 0xaa. */
1001FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1002
1003/** Opcode 0x0f 0x0f 0xae. */
1004FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1005
1006/** Opcode 0x0f 0x0f 0xb0. */
1007FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1008
1009/** Opcode 0x0f 0x0f 0xb4. */
1010FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1011
1012/** Opcode 0x0f 0x0f 0xb6. */
1013FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1014
1015/** Opcode 0x0f 0x0f 0xb7. */
1016FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1017
1018/** Opcode 0x0f 0x0f 0xbb. */
1019FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1020
1021/** Opcode 0x0f 0x0f 0xbf. */
1022FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1023
1024
1025/** Opcode 0x0f 0x0f. */
1026FNIEMOP_DEF(iemOp_3Dnow)
1027{
1028 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1029 {
1030 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1031 return IEMOP_RAISE_INVALID_OPCODE();
1032 }
1033
1034 /* This is pretty sparse, use switch instead of table. */
1035 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1036 switch (b)
1037 {
1038 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
1039 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
1040 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
1041 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
1042 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
1043 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
1044 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
1045 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
1046 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
1047 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1048 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
1049 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
1050 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1051 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
1052 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1053 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1054 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
1055 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1056 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1057 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1058 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1059 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1060 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1061 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1062 default:
1063 return IEMOP_RAISE_INVALID_OPCODE();
1064 }
1065}
1066
1067
1068/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1069FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1070/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1071FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1072/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1073FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1074/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1075FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1076
1077
1078/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
1079FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1080{
1081 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1083 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1084 {
1085 /*
1086 * Register, register.
1087 */
1088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1089 IEM_MC_BEGIN(0, 0);
1090 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1091 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1092 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1093 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1094 IEM_MC_ADVANCE_RIP();
1095 IEM_MC_END();
1096 }
1097 else
1098 {
1099 /*
1100 * Memory, register.
1101 */
1102 IEM_MC_BEGIN(0, 2);
1103 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1105
1106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1108 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1109 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1110
1111 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1112 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1113
1114 IEM_MC_ADVANCE_RIP();
1115 IEM_MC_END();
1116 }
1117 return VINF_SUCCESS;
1118}
1119
1120
1121/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1122FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1123
1124/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1125FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1126
1127/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1128FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1129{
1130 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1132 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1133 {
1134 /*
1135 * Register, register.
1136 */
1137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1138 IEM_MC_BEGIN(0, 1);
1139 IEM_MC_LOCAL(uint64_t, uSrc);
1140
1141 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1142 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1143 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1144 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1145
1146 IEM_MC_ADVANCE_RIP();
1147 IEM_MC_END();
1148 }
1149 else
1150 {
1151 /*
1152 * Memory, register.
1153 */
1154 IEM_MC_BEGIN(0, 2);
1155 IEM_MC_LOCAL(uint64_t, uSrc);
1156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1157
1158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1160 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1161 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1162
1163 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1164 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1165
1166 IEM_MC_ADVANCE_RIP();
1167 IEM_MC_END();
1168 }
1169 return VINF_SUCCESS;
1170}
1171
1172
1173/** Opcode 0x0f 0x12. */
1174FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1175
1176/** Opcode 0x66 0x0f 0x12. */
1177FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1178
1179/** Opcode 0xf3 0x0f 0x12. */
1180FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1181
1182/** Opcode 0xf2 0x0f 0x12. */
1183FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1184
1185/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1186FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1187
1188/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1189FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1190{
1191 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1194 {
1195#if 0
1196 /*
1197 * Register, register.
1198 */
1199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1200 IEM_MC_BEGIN(0, 1);
1201 IEM_MC_LOCAL(uint64_t, uSrc);
1202 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1203 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1204 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1205 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1206 IEM_MC_ADVANCE_RIP();
1207 IEM_MC_END();
1208#else
1209 return IEMOP_RAISE_INVALID_OPCODE();
1210#endif
1211 }
1212 else
1213 {
1214 /*
1215 * Memory, register.
1216 */
1217 IEM_MC_BEGIN(0, 2);
1218 IEM_MC_LOCAL(uint64_t, uSrc);
1219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1220
1221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1223 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1224 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1225
1226 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1227 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1228
1229 IEM_MC_ADVANCE_RIP();
1230 IEM_MC_END();
1231 }
1232 return VINF_SUCCESS;
1233}
1234
1235/* Opcode 0xf3 0x0f 0x13 - invalid */
1236/* Opcode 0xf2 0x0f 0x13 - invalid */
1237
1238/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1239FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1240/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1241FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1242/* Opcode 0xf3 0x0f 0x14 - invalid */
1243/* Opcode 0xf2 0x0f 0x14 - invalid */
1244/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1245FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1246/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1247FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1248/* Opcode 0xf3 0x0f 0x15 - invalid */
1249/* Opcode 0xf2 0x0f 0x15 - invalid */
1250/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1251FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1252/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1253FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1254/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1255FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1256/* Opcode 0xf2 0x0f 0x16 - invalid */
1257/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1258FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1259/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1260FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1261/* Opcode 0xf3 0x0f 0x17 - invalid */
1262/* Opcode 0xf2 0x0f 0x17 - invalid */
1263
1264
1265/** Opcode 0x0f 0x18. */
1266FNIEMOP_DEF(iemOp_prefetch_Grp16)
1267{
1268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1269 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1270 {
1271 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1272 {
1273 case 4: /* Aliased to /0 for the time being according to AMD. */
1274 case 5: /* Aliased to /0 for the time being according to AMD. */
1275 case 6: /* Aliased to /0 for the time being according to AMD. */
1276 case 7: /* Aliased to /0 for the time being according to AMD. */
1277 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1278 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1279 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1280 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1282 }
1283
1284 IEM_MC_BEGIN(0, 1);
1285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1288 /* Currently a NOP. */
1289 NOREF(GCPtrEffSrc);
1290 IEM_MC_ADVANCE_RIP();
1291 IEM_MC_END();
1292 return VINF_SUCCESS;
1293 }
1294
1295 return IEMOP_RAISE_INVALID_OPCODE();
1296}
1297
1298
1299/** Opcode 0x0f 0x19..0x1f. */
1300FNIEMOP_DEF(iemOp_nop_Ev)
1301{
1302 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1304 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1305 {
1306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1307 IEM_MC_BEGIN(0, 0);
1308 IEM_MC_ADVANCE_RIP();
1309 IEM_MC_END();
1310 }
1311 else
1312 {
1313 IEM_MC_BEGIN(0, 1);
1314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1317 /* Currently a NOP. */
1318 NOREF(GCPtrEffSrc);
1319 IEM_MC_ADVANCE_RIP();
1320 IEM_MC_END();
1321 }
1322 return VINF_SUCCESS;
1323}
1324
1325
1326/** Opcode 0x0f 0x20. */
1327FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1328{
1329 /* mod is ignored, as is operand size overrides. */
1330 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1331 IEMOP_HLP_MIN_386();
1332 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1333 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1334 else
1335 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1336
1337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1338 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1339 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1340 {
1341 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1342 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1343 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1344 iCrReg |= 8;
1345 }
1346 switch (iCrReg)
1347 {
1348 case 0: case 2: case 3: case 4: case 8:
1349 break;
1350 default:
1351 return IEMOP_RAISE_INVALID_OPCODE();
1352 }
1353 IEMOP_HLP_DONE_DECODING();
1354
1355 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1356}
1357
1358
1359/** Opcode 0x0f 0x21. */
1360FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1361{
1362 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1363 IEMOP_HLP_MIN_386();
1364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1366 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1367 return IEMOP_RAISE_INVALID_OPCODE();
1368 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1369 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1370 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1371}
1372
1373
1374/** Opcode 0x0f 0x22. */
1375FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1376{
1377 /* mod is ignored, as is operand size overrides. */
1378 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1379 IEMOP_HLP_MIN_386();
1380 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1381 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1382 else
1383 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1384
1385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1386 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1387 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1388 {
1389 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1390 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1391 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1392 iCrReg |= 8;
1393 }
1394 switch (iCrReg)
1395 {
1396 case 0: case 2: case 3: case 4: case 8:
1397 break;
1398 default:
1399 return IEMOP_RAISE_INVALID_OPCODE();
1400 }
1401 IEMOP_HLP_DONE_DECODING();
1402
1403 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1404}
1405
1406
1407/** Opcode 0x0f 0x23. */
1408FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1409{
1410 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1411 IEMOP_HLP_MIN_386();
1412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1414 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1415 return IEMOP_RAISE_INVALID_OPCODE();
1416 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1417 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1418 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1419}
1420
1421
1422/** Opcode 0x0f 0x24. */
1423FNIEMOP_DEF(iemOp_mov_Rd_Td)
1424{
1425 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1426 /** @todo works on 386 and 486. */
1427 /* The RM byte is not considered, see testcase. */
1428 return IEMOP_RAISE_INVALID_OPCODE();
1429}
1430
1431
1432/** Opcode 0x0f 0x26. */
1433FNIEMOP_DEF(iemOp_mov_Td_Rd)
1434{
1435 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1436 /** @todo works on 386 and 486. */
1437 /* The RM byte is not considered, see testcase. */
1438 return IEMOP_RAISE_INVALID_OPCODE();
1439}
1440
1441
1442/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1443FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1444{
1445 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1447 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1448 {
1449 /*
1450 * Register, register.
1451 */
1452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1453 IEM_MC_BEGIN(0, 0);
1454 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1455 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1456 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1457 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1458 IEM_MC_ADVANCE_RIP();
1459 IEM_MC_END();
1460 }
1461 else
1462 {
1463 /*
1464 * Register, memory.
1465 */
1466 IEM_MC_BEGIN(0, 2);
1467 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1469
1470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1472 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1473 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1474
1475 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1476 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1477
1478 IEM_MC_ADVANCE_RIP();
1479 IEM_MC_END();
1480 }
1481 return VINF_SUCCESS;
1482}
1483
1484/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1485FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1486{
1487 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1489 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1490 {
1491 /*
1492 * Register, register.
1493 */
1494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1495 IEM_MC_BEGIN(0, 0);
1496 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1497 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1498 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1499 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1500 IEM_MC_ADVANCE_RIP();
1501 IEM_MC_END();
1502 }
1503 else
1504 {
1505 /*
1506 * Register, memory.
1507 */
1508 IEM_MC_BEGIN(0, 2);
1509 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1511
1512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1514 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1515 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1516
1517 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1518 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1519
1520 IEM_MC_ADVANCE_RIP();
1521 IEM_MC_END();
1522 }
1523 return VINF_SUCCESS;
1524}
1525
1526/* Opcode 0xf3 0x0f 0x28 - invalid */
1527/* Opcode 0xf2 0x0f 0x28 - invalid */
1528
1529/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
1530FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1531{
1532 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1534 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1535 {
1536 /*
1537 * Register, register.
1538 */
1539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1540 IEM_MC_BEGIN(0, 0);
1541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1542 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1543 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1544 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1545 IEM_MC_ADVANCE_RIP();
1546 IEM_MC_END();
1547 }
1548 else
1549 {
1550 /*
1551 * Memory, register.
1552 */
1553 IEM_MC_BEGIN(0, 2);
1554 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1556
1557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1559 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1560 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1561
1562 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1563 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1564
1565 IEM_MC_ADVANCE_RIP();
1566 IEM_MC_END();
1567 }
1568 return VINF_SUCCESS;
1569}
1570
1571/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
1572FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1573{
1574 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1576 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1577 {
1578 /*
1579 * Register, register.
1580 */
1581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1582 IEM_MC_BEGIN(0, 0);
1583 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1584 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1585 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1586 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1587 IEM_MC_ADVANCE_RIP();
1588 IEM_MC_END();
1589 }
1590 else
1591 {
1592 /*
1593 * Memory, register.
1594 */
1595 IEM_MC_BEGIN(0, 2);
1596 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1598
1599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1601 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1602 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1603
1604 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1605 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1606
1607 IEM_MC_ADVANCE_RIP();
1608 IEM_MC_END();
1609 }
1610 return VINF_SUCCESS;
1611}
1612
1613/* Opcode 0xf3 0x0f 0x29 - invalid */
1614/* Opcode 0xf2 0x0f 0x29 - invalid */
1615
1616
1617/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1618FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1619/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1620FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1621/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1622FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
1623/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1624FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
1625
1626
1627/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
1628FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1629{
1630 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1632 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1633 {
1634 /*
1635 * memory, register.
1636 */
1637 IEM_MC_BEGIN(0, 2);
1638 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1640
1641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1643 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1644 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1645
1646 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1647 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1648
1649 IEM_MC_ADVANCE_RIP();
1650 IEM_MC_END();
1651 }
1652 /* The register, register encoding is invalid. */
1653 else
1654 return IEMOP_RAISE_INVALID_OPCODE();
1655 return VINF_SUCCESS;
1656}
1657
1658/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
1659FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1660{
1661 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
1662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1663 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1664 {
1665 /*
1666 * memory, register.
1667 */
1668 IEM_MC_BEGIN(0, 2);
1669 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1671
1672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1674 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1675 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1676
1677 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1678 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1679
1680 IEM_MC_ADVANCE_RIP();
1681 IEM_MC_END();
1682 }
1683 /* The register, register encoding is invalid. */
1684 else
1685 return IEMOP_RAISE_INVALID_OPCODE();
1686 return VINF_SUCCESS;
1687}
1688/* Opcode 0xf3 0x0f 0x2b - invalid */
1689/* Opcode 0xf2 0x0f 0x2b - invalid */
1690
1691
1692/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1693FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1694/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1695FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1696/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1697FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1698/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1699FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1700
1701/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1702FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1703/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1704FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1705/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1706FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1707/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1708FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1709
1710/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1711FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1712/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1713FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1714/* Opcode 0xf3 0x0f 0x2e - invalid */
1715/* Opcode 0xf2 0x0f 0x2e - invalid */
1716
1717/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
1718FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1719/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
1720FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1721/* Opcode 0xf3 0x0f 0x2f - invalid */
1722/* Opcode 0xf2 0x0f 0x2f - invalid */
1723
1724/** Opcode 0x0f 0x30. */
1725FNIEMOP_DEF(iemOp_wrmsr)
1726{
1727 IEMOP_MNEMONIC(wrmsr, "wrmsr");
1728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1729 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
1730}
1731
1732
1733/** Opcode 0x0f 0x31. */
1734FNIEMOP_DEF(iemOp_rdtsc)
1735{
1736 IEMOP_MNEMONIC(rdtsc, "rdtsc");
1737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1738 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
1739}
1740
1741
1742/** Opcode 0x0f 0x33. */
1743FNIEMOP_DEF(iemOp_rdmsr)
1744{
1745 IEMOP_MNEMONIC(rdmsr, "rdmsr");
1746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1747 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
1748}
1749
1750
1751/** Opcode 0x0f 0x34. */
1752FNIEMOP_STUB(iemOp_rdpmc);
1753/** Opcode 0x0f 0x34. */
1754FNIEMOP_STUB(iemOp_sysenter);
1755/** Opcode 0x0f 0x35. */
1756FNIEMOP_STUB(iemOp_sysexit);
1757/** Opcode 0x0f 0x37. */
1758FNIEMOP_STUB(iemOp_getsec);
1759/** Opcode 0x0f 0x38. */
1760FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1761/** Opcode 0x0f 0x3a. */
1762FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1763
1764
1765/**
1766 * Implements a conditional move.
1767 *
1768 * Wish there was an obvious way to do this where we could share and reduce
1769 * code bloat.
1770 *
1771 * @param a_Cnd The conditional "microcode" operation.
1772 */
1773#define CMOV_X(a_Cnd) \
1774 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1775 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1776 { \
1777 switch (pVCpu->iem.s.enmEffOpSize) \
1778 { \
1779 case IEMMODE_16BIT: \
1780 IEM_MC_BEGIN(0, 1); \
1781 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1782 a_Cnd { \
1783 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1784 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1785 } IEM_MC_ENDIF(); \
1786 IEM_MC_ADVANCE_RIP(); \
1787 IEM_MC_END(); \
1788 return VINF_SUCCESS; \
1789 \
1790 case IEMMODE_32BIT: \
1791 IEM_MC_BEGIN(0, 1); \
1792 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1793 a_Cnd { \
1794 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1795 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1796 } IEM_MC_ELSE() { \
1797 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1798 } IEM_MC_ENDIF(); \
1799 IEM_MC_ADVANCE_RIP(); \
1800 IEM_MC_END(); \
1801 return VINF_SUCCESS; \
1802 \
1803 case IEMMODE_64BIT: \
1804 IEM_MC_BEGIN(0, 1); \
1805 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1806 a_Cnd { \
1807 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1808 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1809 } IEM_MC_ENDIF(); \
1810 IEM_MC_ADVANCE_RIP(); \
1811 IEM_MC_END(); \
1812 return VINF_SUCCESS; \
1813 \
1814 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1815 } \
1816 } \
1817 else \
1818 { \
1819 switch (pVCpu->iem.s.enmEffOpSize) \
1820 { \
1821 case IEMMODE_16BIT: \
1822 IEM_MC_BEGIN(0, 2); \
1823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1824 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1826 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1827 a_Cnd { \
1828 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1829 } IEM_MC_ENDIF(); \
1830 IEM_MC_ADVANCE_RIP(); \
1831 IEM_MC_END(); \
1832 return VINF_SUCCESS; \
1833 \
1834 case IEMMODE_32BIT: \
1835 IEM_MC_BEGIN(0, 2); \
1836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1837 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1839 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1840 a_Cnd { \
1841 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1842 } IEM_MC_ELSE() { \
1843 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1844 } IEM_MC_ENDIF(); \
1845 IEM_MC_ADVANCE_RIP(); \
1846 IEM_MC_END(); \
1847 return VINF_SUCCESS; \
1848 \
1849 case IEMMODE_64BIT: \
1850 IEM_MC_BEGIN(0, 2); \
1851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1852 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1854 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1855 a_Cnd { \
1856 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1857 } IEM_MC_ENDIF(); \
1858 IEM_MC_ADVANCE_RIP(); \
1859 IEM_MC_END(); \
1860 return VINF_SUCCESS; \
1861 \
1862 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1863 } \
1864 } do {} while (0)
1865
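/*
 * Rough illustration (not generated code) of what CMOV_X expands to for the
 * 32-bit register-to-register case, using hypothetical iDstReg/iSrcReg for
 * the ModRM-derived register indices:
 *
 *      IEM_MC_BEGIN(0, 1);
 *      IEM_MC_LOCAL(uint32_t, u32Tmp);
 *      a_Cnd {                                  // e.g. IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)
 *          IEM_MC_FETCH_GREG_U32(u32Tmp, iSrcReg);
 *          IEM_MC_STORE_GREG_U32(iDstReg, u32Tmp);
 *      } IEM_MC_ELSE() {
 *          IEM_MC_CLEAR_HIGH_GREG_U64(iDstReg); // 32-bit writes clear bits 63:32 even when the condition is false
 *      } IEM_MC_ENDIF();
 *      IEM_MC_ADVANCE_RIP();
 *      IEM_MC_END();
 *
 * Only the 32-bit cases need the IEM_MC_ELSE() arm; the 16-bit and 64-bit
 * forms leave the destination untouched when the condition is false.
 */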
1866
1867
1868/** Opcode 0x0f 0x40. */
1869FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1870{
1871 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
1872 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1873}
1874
1875
1876/** Opcode 0x0f 0x41. */
1877FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1878{
1879 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
1880 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1881}
1882
1883
1884/** Opcode 0x0f 0x42. */
1885FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
1886{
1887 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
1888 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
1889}
1890
1891
1892/** Opcode 0x0f 0x43. */
1893FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
1894{
1895 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
1896 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
1897}
1898
1899
1900/** Opcode 0x0f 0x44. */
1901FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
1902{
1903 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
1904 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
1905}
1906
1907
1908/** Opcode 0x0f 0x45. */
1909FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
1910{
1911 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
1912 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
1913}
1914
1915
1916/** Opcode 0x0f 0x46. */
1917FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
1918{
1919 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
1920 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1921}
1922
1923
1924/** Opcode 0x0f 0x47. */
1925FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
1926{
1927 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
1928 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1929}
1930
1931
1932/** Opcode 0x0f 0x48. */
1933FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
1934{
1935 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
1936 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
1937}
1938
1939
1940/** Opcode 0x0f 0x49. */
1941FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
1942{
1943 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
1944 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
1945}
1946
1947
1948/** Opcode 0x0f 0x4a. */
1949FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
1950{
1951 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
1952 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
1953}
1954
1955
1956/** Opcode 0x0f 0x4b. */
1957FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
1958{
1959 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
1960 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
1961}
1962
1963
1964/** Opcode 0x0f 0x4c. */
1965FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
1966{
1967 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
1968 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
1969}
1970
1971
1972/** Opcode 0x0f 0x4d. */
1973FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
1974{
1975 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
1976 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
1977}
1978
1979
1980/** Opcode 0x0f 0x4e. */
1981FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
1982{
1983 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
1984 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1985}
1986
1987
1988/** Opcode 0x0f 0x4f. */
1989FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
1990{
1991 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
1992 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1993}
1994
1995#undef CMOV_X
1996
1997/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
1998FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1999/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2000FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2001/* Opcode 0xf3 0x0f 0x50 - invalid */
2002/* Opcode 0xf2 0x0f 0x50 - invalid */
2003
2004/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2005FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2006/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2007FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2008/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2009FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2010/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2011FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2012
2013/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2014FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2015/* Opcode 0x66 0x0f 0x52 - invalid */
2016/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2017FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2018/* Opcode 0xf2 0x0f 0x52 - invalid */
2019
2020/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2021FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2022/* Opcode 0x66 0x0f 0x53 - invalid */
2023/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2024FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2025/* Opcode 0xf2 0x0f 0x53 - invalid */
2026
2027/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2028FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2029/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2030FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2031/* Opcode 0xf3 0x0f 0x54 - invalid */
2032/* Opcode 0xf2 0x0f 0x54 - invalid */
2033
2034/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2035FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2036/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2037FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2038/* Opcode 0xf3 0x0f 0x55 - invalid */
2039/* Opcode 0xf2 0x0f 0x55 - invalid */
2040
2041/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2042FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2043/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2044FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2045/* Opcode 0xf3 0x0f 0x56 - invalid */
2046/* Opcode 0xf2 0x0f 0x56 - invalid */
2047
2048/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2049FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2050/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2051FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2052/* Opcode 0xf3 0x0f 0x57 - invalid */
2053/* Opcode 0xf2 0x0f 0x57 - invalid */
2054
2055/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2056FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2057/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2058FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2059/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2060FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2061/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2062FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2063
2064/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2065FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2066/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2067FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2068/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2069FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2070/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2071FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2072
2073/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2074FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2075/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2076FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2077/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2078FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2079/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2080FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2081
2082/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2083FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2084/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2085FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2086/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2087FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2088/* Opcode 0xf2 0x0f 0x5b - invalid */
2089
2090/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2091FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2092/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2093FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2094/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2095FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2096/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2097FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2098
2099/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2100FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2101/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2102FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2103/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2104FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2105/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2106FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2107
2108/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2109FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2110/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2111FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2112/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2113FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2114/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2115FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2116
2117/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2118FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2119/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2120FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2121/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2122FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2123/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2124FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2125
2126/**
2127 * Common worker for MMX instructions on the forms:
2128 * pxxxx mm1, mm2/mem32
2129 *
2130 * The 2nd operand is the first half of a register, which in the memory case
2131 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2132 * memory accessed for SSE.
2133 *
2134 * Exceptions type 4.
2135 */
2136FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2137{
2138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2139 if (!pImpl->pfnU64)
2140 return IEMOP_RAISE_INVALID_OPCODE();
2141 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2142 {
2143 /*
2144 * Register, register.
2145 */
2146 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2147 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2149 IEM_MC_BEGIN(2, 0);
2150 IEM_MC_ARG(uint64_t *, pDst, 0);
2151 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2152 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2153 IEM_MC_PREPARE_FPU_USAGE();
2154 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2155 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2156 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2157 IEM_MC_ADVANCE_RIP();
2158 IEM_MC_END();
2159 }
2160 else
2161 {
2162 /*
2163 * Register, memory.
2164 */
2165 IEM_MC_BEGIN(2, 2);
2166 IEM_MC_ARG(uint64_t *, pDst, 0);
2167 IEM_MC_LOCAL(uint32_t, uSrc);
2168 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2170
2171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2173 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2174 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2175
2176 IEM_MC_PREPARE_FPU_USAGE();
2177 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2178 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2179
2180 IEM_MC_ADVANCE_RIP();
2181 IEM_MC_END();
2182 }
2183 return VINF_SUCCESS;
2184}
2185
2186
2187/**
2188 * Common worker for SSE2 instructions on the forms:
2189 * pxxxx xmm1, xmm2/mem128
2190 *
2191 * The 2nd operand is the first half of a register, which in the memory case
2192 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2193 * memory accessed for SSE.
2194 *
2195 * Exceptions type 4.
2196 */
2197FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2198{
2199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2200 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2201 {
2202 /*
2203 * Register, register.
2204 */
2205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2206 IEM_MC_BEGIN(2, 0);
2207 IEM_MC_ARG(uint128_t *, pDst, 0);
2208 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2209 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2210 IEM_MC_PREPARE_SSE_USAGE();
2211 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2212 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2213 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2214 IEM_MC_ADVANCE_RIP();
2215 IEM_MC_END();
2216 }
2217 else
2218 {
2219 /*
2220 * Register, memory.
2221 */
2222 IEM_MC_BEGIN(2, 2);
2223 IEM_MC_ARG(uint128_t *, pDst, 0);
2224 IEM_MC_LOCAL(uint64_t, uSrc);
2225 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2227
2228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2230 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2231 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2232
2233 IEM_MC_PREPARE_SSE_USAGE();
2234 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2235 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2236
2237 IEM_MC_ADVANCE_RIP();
2238 IEM_MC_END();
2239 }
2240 return VINF_SUCCESS;
2241}
2242
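/*
 * Worked example (illustrative): punpcklbw interleaves the low halves byte
 * by byte, so with
 *      mm1 = xx xx xx xx a3 a2 a1 a0
 *      mm2 = xx xx xx xx b3 b2 b1 b0
 * the result is
 *      mm1 = b3 a3 b2 a2 b1 a1 b0 a0
 * The SSE form does the same with the low quadwords of the XMM registers,
 * filling all 128 bits of the destination.
 */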
2243
2244/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2245FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2246{
2247 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2248 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2249}
2250
2251/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2252FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2253{
2254 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2255 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2256}
2257
2258/* Opcode 0xf3 0x0f 0x60 - invalid */
2259
2260
2261/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2262FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2263{
2264 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
2265 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2266}
2267
2268/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2269FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2270{
2271 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2272 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2273}
2274
2275/* Opcode 0xf3 0x0f 0x61 - invalid */
2276
2277
2278/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2279FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2280{
2281 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2282 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2283}
2284
2285/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2286FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2287{
2288 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2289 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2290}
2291
2292/* Opcode 0xf3 0x0f 0x62 - invalid */
2293
2294
2295
2296/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2297FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2298/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2299FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2300/* Opcode 0xf3 0x0f 0x63 - invalid */
2301
2302/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2303FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2304/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2305FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2306/* Opcode 0xf3 0x0f 0x64 - invalid */
2307
2308/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2309FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2310/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2311FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2312/* Opcode 0xf3 0x0f 0x65 - invalid */
2313
2314/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2315FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2316/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2317FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2318/* Opcode 0xf3 0x0f 0x66 - invalid */
2319
2320/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2321FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2322/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2323FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2324/* Opcode 0xf3 0x0f 0x67 - invalid */
2325
2326
2327/**
2328 * Common worker for MMX instructions on the form:
2329 * pxxxx mm1, mm2/mem64
2330 *
2331 * The 2nd operand is the second half of a register, which in the memory case
2332 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2333 * where it may read the full 128 bits or only the upper 64 bits.
2334 *
2335 * Exceptions type 4.
2336 */
2337FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2338{
2339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2340 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2341 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2342 {
2343 /*
2344 * Register, register.
2345 */
2346 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2347 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2349 IEM_MC_BEGIN(2, 0);
2350 IEM_MC_ARG(uint64_t *, pDst, 0);
2351 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2352 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2353 IEM_MC_PREPARE_FPU_USAGE();
2354 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2355 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2356 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2357 IEM_MC_ADVANCE_RIP();
2358 IEM_MC_END();
2359 }
2360 else
2361 {
2362 /*
2363 * Register, memory.
2364 */
2365 IEM_MC_BEGIN(2, 2);
2366 IEM_MC_ARG(uint64_t *, pDst, 0);
2367 IEM_MC_LOCAL(uint64_t, uSrc);
2368 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2370
2371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2373 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2374 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2375
2376 IEM_MC_PREPARE_FPU_USAGE();
2377 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2378 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2379
2380 IEM_MC_ADVANCE_RIP();
2381 IEM_MC_END();
2382 }
2383 return VINF_SUCCESS;
2384}
2385
2386
2387/**
2388 * Common worker for SSE2 instructions on the form:
2389 * pxxxx xmm1, xmm2/mem128
2390 *
2391 * The 2nd operand is the second half of a register, which in the memory case
2392 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2393 * where it may read the full 128 bits or only the upper 64 bits.
2394 *
2395 * Exceptions type 4.
2396 */
2397FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2398{
2399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2401 {
2402 /*
2403 * Register, register.
2404 */
2405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2406 IEM_MC_BEGIN(2, 0);
2407 IEM_MC_ARG(uint128_t *, pDst, 0);
2408 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2409 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2410 IEM_MC_PREPARE_SSE_USAGE();
2411 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2412 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2413 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2414 IEM_MC_ADVANCE_RIP();
2415 IEM_MC_END();
2416 }
2417 else
2418 {
2419 /*
2420 * Register, memory.
2421 */
2422 IEM_MC_BEGIN(2, 2);
2423 IEM_MC_ARG(uint128_t *, pDst, 0);
2424 IEM_MC_LOCAL(uint128_t, uSrc);
2425 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2427
2428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2430 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2431 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2432
2433 IEM_MC_PREPARE_SSE_USAGE();
2434 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2435 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 return VINF_SUCCESS;
2441}
2442
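/*
 * Worked example (illustrative): punpckhbw interleaves the high halves, so
 * with
 *      mm1 = a7 a6 a5 a4 xx xx xx xx
 *      mm2 = b7 b6 b5 b4 xx xx xx xx
 * the result is
 *      mm1 = b7 a7 b6 a6 b5 a5 b4 a4
 */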
2443
2444/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2445FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2446{
2447 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2448 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2449}
2450
2451/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2452FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2453{
2454 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2455 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2456}
2457/* Opcode 0xf3 0x0f 0x68 - invalid */
2458
2459
2460/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2461FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2462{
2463 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2464 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2465}
2466
2467/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2468FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2469{
2470 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2471 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2472
2473}
2474/* Opcode 0xf3 0x0f 0x69 - invalid */
2475
2476
2477/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2478FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2479{
2480 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2481 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2482}
2483
2484/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2485FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2486{
2487 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2488 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2489}
2490/* Opcode 0xf3 0x0f 0x6a - invalid */
2491
2492
2493/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2494FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2495/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2496FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2497/* Opcode 0xf3 0x0f 0x6b - invalid */
2498
2499
2500/* Opcode 0x0f 0x6c - invalid */
2501
2502/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2503FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2504{
2505 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2506 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2507}
2508
2509/* Opcode 0xf3 0x0f 0x6c - invalid */
2510/* Opcode 0xf2 0x0f 0x6c - invalid */
2511
2512
2513/* Opcode 0x0f 0x6d - invalid */
2514
2515/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2516FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2517{
2518 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2519 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2520}
2521
2522/* Opcode 0xf3 0x0f 0x6d - invalid */
2523
2524
2525/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2526FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2527{
2528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2529 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2530 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2531 else
2532 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2533 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2534 {
2535 /* MMX, greg */
2536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2537 IEM_MC_BEGIN(0, 1);
2538 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2539 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2540 IEM_MC_LOCAL(uint64_t, u64Tmp);
2541 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2542 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2543 else
2544 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2545 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2546 IEM_MC_ADVANCE_RIP();
2547 IEM_MC_END();
2548 }
2549 else
2550 {
2551 /* MMX, [mem] */
2552 IEM_MC_BEGIN(0, 2);
2553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2554 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2557 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2558 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2559 {
2560 IEM_MC_LOCAL(uint64_t, u64Tmp);
2561 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2562 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2563 }
2564 else
2565 {
2566 IEM_MC_LOCAL(uint32_t, u32Tmp);
2567 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2568 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2569 }
2570 IEM_MC_ADVANCE_RIP();
2571 IEM_MC_END();
2572 }
2573 return VINF_SUCCESS;
2574}
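
/*
 * Example encodings (for illustration): 0f 6e c0 is movd mm0,eax and
 * zero-extends the 32-bit source into the 64-bit MMX register, while
 * 48 0f 6e c0 (REX.W) is movq mm0,rax and copies all 64 bits.
 */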
2575
2576/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2577FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2578{
2579 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2580 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2581 IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
2582 else
2583 IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
2584 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2585 {
2586 /* XMM, greg */
2587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2588 IEM_MC_BEGIN(0, 1);
2589 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2590 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2591 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2592 {
2593 IEM_MC_LOCAL(uint64_t, u64Tmp);
2594 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2595 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2596 }
2597 else
2598 {
2599 IEM_MC_LOCAL(uint32_t, u32Tmp);
2600 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2601 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2602 }
2603 IEM_MC_ADVANCE_RIP();
2604 IEM_MC_END();
2605 }
2606 else
2607 {
2608 /* XMM, [mem] */
2609 IEM_MC_BEGIN(0, 2);
2610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2611 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2614 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2615 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2616 {
2617 IEM_MC_LOCAL(uint64_t, u64Tmp);
2618 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2619 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2620 }
2621 else
2622 {
2623 IEM_MC_LOCAL(uint32_t, u32Tmp);
2624 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2625 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2626 }
2627 IEM_MC_ADVANCE_RIP();
2628 IEM_MC_END();
2629 }
2630 return VINF_SUCCESS;
2631}
2632
2633/* Opcode 0xf3 0x0f 0x6e - invalid */
2634
2635
2636/** Opcode 0x0f 0x6f - movq Pq, Qq */
2637FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2638{
2639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2640 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2641 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2642 {
2643 /*
2644 * Register, register.
2645 */
2646 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2647 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2649 IEM_MC_BEGIN(0, 1);
2650 IEM_MC_LOCAL(uint64_t, u64Tmp);
2651 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2652 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2653 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2654 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2655 IEM_MC_ADVANCE_RIP();
2656 IEM_MC_END();
2657 }
2658 else
2659 {
2660 /*
2661 * Register, memory.
2662 */
2663 IEM_MC_BEGIN(0, 2);
2664 IEM_MC_LOCAL(uint64_t, u64Tmp);
2665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2666
2667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2669 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2670 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2671 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2672 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2673
2674 IEM_MC_ADVANCE_RIP();
2675 IEM_MC_END();
2676 }
2677 return VINF_SUCCESS;
2678}
2679
2680/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2681FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2682{
2683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2684 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2685 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2686 {
2687 /*
2688 * Register, register.
2689 */
2690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2691 IEM_MC_BEGIN(0, 0);
2692 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2693 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2694 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2695 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2696 IEM_MC_ADVANCE_RIP();
2697 IEM_MC_END();
2698 }
2699 else
2700 {
2701 /*
2702 * Register, memory.
2703 */
2704 IEM_MC_BEGIN(0, 2);
2705 IEM_MC_LOCAL(uint128_t, u128Tmp);
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2707
2708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2710 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2712 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2713 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2714
2715 IEM_MC_ADVANCE_RIP();
2716 IEM_MC_END();
2717 }
2718 return VINF_SUCCESS;
2719}
2720
2721/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2722FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2723{
2724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2725 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2726 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2727 {
2728 /*
2729 * Register, register.
2730 */
2731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2732 IEM_MC_BEGIN(0, 0);
2733 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2734 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2735 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2736 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2737 IEM_MC_ADVANCE_RIP();
2738 IEM_MC_END();
2739 }
2740 else
2741 {
2742 /*
2743 * Register, memory.
2744 */
2745 IEM_MC_BEGIN(0, 2);
2746 IEM_MC_LOCAL(uint128_t, u128Tmp);
2747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2748
2749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2751 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2752 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2753 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2754 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2755
2756 IEM_MC_ADVANCE_RIP();
2757 IEM_MC_END();
2758 }
2759 return VINF_SUCCESS;
2760}
2761
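/*
 * Note: the only difference between the movdqa and movdqu memory paths above
 * is the fetch microcode: IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces the
 * 16-byte alignment movdqa architecturally requires (a misaligned access
 * yields #GP(0)), whereas IEM_MC_FETCH_MEM_U128 accepts any alignment.
 */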
2762
2763/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2764FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2765{
2766 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2767 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2768 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2769 {
2770 /*
2771 * Register, register.
2772 */
2773 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2775
2776 IEM_MC_BEGIN(3, 0);
2777 IEM_MC_ARG(uint64_t *, pDst, 0);
2778 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2779 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2780 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2781 IEM_MC_PREPARE_FPU_USAGE();
2782 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2783 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2784 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2785 IEM_MC_ADVANCE_RIP();
2786 IEM_MC_END();
2787 }
2788 else
2789 {
2790 /*
2791 * Register, memory.
2792 */
2793 IEM_MC_BEGIN(3, 2);
2794 IEM_MC_ARG(uint64_t *, pDst, 0);
2795 IEM_MC_LOCAL(uint64_t, uSrc);
2796 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2798
2799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2800 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2801 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2803 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2804
2805 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2806 IEM_MC_PREPARE_FPU_USAGE();
2807 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2808 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2809
2810 IEM_MC_ADVANCE_RIP();
2811 IEM_MC_END();
2812 }
2813 return VINF_SUCCESS;
2814}
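
/*
 * Immediate semantics (illustrative): each 2-bit field of the pshufw
 * immediate picks the source word for the corresponding destination word.
 * E.g. pshufw mm0, mm1, 0x1b (binary 00 01 10 11) reverses the four words:
 * dst.w0 = src.w3, dst.w1 = src.w2, dst.w2 = src.w1, dst.w3 = src.w0.
 */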
2815
2816/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2817FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2818{
2819 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2820 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2821 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2822 {
2823 /*
2824 * Register, register.
2825 */
2826 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2828
2829 IEM_MC_BEGIN(3, 0);
2830 IEM_MC_ARG(uint128_t *, pDst, 0);
2831 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2832 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2833 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2834 IEM_MC_PREPARE_SSE_USAGE();
2835 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2836 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2837 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2838 IEM_MC_ADVANCE_RIP();
2839 IEM_MC_END();
2840 }
2841 else
2842 {
2843 /*
2844 * Register, memory.
2845 */
2846 IEM_MC_BEGIN(3, 2);
2847 IEM_MC_ARG(uint128_t *, pDst, 0);
2848 IEM_MC_LOCAL(uint128_t, uSrc);
2849 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2851
2852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2853 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2854 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2856 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2857
2858 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2859 IEM_MC_PREPARE_SSE_USAGE();
2860 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2861 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2862
2863 IEM_MC_ADVANCE_RIP();
2864 IEM_MC_END();
2865 }
2866 return VINF_SUCCESS;
2867}
2868
2869/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
2870FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2871{
2872 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2874 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2875 {
2876 /*
2877 * Register, register.
2878 */
2879 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2881
2882 IEM_MC_BEGIN(3, 0);
2883 IEM_MC_ARG(uint128_t *, pDst, 0);
2884 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2885 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2886 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2887 IEM_MC_PREPARE_SSE_USAGE();
2888 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2889 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2890 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2891 IEM_MC_ADVANCE_RIP();
2892 IEM_MC_END();
2893 }
2894 else
2895 {
2896 /*
2897 * Register, memory.
2898 */
2899 IEM_MC_BEGIN(3, 2);
2900 IEM_MC_ARG(uint128_t *, pDst, 0);
2901 IEM_MC_LOCAL(uint128_t, uSrc);
2902 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2904
2905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2906 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2907 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2909 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2910
2911 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2912 IEM_MC_PREPARE_SSE_USAGE();
2913 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2914 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2915
2916 IEM_MC_ADVANCE_RIP();
2917 IEM_MC_END();
2918 }
2919 return VINF_SUCCESS;
2920}
2921
2922/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
2923FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2924{
2925 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2927 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2928 {
2929 /*
2930 * Register, register.
2931 */
2932 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2934
2935 IEM_MC_BEGIN(3, 0);
2936 IEM_MC_ARG(uint128_t *, pDst, 0);
2937 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2938 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2939 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2940 IEM_MC_PREPARE_SSE_USAGE();
2941 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2942 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2943 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2944 IEM_MC_ADVANCE_RIP();
2945 IEM_MC_END();
2946 }
2947 else
2948 {
2949 /*
2950 * Register, memory.
2951 */
2952 IEM_MC_BEGIN(3, 2);
2953 IEM_MC_ARG(uint128_t *, pDst, 0);
2954 IEM_MC_LOCAL(uint128_t, uSrc);
2955 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2957
2958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2959 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2960 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2962 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2963
2964 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2965 IEM_MC_PREPARE_SSE_USAGE();
2966 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2967 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2968
2969 IEM_MC_ADVANCE_RIP();
2970 IEM_MC_END();
2971 }
2972 return VINF_SUCCESS;
2973}
2974
2975
2976/** Opcode 0x0f 0x71 11/2. */
2977FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2978
2979/** Opcode 0x66 0x0f 0x71 11/2. */
2980FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2981
2982/** Opcode 0x0f 0x71 11/4. */
2983FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2984
2985/** Opcode 0x66 0x0f 0x71 11/4. */
2986FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2987
2988/** Opcode 0x0f 0x71 11/6. */
2989FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2990
2991/** Opcode 0x66 0x0f 0x71 11/6. */
2992FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2993
2994
2995/**
2996 * Group 12 jump table for register variant.
2997 */
2998IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
2999{
3000 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3001 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3002 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3003 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3004 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3005 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3006 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3007 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3008};
3009AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3010
3011
3012/** Opcode 0x0f 0x71. */
3013FNIEMOP_DEF(iemOp_Grp12)
3014{
3015 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3016 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3017 /* register, register */
3018 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3019 + pVCpu->iem.s.idxPrefix], bRm);
3020 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3021}
3022
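/*
 * Note on this and the following group 13/14 tables: each row holds the four
 * prefix columns for one /r value, indexed /r * 4 + pVCpu->iem.s.idxPrefix,
 * where the prefix index follows the column order above: 0 = no prefix,
 * 1 = 0x66, 2 = 0xf3, 3 = 0xf2.  E.g. 66 0f 71 /2 (vpsrlw) lands on entry
 * 2 * 4 + 1 = iemOp_Grp12_vpsrlw_Hx_Ux_Ib.
 */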
3023
3024/** Opcode 0x0f 0x72 11/2. */
3025FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3026
3027/** Opcode 0x66 0x0f 0x72 11/2. */
3028FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3029
3030/** Opcode 0x0f 0x72 11/4. */
3031FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3032
3033/** Opcode 0x66 0x0f 0x72 11/4. */
3034FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3035
3036/** Opcode 0x0f 0x72 11/6. */
3037FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3038
3039/** Opcode 0x66 0x0f 0x72 11/6. */
3040FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3041
3042
3043/**
3044 * Group 13 jump table for register variant.
3045 */
3046IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3047{
3048 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3049 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3050 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3051 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3052 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3053 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3054 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3055 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3056};
3057AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3058
3059/** Opcode 0x0f 0x72. */
3060FNIEMOP_DEF(iemOp_Grp13)
3061{
3062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3063 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3064 /* register, register */
3065 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3066 + pVCpu->iem.s.idxPrefix], bRm);
3067 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3068}
3069
3070
3071/** Opcode 0x0f 0x73 11/2. */
3072FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3073
3074/** Opcode 0x66 0x0f 0x73 11/2. */
3075FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3076
3077/** Opcode 0x66 0x0f 0x73 11/3. */
3078FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3079
3080/** Opcode 0x0f 0x73 11/6. */
3081FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3082
3083/** Opcode 0x66 0x0f 0x73 11/6. */
3084FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3085
3086/** Opcode 0x66 0x0f 0x73 11/7. */
3087FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3088
3089/**
3090 * Group 14 jump table for register variant.
3091 */
3092IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3093{
3094 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3095 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3096 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3097 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3098 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3099 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3100 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3101 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3102};
3103AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3104
3105
3106/** Opcode 0x0f 0x73. */
3107FNIEMOP_DEF(iemOp_Grp14)
3108{
3109 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3110 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3111 /* register, register */
3112 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3113 + pVCpu->iem.s.idxPrefix], bRm);
3114 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3115}
3116
3117
3118/**
3119 * Common worker for MMX instructions on the form:
3120 * pxxx mm1, mm2/mem64
3121 */
3122FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3123{
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3126 {
3127 /*
3128 * Register, register.
3129 */
3130 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3131 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3133 IEM_MC_BEGIN(2, 0);
3134 IEM_MC_ARG(uint64_t *, pDst, 0);
3135 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3136 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3137 IEM_MC_PREPARE_FPU_USAGE();
3138 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3139 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3140 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3141 IEM_MC_ADVANCE_RIP();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /*
3147 * Register, memory.
3148 */
3149 IEM_MC_BEGIN(2, 2);
3150 IEM_MC_ARG(uint64_t *, pDst, 0);
3151 IEM_MC_LOCAL(uint64_t, uSrc);
3152 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3154
3155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3158 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3159
3160 IEM_MC_PREPARE_FPU_USAGE();
3161 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3162 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3163
3164 IEM_MC_ADVANCE_RIP();
3165 IEM_MC_END();
3166 }
3167 return VINF_SUCCESS;
3168}
3169
3170
3171/**
3172 * Common worker for SSE2 instructions on the forms:
3173 * pxxx xmm1, xmm2/mem128
3174 *
3175 * Proper alignment of the 128-bit operand is enforced.
3176 * Exceptions type 4. SSE2 cpuid checks.
3177 */
3178FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3179{
3180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3181 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3182 {
3183 /*
3184 * Register, register.
3185 */
3186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3187 IEM_MC_BEGIN(2, 0);
3188 IEM_MC_ARG(uint128_t *, pDst, 0);
3189 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3190 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3191 IEM_MC_PREPARE_SSE_USAGE();
3192 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3193 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3194 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3195 IEM_MC_ADVANCE_RIP();
3196 IEM_MC_END();
3197 }
3198 else
3199 {
3200 /*
3201 * Register, memory.
3202 */
3203 IEM_MC_BEGIN(2, 2);
3204 IEM_MC_ARG(uint128_t *, pDst, 0);
3205 IEM_MC_LOCAL(uint128_t, uSrc);
3206 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3208
3209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3211 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3212 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3213
3214 IEM_MC_PREPARE_SSE_USAGE();
3215 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3216 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3217
3218 IEM_MC_ADVANCE_RIP();
3219 IEM_MC_END();
3220 }
3221 return VINF_SUCCESS;
3222}
3223
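/*
 * Worked example (illustrative) of what these FullFull workers dispatch to:
 * pcmpeqb compares lane by lane and writes an all-ones or all-zero mask per
 * byte, e.g. comparing 0x11 with 0x11 gives 0xff in that lane, while
 * comparing 0x11 with 0x22 gives 0x00.
 */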
3224
3225/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3226FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3227{
3228 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3229 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3230}
3231
3232/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3233FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3234{
3235 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3236 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3237}
3238
3239/* Opcode 0xf3 0x0f 0x74 - invalid */
3240/* Opcode 0xf2 0x0f 0x74 - invalid */
3241
3242
3243/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3244FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3245{
3246 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3247 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3248}
3249
3250/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3251FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3252{
3253 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3254 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3255}
3256
3257/* Opcode 0xf3 0x0f 0x75 - invalid */
3258/* Opcode 0xf2 0x0f 0x75 - invalid */
3259
3260
3261/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3262FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3263{
3264 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3265 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3266}
3267
3268/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3269FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3270{
3271 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3272 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3273}
3274
3275/* Opcode 0xf3 0x0f 0x76 - invalid */
3276/* Opcode 0xf2 0x0f 0x76 - invalid */
3277
3278
3279/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3280FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3281/* Opcode 0x66 0x0f 0x77 - invalid */
3282/* Opcode 0xf3 0x0f 0x77 - invalid */
3283/* Opcode 0xf2 0x0f 0x77 - invalid */
3284
3285/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3286FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3287/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3288FNIEMOP_STUB(iemOp_AmdGrp17);
3289/* Opcode 0xf3 0x0f 0x78 - invalid */
3290/* Opcode 0xf2 0x0f 0x78 - invalid */
3291
3292/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3293FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3294/* Opcode 0x66 0x0f 0x79 - invalid */
3295/* Opcode 0xf3 0x0f 0x79 - invalid */
3296/* Opcode 0xf2 0x0f 0x79 - invalid */
3297
3298/* Opcode 0x0f 0x7a - invalid */
3299/* Opcode 0x66 0x0f 0x7a - invalid */
3300/* Opcode 0xf3 0x0f 0x7a - invalid */
3301/* Opcode 0xf2 0x0f 0x7a - invalid */
3302
3303/* Opcode 0x0f 0x7b - invalid */
3304/* Opcode 0x66 0x0f 0x7b - invalid */
3305/* Opcode 0xf3 0x0f 0x7b - invalid */
3306/* Opcode 0xf2 0x0f 0x7b - invalid */
3307
3308/* Opcode 0x0f 0x7c - invalid */
3309/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3310FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3311/* Opcode 0xf3 0x0f 0x7c - invalid */
3312/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3313FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3314
3315/* Opcode 0x0f 0x7d - invalid */
3316/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3317FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3318/* Opcode 0xf3 0x0f 0x7d - invalid */
3319/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3320FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3321
3322
3323/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3324FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3325{
3326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3327 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3328 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3329 else
3330 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3332 {
3333 /* greg, MMX */
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3335 IEM_MC_BEGIN(0, 1);
3336 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3337 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3338 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3339 {
3340 IEM_MC_LOCAL(uint64_t, u64Tmp);
3341 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3342 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3343 }
3344 else
3345 {
3346 IEM_MC_LOCAL(uint32_t, u32Tmp);
3347 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3348 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3349 }
3350 IEM_MC_ADVANCE_RIP();
3351 IEM_MC_END();
3352 }
3353 else
3354 {
3355 /* [mem], MMX */
3356 IEM_MC_BEGIN(0, 2);
3357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3358 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3361 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3362 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3363 {
3364 IEM_MC_LOCAL(uint64_t, u64Tmp);
3365 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3366 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3367 }
3368 else
3369 {
3370 IEM_MC_LOCAL(uint32_t, u32Tmp);
3371 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3372 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3373 }
3374 IEM_MC_ADVANCE_RIP();
3375 IEM_MC_END();
3376 }
3377 return VINF_SUCCESS;
3378}
3379
3380/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3381FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3382{
3383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3384 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3385 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3386 else
3387 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3388 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3389 {
3390 /* greg, XMM */
3391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3392 IEM_MC_BEGIN(0, 1);
3393 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3394 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3395 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3396 {
3397 IEM_MC_LOCAL(uint64_t, u64Tmp);
3398 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3399 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3400 }
3401 else
3402 {
3403 IEM_MC_LOCAL(uint32_t, u32Tmp);
3404 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3405 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3406 }
3407 IEM_MC_ADVANCE_RIP();
3408 IEM_MC_END();
3409 }
3410 else
3411 {
3412 /* [mem], XMM */
3413 IEM_MC_BEGIN(0, 2);
3414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3415 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3418 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3419 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3420 {
3421 IEM_MC_LOCAL(uint64_t, u64Tmp);
3422 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3423 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3424 }
3425 else
3426 {
3427 IEM_MC_LOCAL(uint32_t, u32Tmp);
3428 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3429 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3430 }
3431 IEM_MC_ADVANCE_RIP();
3432 IEM_MC_END();
3433 }
3434 return VINF_SUCCESS;
3435}
3436
3437/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3438FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3439/* Opcode 0xf2 0x0f 0x7e - invalid */
3440
3441
3442/** Opcode 0x0f 0x7f - movq Qq, Pq */
3443FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3444{
3445 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3447 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3448 {
3449 /*
3450 * Register, register.
3451 */
3452 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3453 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3455 IEM_MC_BEGIN(0, 1);
3456 IEM_MC_LOCAL(uint64_t, u64Tmp);
3457 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3458 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3459 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3460 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3461 IEM_MC_ADVANCE_RIP();
3462 IEM_MC_END();
3463 }
3464 else
3465 {
3466 /*
3467 * Register, memory.
3468 */
3469 IEM_MC_BEGIN(0, 2);
3470 IEM_MC_LOCAL(uint64_t, u64Tmp);
3471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3472
3473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3475 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3476 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3477
3478 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3479 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3480
3481 IEM_MC_ADVANCE_RIP();
3482 IEM_MC_END();
3483 }
3484 return VINF_SUCCESS;
3485}
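
/* Note: the MMX paths above mask the ModR/M register fields with
 * X86_MODRM_REG_SMASK / X86_MODRM_RM_MASK only and never OR in uRexReg or
 * uRexB, i.e. REX.R/REX.B cannot select beyond mm7 here; the @todo
 * testcases are about confirming that real hardware ignores those bits the
 * same way. */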
3486
3487/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3488FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3489{
3490 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3491 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3492 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3493 {
3494 /*
3495 * Register, register.
3496 */
3497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3498 IEM_MC_BEGIN(0, 0);
3499 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3500 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3501 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3502 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3503 IEM_MC_ADVANCE_RIP();
3504 IEM_MC_END();
3505 }
3506 else
3507 {
3508 /*
3509 * Register, memory.
3510 */
3511 IEM_MC_BEGIN(0, 2);
3512 IEM_MC_LOCAL(uint128_t, u128Tmp);
3513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3514
3515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3517 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3518 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3519
3520 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3521 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3522
3523 IEM_MC_ADVANCE_RIP();
3524 IEM_MC_END();
3525 }
3526 return VINF_SUCCESS;
3527}
3528
3529/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3530FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3531{
3532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3533 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3534 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3535 {
3536 /*
3537 * Register, register.
3538 */
3539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3540 IEM_MC_BEGIN(0, 0);
3541 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3542 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3543 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3544 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3545 IEM_MC_ADVANCE_RIP();
3546 IEM_MC_END();
3547 }
3548 else
3549 {
3550 /*
3551 * Register, memory.
3552 */
3553 IEM_MC_BEGIN(0, 2);
3554 IEM_MC_LOCAL(uint128_t, u128Tmp);
3555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3556
3557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3559 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3560 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3561
3562 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3563 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3564
3565 IEM_MC_ADVANCE_RIP();
3566 IEM_MC_END();
3567 }
3568 return VINF_SUCCESS;
3569}
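
/* Note: the only functional difference between the movdqa and movdqu
 * workers above is the store statement: IEM_MC_STORE_MEM_U128_ALIGN_SSE
 * enforces the 16-byte alignment check (a misaligned movdqa raises #GP(0)),
 * while the plain IEM_MC_STORE_MEM_U128 used by movdqu takes any address. */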
3570
3571/* Opcode 0xf2 0x0f 0x7f - invalid */
3572
3573
3574
3575/** Opcode 0x0f 0x80. */
3576FNIEMOP_DEF(iemOp_jo_Jv)
3577{
3578 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3579 IEMOP_HLP_MIN_386();
3580 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3581 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3582 {
3583 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3585
3586 IEM_MC_BEGIN(0, 0);
3587 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3588 IEM_MC_REL_JMP_S16(i16Imm);
3589 } IEM_MC_ELSE() {
3590 IEM_MC_ADVANCE_RIP();
3591 } IEM_MC_ENDIF();
3592 IEM_MC_END();
3593 }
3594 else
3595 {
3596 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3598
3599 IEM_MC_BEGIN(0, 0);
3600 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3601 IEM_MC_REL_JMP_S32(i32Imm);
3602 } IEM_MC_ELSE() {
3603 IEM_MC_ADVANCE_RIP();
3604 } IEM_MC_ENDIF();
3605 IEM_MC_END();
3606 }
3607 return VINF_SUCCESS;
3608}
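
/* Note: the Jcc workers below all follow the pattern above: the effective
 * operand size selects a rel16 or rel32 immediate, and
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE makes the rel32 form the default in
 * 64-bit mode.  A small example (standard encodings assumed):
 * 0F 80 10 00 00 00 is jo rel32 with a displacement of 0x10, i.e. when OF
 * is set the target is the RIP of the next instruction plus 16. */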
3609
3610
3611/** Opcode 0x0f 0x81. */
3612FNIEMOP_DEF(iemOp_jno_Jv)
3613{
3614 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3615 IEMOP_HLP_MIN_386();
3616 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3617 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3618 {
3619 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3621
3622 IEM_MC_BEGIN(0, 0);
3623 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3624 IEM_MC_ADVANCE_RIP();
3625 } IEM_MC_ELSE() {
3626 IEM_MC_REL_JMP_S16(i16Imm);
3627 } IEM_MC_ENDIF();
3628 IEM_MC_END();
3629 }
3630 else
3631 {
3632 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3634
3635 IEM_MC_BEGIN(0, 0);
3636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3637 IEM_MC_ADVANCE_RIP();
3638 } IEM_MC_ELSE() {
3639 IEM_MC_REL_JMP_S32(i32Imm);
3640 } IEM_MC_ENDIF();
3641 IEM_MC_END();
3642 }
3643 return VINF_SUCCESS;
3644}
3645
3646
3647/** Opcode 0x0f 0x82. */
3648FNIEMOP_DEF(iemOp_jc_Jv)
3649{
3650 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3651 IEMOP_HLP_MIN_386();
3652 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3653 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3654 {
3655 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3657
3658 IEM_MC_BEGIN(0, 0);
3659 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3660 IEM_MC_REL_JMP_S16(i16Imm);
3661 } IEM_MC_ELSE() {
3662 IEM_MC_ADVANCE_RIP();
3663 } IEM_MC_ENDIF();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3670
3671 IEM_MC_BEGIN(0, 0);
3672 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3673 IEM_MC_REL_JMP_S32(i32Imm);
3674 } IEM_MC_ELSE() {
3675 IEM_MC_ADVANCE_RIP();
3676 } IEM_MC_ENDIF();
3677 IEM_MC_END();
3678 }
3679 return VINF_SUCCESS;
3680}
3681
3682
3683/** Opcode 0x0f 0x83. */
3684FNIEMOP_DEF(iemOp_jnc_Jv)
3685{
3686 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3687 IEMOP_HLP_MIN_386();
3688 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3689 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3690 {
3691 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3693
3694 IEM_MC_BEGIN(0, 0);
3695 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3696 IEM_MC_ADVANCE_RIP();
3697 } IEM_MC_ELSE() {
3698 IEM_MC_REL_JMP_S16(i16Imm);
3699 } IEM_MC_ENDIF();
3700 IEM_MC_END();
3701 }
3702 else
3703 {
3704 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3706
3707 IEM_MC_BEGIN(0, 0);
3708 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3709 IEM_MC_ADVANCE_RIP();
3710 } IEM_MC_ELSE() {
3711 IEM_MC_REL_JMP_S32(i32Imm);
3712 } IEM_MC_ENDIF();
3713 IEM_MC_END();
3714 }
3715 return VINF_SUCCESS;
3716}
3717
3718
3719/** Opcode 0x0f 0x84. */
3720FNIEMOP_DEF(iemOp_je_Jv)
3721{
3722 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3723 IEMOP_HLP_MIN_386();
3724 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3725 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3726 {
3727 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3729
3730 IEM_MC_BEGIN(0, 0);
3731 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3732 IEM_MC_REL_JMP_S16(i16Imm);
3733 } IEM_MC_ELSE() {
3734 IEM_MC_ADVANCE_RIP();
3735 } IEM_MC_ENDIF();
3736 IEM_MC_END();
3737 }
3738 else
3739 {
3740 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3742
3743 IEM_MC_BEGIN(0, 0);
3744 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3745 IEM_MC_REL_JMP_S32(i32Imm);
3746 } IEM_MC_ELSE() {
3747 IEM_MC_ADVANCE_RIP();
3748 } IEM_MC_ENDIF();
3749 IEM_MC_END();
3750 }
3751 return VINF_SUCCESS;
3752}
3753
3754
3755/** Opcode 0x0f 0x85. */
3756FNIEMOP_DEF(iemOp_jne_Jv)
3757{
3758 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3759 IEMOP_HLP_MIN_386();
3760 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3761 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3762 {
3763 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3765
3766 IEM_MC_BEGIN(0, 0);
3767 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3768 IEM_MC_ADVANCE_RIP();
3769 } IEM_MC_ELSE() {
3770 IEM_MC_REL_JMP_S16(i16Imm);
3771 } IEM_MC_ENDIF();
3772 IEM_MC_END();
3773 }
3774 else
3775 {
3776 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3778
3779 IEM_MC_BEGIN(0, 0);
3780 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3781 IEM_MC_ADVANCE_RIP();
3782 } IEM_MC_ELSE() {
3783 IEM_MC_REL_JMP_S32(i32Imm);
3784 } IEM_MC_ENDIF();
3785 IEM_MC_END();
3786 }
3787 return VINF_SUCCESS;
3788}
3789
3790
3791/** Opcode 0x0f 0x86. */
3792FNIEMOP_DEF(iemOp_jbe_Jv)
3793{
3794 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3795 IEMOP_HLP_MIN_386();
3796 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3797 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3798 {
3799 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3801
3802 IEM_MC_BEGIN(0, 0);
3803 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3804 IEM_MC_REL_JMP_S16(i16Imm);
3805 } IEM_MC_ELSE() {
3806 IEM_MC_ADVANCE_RIP();
3807 } IEM_MC_ENDIF();
3808 IEM_MC_END();
3809 }
3810 else
3811 {
3812 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3814
3815 IEM_MC_BEGIN(0, 0);
3816 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3817 IEM_MC_REL_JMP_S32(i32Imm);
3818 } IEM_MC_ELSE() {
3819 IEM_MC_ADVANCE_RIP();
3820 } IEM_MC_ENDIF();
3821 IEM_MC_END();
3822 }
3823 return VINF_SUCCESS;
3824}
3825
3826
3827/** Opcode 0x0f 0x87. */
3828FNIEMOP_DEF(iemOp_jnbe_Jv)
3829{
3830 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3831 IEMOP_HLP_MIN_386();
3832 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3833 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3834 {
3835 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3837
3838 IEM_MC_BEGIN(0, 0);
3839 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3840 IEM_MC_ADVANCE_RIP();
3841 } IEM_MC_ELSE() {
3842 IEM_MC_REL_JMP_S16(i16Imm);
3843 } IEM_MC_ENDIF();
3844 IEM_MC_END();
3845 }
3846 else
3847 {
3848 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3850
3851 IEM_MC_BEGIN(0, 0);
3852 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3853 IEM_MC_ADVANCE_RIP();
3854 } IEM_MC_ELSE() {
3855 IEM_MC_REL_JMP_S32(i32Imm);
3856 } IEM_MC_ENDIF();
3857 IEM_MC_END();
3858 }
3859 return VINF_SUCCESS;
3860}
3861
3862
3863/** Opcode 0x0f 0x88. */
3864FNIEMOP_DEF(iemOp_js_Jv)
3865{
3866 IEMOP_MNEMONIC(js_Jv, "js Jv");
3867 IEMOP_HLP_MIN_386();
3868 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3869 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3870 {
3871 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3873
3874 IEM_MC_BEGIN(0, 0);
3875 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3876 IEM_MC_REL_JMP_S16(i16Imm);
3877 } IEM_MC_ELSE() {
3878 IEM_MC_ADVANCE_RIP();
3879 } IEM_MC_ENDIF();
3880 IEM_MC_END();
3881 }
3882 else
3883 {
3884 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886
3887 IEM_MC_BEGIN(0, 0);
3888 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3889 IEM_MC_REL_JMP_S32(i32Imm);
3890 } IEM_MC_ELSE() {
3891 IEM_MC_ADVANCE_RIP();
3892 } IEM_MC_ENDIF();
3893 IEM_MC_END();
3894 }
3895 return VINF_SUCCESS;
3896}
3897
3898
3899/** Opcode 0x0f 0x89. */
3900FNIEMOP_DEF(iemOp_jns_Jv)
3901{
3902 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3903 IEMOP_HLP_MIN_386();
3904 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3905 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3906 {
3907 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3909
3910 IEM_MC_BEGIN(0, 0);
3911 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3912 IEM_MC_ADVANCE_RIP();
3913 } IEM_MC_ELSE() {
3914 IEM_MC_REL_JMP_S16(i16Imm);
3915 } IEM_MC_ENDIF();
3916 IEM_MC_END();
3917 }
3918 else
3919 {
3920 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3922
3923 IEM_MC_BEGIN(0, 0);
3924 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3925 IEM_MC_ADVANCE_RIP();
3926 } IEM_MC_ELSE() {
3927 IEM_MC_REL_JMP_S32(i32Imm);
3928 } IEM_MC_ENDIF();
3929 IEM_MC_END();
3930 }
3931 return VINF_SUCCESS;
3932}
3933
3934
3935/** Opcode 0x0f 0x8a. */
3936FNIEMOP_DEF(iemOp_jp_Jv)
3937{
3938 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3939 IEMOP_HLP_MIN_386();
3940 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3941 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3942 {
3943 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3945
3946 IEM_MC_BEGIN(0, 0);
3947 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3948 IEM_MC_REL_JMP_S16(i16Imm);
3949 } IEM_MC_ELSE() {
3950 IEM_MC_ADVANCE_RIP();
3951 } IEM_MC_ENDIF();
3952 IEM_MC_END();
3953 }
3954 else
3955 {
3956 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3958
3959 IEM_MC_BEGIN(0, 0);
3960 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3961 IEM_MC_REL_JMP_S32(i32Imm);
3962 } IEM_MC_ELSE() {
3963 IEM_MC_ADVANCE_RIP();
3964 } IEM_MC_ENDIF();
3965 IEM_MC_END();
3966 }
3967 return VINF_SUCCESS;
3968}
3969
3970
3971/** Opcode 0x0f 0x8b. */
3972FNIEMOP_DEF(iemOp_jnp_Jv)
3973{
3974 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3975 IEMOP_HLP_MIN_386();
3976 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3977 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3978 {
3979 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3981
3982 IEM_MC_BEGIN(0, 0);
3983 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3984 IEM_MC_ADVANCE_RIP();
3985 } IEM_MC_ELSE() {
3986 IEM_MC_REL_JMP_S16(i16Imm);
3987 } IEM_MC_ENDIF();
3988 IEM_MC_END();
3989 }
3990 else
3991 {
3992 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3994
3995 IEM_MC_BEGIN(0, 0);
3996 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3997 IEM_MC_ADVANCE_RIP();
3998 } IEM_MC_ELSE() {
3999 IEM_MC_REL_JMP_S32(i32Imm);
4000 } IEM_MC_ENDIF();
4001 IEM_MC_END();
4002 }
4003 return VINF_SUCCESS;
4004}
4005
4006
4007/** Opcode 0x0f 0x8c. */
4008FNIEMOP_DEF(iemOp_jl_Jv)
4009{
4010 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4011 IEMOP_HLP_MIN_386();
4012 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4013 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4014 {
4015 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4017
4018 IEM_MC_BEGIN(0, 0);
4019 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4020 IEM_MC_REL_JMP_S16(i16Imm);
4021 } IEM_MC_ELSE() {
4022 IEM_MC_ADVANCE_RIP();
4023 } IEM_MC_ENDIF();
4024 IEM_MC_END();
4025 }
4026 else
4027 {
4028 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4030
4031 IEM_MC_BEGIN(0, 0);
4032 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4033 IEM_MC_REL_JMP_S32(i32Imm);
4034 } IEM_MC_ELSE() {
4035 IEM_MC_ADVANCE_RIP();
4036 } IEM_MC_ENDIF();
4037 IEM_MC_END();
4038 }
4039 return VINF_SUCCESS;
4040}
4041
4042
4043/** Opcode 0x0f 0x8d. */
4044FNIEMOP_DEF(iemOp_jnl_Jv)
4045{
4046 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4047 IEMOP_HLP_MIN_386();
4048 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4049 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4050 {
4051 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4053
4054 IEM_MC_BEGIN(0, 0);
4055 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4056 IEM_MC_ADVANCE_RIP();
4057 } IEM_MC_ELSE() {
4058 IEM_MC_REL_JMP_S16(i16Imm);
4059 } IEM_MC_ENDIF();
4060 IEM_MC_END();
4061 }
4062 else
4063 {
4064 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4066
4067 IEM_MC_BEGIN(0, 0);
4068 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4069 IEM_MC_ADVANCE_RIP();
4070 } IEM_MC_ELSE() {
4071 IEM_MC_REL_JMP_S32(i32Imm);
4072 } IEM_MC_ENDIF();
4073 IEM_MC_END();
4074 }
4075 return VINF_SUCCESS;
4076}
4077
4078
4079/** Opcode 0x0f 0x8e. */
4080FNIEMOP_DEF(iemOp_jle_Jv)
4081{
4082 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4083 IEMOP_HLP_MIN_386();
4084 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4085 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4086 {
4087 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4089
4090 IEM_MC_BEGIN(0, 0);
4091 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4092 IEM_MC_REL_JMP_S16(i16Imm);
4093 } IEM_MC_ELSE() {
4094 IEM_MC_ADVANCE_RIP();
4095 } IEM_MC_ENDIF();
4096 IEM_MC_END();
4097 }
4098 else
4099 {
4100 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4102
4103 IEM_MC_BEGIN(0, 0);
4104 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4105 IEM_MC_REL_JMP_S32(i32Imm);
4106 } IEM_MC_ELSE() {
4107 IEM_MC_ADVANCE_RIP();
4108 } IEM_MC_ENDIF();
4109 IEM_MC_END();
4110 }
4111 return VINF_SUCCESS;
4112}
4113
4114
4115/** Opcode 0x0f 0x8f. */
4116FNIEMOP_DEF(iemOp_jnle_Jv)
4117{
4118 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4119 IEMOP_HLP_MIN_386();
4120 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4121 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4122 {
4123 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4125
4126 IEM_MC_BEGIN(0, 0);
4127 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4128 IEM_MC_ADVANCE_RIP();
4129 } IEM_MC_ELSE() {
4130 IEM_MC_REL_JMP_S16(i16Imm);
4131 } IEM_MC_ENDIF();
4132 IEM_MC_END();
4133 }
4134 else
4135 {
4136 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4138
4139 IEM_MC_BEGIN(0, 0);
4140 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4141 IEM_MC_ADVANCE_RIP();
4142 } IEM_MC_ELSE() {
4143 IEM_MC_REL_JMP_S32(i32Imm);
4144 } IEM_MC_ENDIF();
4145 IEM_MC_END();
4146 }
4147 return VINF_SUCCESS;
4148}
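
/* Summary of the condition tests used by the Jcc workers above (standard
 * x86 semantics): b/c tests CF, be tests CF | ZF, l tests SF != OF, and
 * le tests ZF || SF != OF; the negated forms reuse the same test and just
 * swap the jump/advance arms, which is why e.g. jnl keeps
 * IEM_MC_IF_EFL_BITS_NE and advances RIP in the 'then' branch instead. */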
4149
4150
4151/** Opcode 0x0f 0x90. */
4152FNIEMOP_DEF(iemOp_seto_Eb)
4153{
4154 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4155 IEMOP_HLP_MIN_386();
4156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4157
4158 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4159 * any way. AMD says it's "unused", whatever that means. We're
4160 * ignoring for now. */
4161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4162 {
4163 /* register target */
4164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4165 IEM_MC_BEGIN(0, 0);
4166 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4167 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4168 } IEM_MC_ELSE() {
4169 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4170 } IEM_MC_ENDIF();
4171 IEM_MC_ADVANCE_RIP();
4172 IEM_MC_END();
4173 }
4174 else
4175 {
4176 /* memory target */
4177 IEM_MC_BEGIN(0, 1);
4178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4181 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4182 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4183 } IEM_MC_ELSE() {
4184 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4185 } IEM_MC_ENDIF();
4186 IEM_MC_ADVANCE_RIP();
4187 IEM_MC_END();
4188 }
4189 return VINF_SUCCESS;
4190}
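
/* Note: the remaining setcc workers below differ only in the EFLAGS test
 * and the 0/1 polarity; the destination is always a single byte, so no
 * operand-size switch is needed.  Example (standard encoding assumed):
 * 0F 90 C0 is seto al (mod=3, rm=0). */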
4191
4192
4193/** Opcode 0x0f 0x91. */
4194FNIEMOP_DEF(iemOp_setno_Eb)
4195{
4196 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4197 IEMOP_HLP_MIN_386();
4198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4199
4200 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4201 * any way. AMD says it's "unused", whatever that means. We're
4202 * ignoring for now. */
4203 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4204 {
4205 /* register target */
4206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4207 IEM_MC_BEGIN(0, 0);
4208 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4209 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4210 } IEM_MC_ELSE() {
4211 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4212 } IEM_MC_ENDIF();
4213 IEM_MC_ADVANCE_RIP();
4214 IEM_MC_END();
4215 }
4216 else
4217 {
4218 /* memory target */
4219 IEM_MC_BEGIN(0, 1);
4220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4223 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4224 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4225 } IEM_MC_ELSE() {
4226 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4227 } IEM_MC_ENDIF();
4228 IEM_MC_ADVANCE_RIP();
4229 IEM_MC_END();
4230 }
4231 return VINF_SUCCESS;
4232}
4233
4234
4235/** Opcode 0x0f 0x92. */
4236FNIEMOP_DEF(iemOp_setc_Eb)
4237{
4238 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4239 IEMOP_HLP_MIN_386();
4240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4241
4242 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4243 * any way. AMD says it's "unused", whatever that means. We're
4244 * ignoring for now. */
4245 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4246 {
4247 /* register target */
4248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4249 IEM_MC_BEGIN(0, 0);
4250 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4251 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4252 } IEM_MC_ELSE() {
4253 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4254 } IEM_MC_ENDIF();
4255 IEM_MC_ADVANCE_RIP();
4256 IEM_MC_END();
4257 }
4258 else
4259 {
4260 /* memory target */
4261 IEM_MC_BEGIN(0, 1);
4262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4265 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4266 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4267 } IEM_MC_ELSE() {
4268 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4269 } IEM_MC_ENDIF();
4270 IEM_MC_ADVANCE_RIP();
4271 IEM_MC_END();
4272 }
4273 return VINF_SUCCESS;
4274}
4275
4276
4277/** Opcode 0x0f 0x93. */
4278FNIEMOP_DEF(iemOp_setnc_Eb)
4279{
4280 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4281 IEMOP_HLP_MIN_386();
4282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4283
4284 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4285 * any way. AMD says it's "unused", whatever that means. We're
4286 * ignoring for now. */
4287 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4288 {
4289 /* register target */
4290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4291 IEM_MC_BEGIN(0, 0);
4292 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4293 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4294 } IEM_MC_ELSE() {
4295 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4296 } IEM_MC_ENDIF();
4297 IEM_MC_ADVANCE_RIP();
4298 IEM_MC_END();
4299 }
4300 else
4301 {
4302 /* memory target */
4303 IEM_MC_BEGIN(0, 1);
4304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4307 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4308 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4309 } IEM_MC_ELSE() {
4310 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4311 } IEM_MC_ENDIF();
4312 IEM_MC_ADVANCE_RIP();
4313 IEM_MC_END();
4314 }
4315 return VINF_SUCCESS;
4316}
4317
4318
4319/** Opcode 0x0f 0x94. */
4320FNIEMOP_DEF(iemOp_sete_Eb)
4321{
4322 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4323 IEMOP_HLP_MIN_386();
4324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4325
4326 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4327 * any way. AMD says it's "unused", whatever that means. We're
4328 * ignoring for now. */
4329 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4330 {
4331 /* register target */
4332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4333 IEM_MC_BEGIN(0, 0);
4334 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4335 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4336 } IEM_MC_ELSE() {
4337 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4338 } IEM_MC_ENDIF();
4339 IEM_MC_ADVANCE_RIP();
4340 IEM_MC_END();
4341 }
4342 else
4343 {
4344 /* memory target */
4345 IEM_MC_BEGIN(0, 1);
4346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4349 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4350 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4351 } IEM_MC_ELSE() {
4352 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4353 } IEM_MC_ENDIF();
4354 IEM_MC_ADVANCE_RIP();
4355 IEM_MC_END();
4356 }
4357 return VINF_SUCCESS;
4358}
4359
4360
4361/** Opcode 0x0f 0x95. */
4362FNIEMOP_DEF(iemOp_setne_Eb)
4363{
4364 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4365 IEMOP_HLP_MIN_386();
4366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4367
4368 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4369 * any way. AMD says it's "unused", whatever that means. We're
4370 * ignoring for now. */
4371 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4372 {
4373 /* register target */
4374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4375 IEM_MC_BEGIN(0, 0);
4376 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4377 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4378 } IEM_MC_ELSE() {
4379 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4380 } IEM_MC_ENDIF();
4381 IEM_MC_ADVANCE_RIP();
4382 IEM_MC_END();
4383 }
4384 else
4385 {
4386 /* memory target */
4387 IEM_MC_BEGIN(0, 1);
4388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4391 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4392 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4393 } IEM_MC_ELSE() {
4394 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4395 } IEM_MC_ENDIF();
4396 IEM_MC_ADVANCE_RIP();
4397 IEM_MC_END();
4398 }
4399 return VINF_SUCCESS;
4400}
4401
4402
4403/** Opcode 0x0f 0x96. */
4404FNIEMOP_DEF(iemOp_setbe_Eb)
4405{
4406 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4407 IEMOP_HLP_MIN_386();
4408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4409
4410 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4411 * any way. AMD says it's "unused", whatever that means. We're
4412 * ignoring for now. */
4413 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4414 {
4415 /* register target */
4416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4417 IEM_MC_BEGIN(0, 0);
4418 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4419 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4420 } IEM_MC_ELSE() {
4421 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4422 } IEM_MC_ENDIF();
4423 IEM_MC_ADVANCE_RIP();
4424 IEM_MC_END();
4425 }
4426 else
4427 {
4428 /* memory target */
4429 IEM_MC_BEGIN(0, 1);
4430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4433 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4434 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4435 } IEM_MC_ELSE() {
4436 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4437 } IEM_MC_ENDIF();
4438 IEM_MC_ADVANCE_RIP();
4439 IEM_MC_END();
4440 }
4441 return VINF_SUCCESS;
4442}
4443
4444
4445/** Opcode 0x0f 0x97. */
4446FNIEMOP_DEF(iemOp_setnbe_Eb)
4447{
4448 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4449 IEMOP_HLP_MIN_386();
4450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4451
4452 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4453 * any way. AMD says it's "unused", whatever that means. We're
4454 * ignoring for now. */
4455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4456 {
4457 /* register target */
4458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4459 IEM_MC_BEGIN(0, 0);
4460 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4461 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4462 } IEM_MC_ELSE() {
4463 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4464 } IEM_MC_ENDIF();
4465 IEM_MC_ADVANCE_RIP();
4466 IEM_MC_END();
4467 }
4468 else
4469 {
4470 /* memory target */
4471 IEM_MC_BEGIN(0, 1);
4472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4475 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4476 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4477 } IEM_MC_ELSE() {
4478 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4479 } IEM_MC_ENDIF();
4480 IEM_MC_ADVANCE_RIP();
4481 IEM_MC_END();
4482 }
4483 return VINF_SUCCESS;
4484}
4485
4486
4487/** Opcode 0x0f 0x98. */
4488FNIEMOP_DEF(iemOp_sets_Eb)
4489{
4490 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4491 IEMOP_HLP_MIN_386();
4492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4493
4494 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4495 * any way. AMD says it's "unused", whatever that means. We're
4496 * ignoring for now. */
4497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4498 {
4499 /* register target */
4500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4501 IEM_MC_BEGIN(0, 0);
4502 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4503 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4504 } IEM_MC_ELSE() {
4505 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4506 } IEM_MC_ENDIF();
4507 IEM_MC_ADVANCE_RIP();
4508 IEM_MC_END();
4509 }
4510 else
4511 {
4512 /* memory target */
4513 IEM_MC_BEGIN(0, 1);
4514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4517 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4518 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4519 } IEM_MC_ELSE() {
4520 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4521 } IEM_MC_ENDIF();
4522 IEM_MC_ADVANCE_RIP();
4523 IEM_MC_END();
4524 }
4525 return VINF_SUCCESS;
4526}
4527
4528
4529/** Opcode 0x0f 0x99. */
4530FNIEMOP_DEF(iemOp_setns_Eb)
4531{
4532 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4533 IEMOP_HLP_MIN_386();
4534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4535
4536 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4537 * any way. AMD says it's "unused", whatever that means. We're
4538 * ignoring for now. */
4539 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4540 {
4541 /* register target */
4542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4543 IEM_MC_BEGIN(0, 0);
4544 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4545 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4546 } IEM_MC_ELSE() {
4547 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4548 } IEM_MC_ENDIF();
4549 IEM_MC_ADVANCE_RIP();
4550 IEM_MC_END();
4551 }
4552 else
4553 {
4554 /* memory target */
4555 IEM_MC_BEGIN(0, 1);
4556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4559 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4560 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4561 } IEM_MC_ELSE() {
4562 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4563 } IEM_MC_ENDIF();
4564 IEM_MC_ADVANCE_RIP();
4565 IEM_MC_END();
4566 }
4567 return VINF_SUCCESS;
4568}
4569
4570
4571/** Opcode 0x0f 0x9a. */
4572FNIEMOP_DEF(iemOp_setp_Eb)
4573{
4574 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4575 IEMOP_HLP_MIN_386();
4576 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4577
4578 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4579 * any way. AMD says it's "unused", whatever that means. We're
4580 * ignoring for now. */
4581 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4582 {
4583 /* register target */
4584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4585 IEM_MC_BEGIN(0, 0);
4586 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4587 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4588 } IEM_MC_ELSE() {
4589 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4590 } IEM_MC_ENDIF();
4591 IEM_MC_ADVANCE_RIP();
4592 IEM_MC_END();
4593 }
4594 else
4595 {
4596 /* memory target */
4597 IEM_MC_BEGIN(0, 1);
4598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4601 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4602 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4603 } IEM_MC_ELSE() {
4604 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4605 } IEM_MC_ENDIF();
4606 IEM_MC_ADVANCE_RIP();
4607 IEM_MC_END();
4608 }
4609 return VINF_SUCCESS;
4610}
4611
4612
4613/** Opcode 0x0f 0x9b. */
4614FNIEMOP_DEF(iemOp_setnp_Eb)
4615{
4616 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4617 IEMOP_HLP_MIN_386();
4618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4619
4620 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4621 * any way. AMD says it's "unused", whatever that means. We're
4622 * ignoring for now. */
4623 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4624 {
4625 /* register target */
4626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4627 IEM_MC_BEGIN(0, 0);
4628 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4629 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4630 } IEM_MC_ELSE() {
4631 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4632 } IEM_MC_ENDIF();
4633 IEM_MC_ADVANCE_RIP();
4634 IEM_MC_END();
4635 }
4636 else
4637 {
4638 /* memory target */
4639 IEM_MC_BEGIN(0, 1);
4640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4643 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4644 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4645 } IEM_MC_ELSE() {
4646 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4647 } IEM_MC_ENDIF();
4648 IEM_MC_ADVANCE_RIP();
4649 IEM_MC_END();
4650 }
4651 return VINF_SUCCESS;
4652}
4653
4654
4655/** Opcode 0x0f 0x9c. */
4656FNIEMOP_DEF(iemOp_setl_Eb)
4657{
4658 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4659 IEMOP_HLP_MIN_386();
4660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4661
4662 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4663 * any way. AMD says it's "unused", whatever that means. We're
4664 * ignoring for now. */
4665 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4666 {
4667 /* register target */
4668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4669 IEM_MC_BEGIN(0, 0);
4670 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4671 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4672 } IEM_MC_ELSE() {
4673 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4674 } IEM_MC_ENDIF();
4675 IEM_MC_ADVANCE_RIP();
4676 IEM_MC_END();
4677 }
4678 else
4679 {
4680 /* memory target */
4681 IEM_MC_BEGIN(0, 1);
4682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4685 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4686 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4687 } IEM_MC_ELSE() {
4688 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4689 } IEM_MC_ENDIF();
4690 IEM_MC_ADVANCE_RIP();
4691 IEM_MC_END();
4692 }
4693 return VINF_SUCCESS;
4694}
4695
4696
4697/** Opcode 0x0f 0x9d. */
4698FNIEMOP_DEF(iemOp_setnl_Eb)
4699{
4700 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4701 IEMOP_HLP_MIN_386();
4702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4703
4704 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4705 * any way. AMD says it's "unused", whatever that means. We're
4706 * ignoring for now. */
4707 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4708 {
4709 /* register target */
4710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4711 IEM_MC_BEGIN(0, 0);
4712 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4713 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4714 } IEM_MC_ELSE() {
4715 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4716 } IEM_MC_ENDIF();
4717 IEM_MC_ADVANCE_RIP();
4718 IEM_MC_END();
4719 }
4720 else
4721 {
4722 /* memory target */
4723 IEM_MC_BEGIN(0, 1);
4724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4727 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4728 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4729 } IEM_MC_ELSE() {
4730 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4731 } IEM_MC_ENDIF();
4732 IEM_MC_ADVANCE_RIP();
4733 IEM_MC_END();
4734 }
4735 return VINF_SUCCESS;
4736}
4737
4738
4739/** Opcode 0x0f 0x9e. */
4740FNIEMOP_DEF(iemOp_setle_Eb)
4741{
4742 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4743 IEMOP_HLP_MIN_386();
4744 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4745
4746 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4747 * any way. AMD says it's "unused", whatever that means. We're
4748 * ignoring for now. */
4749 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4750 {
4751 /* register target */
4752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4753 IEM_MC_BEGIN(0, 0);
4754 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4755 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4756 } IEM_MC_ELSE() {
4757 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4758 } IEM_MC_ENDIF();
4759 IEM_MC_ADVANCE_RIP();
4760 IEM_MC_END();
4761 }
4762 else
4763 {
4764 /* memory target */
4765 IEM_MC_BEGIN(0, 1);
4766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4769 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4770 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4771 } IEM_MC_ELSE() {
4772 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4773 } IEM_MC_ENDIF();
4774 IEM_MC_ADVANCE_RIP();
4775 IEM_MC_END();
4776 }
4777 return VINF_SUCCESS;
4778}
4779
4780
4781/** Opcode 0x0f 0x9f. */
4782FNIEMOP_DEF(iemOp_setnle_Eb)
4783{
4784 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4785 IEMOP_HLP_MIN_386();
4786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4787
4788 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4789 * any way. AMD says it's "unused", whatever that means. We're
4790 * ignoring for now. */
4791 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4792 {
4793 /* register target */
4794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4795 IEM_MC_BEGIN(0, 0);
4796 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4797 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4798 } IEM_MC_ELSE() {
4799 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4800 } IEM_MC_ENDIF();
4801 IEM_MC_ADVANCE_RIP();
4802 IEM_MC_END();
4803 }
4804 else
4805 {
4806 /* memory target */
4807 IEM_MC_BEGIN(0, 1);
4808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4811 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4812 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4813 } IEM_MC_ELSE() {
4814 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4815 } IEM_MC_ENDIF();
4816 IEM_MC_ADVANCE_RIP();
4817 IEM_MC_END();
4818 }
4819 return VINF_SUCCESS;
4820}
4821
4822
4823/**
4824 * Common 'push segment-register' helper.
4825 */
4826FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4827{
4828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4829    Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* only fs/gs pushes exist in 64-bit mode */
4830 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4831
4832 switch (pVCpu->iem.s.enmEffOpSize)
4833 {
4834 case IEMMODE_16BIT:
4835 IEM_MC_BEGIN(0, 1);
4836 IEM_MC_LOCAL(uint16_t, u16Value);
4837 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4838 IEM_MC_PUSH_U16(u16Value);
4839 IEM_MC_ADVANCE_RIP();
4840 IEM_MC_END();
4841 break;
4842
4843 case IEMMODE_32BIT:
4844 IEM_MC_BEGIN(0, 1);
4845 IEM_MC_LOCAL(uint32_t, u32Value);
4846 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4847 IEM_MC_PUSH_U32_SREG(u32Value);
4848 IEM_MC_ADVANCE_RIP();
4849 IEM_MC_END();
4850 break;
4851
4852 case IEMMODE_64BIT:
4853 IEM_MC_BEGIN(0, 1);
4854 IEM_MC_LOCAL(uint64_t, u64Value);
4855 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4856 IEM_MC_PUSH_U64(u64Value);
4857 IEM_MC_ADVANCE_RIP();
4858 IEM_MC_END();
4859 break;
4860 }
4861
4862 return VINF_SUCCESS;
4863}
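
/* Note: the 32-bit case above uses IEM_MC_PUSH_U32_SREG rather than the
 * plain IEM_MC_PUSH_U32 because a doubleword push of a segment register is
 * special cased by real hardware: CPUs may do a 16-bit write and leave the
 * upper word of the stack slot untouched instead of storing a zero-extended
 * dword, so the push needs its own worker. */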
4864
4865
4866/** Opcode 0x0f 0xa0. */
4867FNIEMOP_DEF(iemOp_push_fs)
4868{
4869 IEMOP_MNEMONIC(push_fs, "push fs");
4870 IEMOP_HLP_MIN_386();
4871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4872 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4873}
4874
4875
4876/** Opcode 0x0f 0xa1. */
4877FNIEMOP_DEF(iemOp_pop_fs)
4878{
4879 IEMOP_MNEMONIC(pop_fs, "pop fs");
4880 IEMOP_HLP_MIN_386();
4881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4882 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4883}
4884
4885
4886/** Opcode 0x0f 0xa2. */
4887FNIEMOP_DEF(iemOp_cpuid)
4888{
4889 IEMOP_MNEMONIC(cpuid, "cpuid");
4890 IEMOP_HLP_MIN_486(); /* not all 486es. */
4891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4892 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4893}
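
/* Note: operations with side effects too wide for the IEM_MC statement
 * blocks (cpuid above, pop into a segment register before it) are handed
 * off via IEM_MC_DEFER_TO_CIMPL_N, which simply forwards the decoded
 * operands to the corresponding iemCImpl_* worker in C. */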
4894
4895
4896/**
4897 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4898 * iemOp_bts_Ev_Gv.
4899 */
4900FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4901{
4902 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4903 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4904
4905 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4906 {
4907 /* register destination. */
4908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4909 switch (pVCpu->iem.s.enmEffOpSize)
4910 {
4911 case IEMMODE_16BIT:
4912 IEM_MC_BEGIN(3, 0);
4913 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4914 IEM_MC_ARG(uint16_t, u16Src, 1);
4915 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4916
4917 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4918 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4919 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4920 IEM_MC_REF_EFLAGS(pEFlags);
4921 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4922
4923 IEM_MC_ADVANCE_RIP();
4924 IEM_MC_END();
4925 return VINF_SUCCESS;
4926
4927 case IEMMODE_32BIT:
4928 IEM_MC_BEGIN(3, 0);
4929 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4930 IEM_MC_ARG(uint32_t, u32Src, 1);
4931 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4932
4933 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4934 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4935 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4936 IEM_MC_REF_EFLAGS(pEFlags);
4937 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4938
4939 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4940 IEM_MC_ADVANCE_RIP();
4941 IEM_MC_END();
4942 return VINF_SUCCESS;
4943
4944 case IEMMODE_64BIT:
4945 IEM_MC_BEGIN(3, 0);
4946 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4947 IEM_MC_ARG(uint64_t, u64Src, 1);
4948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4949
4950 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4951 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4952 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4953 IEM_MC_REF_EFLAGS(pEFlags);
4954 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4955
4956 IEM_MC_ADVANCE_RIP();
4957 IEM_MC_END();
4958 return VINF_SUCCESS;
4959
4960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4961 }
4962 }
4963 else
4964 {
4965 /* memory destination. */
4966
4967 uint32_t fAccess;
4968 if (pImpl->pfnLockedU16)
4969 fAccess = IEM_ACCESS_DATA_RW;
4970 else /* BT */
4971 fAccess = IEM_ACCESS_DATA_R;
4972
4973 /** @todo test negative bit offsets! */
4974 switch (pVCpu->iem.s.enmEffOpSize)
4975 {
4976 case IEMMODE_16BIT:
4977 IEM_MC_BEGIN(3, 2);
4978 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4979 IEM_MC_ARG(uint16_t, u16Src, 1);
4980 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4982 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4983
4984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4985 if (pImpl->pfnLockedU16)
4986 IEMOP_HLP_DONE_DECODING();
4987 else
4988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4989 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
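                /* Split the signed bit offset in u16Src: the low four bits
                   remain the in-word bit index, while the rest is shifted
                   down arithmetically to a word index and scaled to bytes,
                   so a negative offset adjusts GCPtrEffDst downwards. */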
4990 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4991 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4992 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4993 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4994 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4995 IEM_MC_FETCH_EFLAGS(EFlags);
4996
4997 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4998 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4999 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5000 else
5001 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5002 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5003
5004 IEM_MC_COMMIT_EFLAGS(EFlags);
5005 IEM_MC_ADVANCE_RIP();
5006 IEM_MC_END();
5007 return VINF_SUCCESS;
5008
5009 case IEMMODE_32BIT:
5010 IEM_MC_BEGIN(3, 2);
5011 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5012 IEM_MC_ARG(uint32_t, u32Src, 1);
5013 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5015 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5016
5017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5018 if (pImpl->pfnLockedU16)
5019 IEMOP_HLP_DONE_DECODING();
5020 else
5021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5022 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5023 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5024 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5025 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5026 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5027 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5028 IEM_MC_FETCH_EFLAGS(EFlags);
5029
5030 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5031 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5032 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5033 else
5034 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5035 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5036
5037 IEM_MC_COMMIT_EFLAGS(EFlags);
5038 IEM_MC_ADVANCE_RIP();
5039 IEM_MC_END();
5040 return VINF_SUCCESS;
5041
5042 case IEMMODE_64BIT:
5043 IEM_MC_BEGIN(3, 2);
5044 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5045 IEM_MC_ARG(uint64_t, u64Src, 1);
5046 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5048 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5049
5050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5051 if (pImpl->pfnLockedU16)
5052 IEMOP_HLP_DONE_DECODING();
5053 else
5054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5055 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5056 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5057 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5058 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5059 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5060 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5061 IEM_MC_FETCH_EFLAGS(EFlags);
5062
5063 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5064 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5065 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5066 else
5067 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5068 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5069
5070 IEM_MC_COMMIT_EFLAGS(EFlags);
5071 IEM_MC_ADVANCE_RIP();
5072 IEM_MC_END();
5073 return VINF_SUCCESS;
5074
5075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5076 }
5077 }
5078}
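
/* Note: the worker above tells bt apart from btc/btr/bts purely by
 * pImpl->pfnLockedU16: a NULL locked worker means plain bt, which only
 * needs read access and rejects the lock prefix, while the modifying forms
 * map the operand read/write and dispatch to the locked implementation when
 * an F0 prefix is present. */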
5079
5080
5081/** Opcode 0x0f 0xa3. */
5082FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5083{
5084 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5085 IEMOP_HLP_MIN_386();
5086 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5087}
5088
5089
5090/**
5091 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5092 */
5093FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5094{
5095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5096 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5097
5098 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5099 {
5100 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5102
5103 switch (pVCpu->iem.s.enmEffOpSize)
5104 {
5105 case IEMMODE_16BIT:
5106 IEM_MC_BEGIN(4, 0);
5107 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5108 IEM_MC_ARG(uint16_t, u16Src, 1);
5109 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5110 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5111
5112 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5113 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5114 IEM_MC_REF_EFLAGS(pEFlags);
5115 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5116
5117 IEM_MC_ADVANCE_RIP();
5118 IEM_MC_END();
5119 return VINF_SUCCESS;
5120
5121 case IEMMODE_32BIT:
5122 IEM_MC_BEGIN(4, 0);
5123 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5124 IEM_MC_ARG(uint32_t, u32Src, 1);
5125 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5126 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5127
5128 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5129 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5130 IEM_MC_REF_EFLAGS(pEFlags);
5131 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5132
5133 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5134 IEM_MC_ADVANCE_RIP();
5135 IEM_MC_END();
5136 return VINF_SUCCESS;
5137
5138 case IEMMODE_64BIT:
5139 IEM_MC_BEGIN(4, 0);
5140 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5141 IEM_MC_ARG(uint64_t, u64Src, 1);
5142 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5143 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5144
5145 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5146 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5147 IEM_MC_REF_EFLAGS(pEFlags);
5148 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5149
5150 IEM_MC_ADVANCE_RIP();
5151 IEM_MC_END();
5152 return VINF_SUCCESS;
5153
5154 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5155 }
5156 }
5157 else
5158 {
5159 switch (pVCpu->iem.s.enmEffOpSize)
5160 {
5161 case IEMMODE_16BIT:
5162 IEM_MC_BEGIN(4, 2);
5163 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5164 IEM_MC_ARG(uint16_t, u16Src, 1);
5165 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5166 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5168
5169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5170 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5171 IEM_MC_ASSIGN(cShiftArg, cShift);
5172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5173 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5174 IEM_MC_FETCH_EFLAGS(EFlags);
5175 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5176 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5177
5178 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5179 IEM_MC_COMMIT_EFLAGS(EFlags);
5180 IEM_MC_ADVANCE_RIP();
5181 IEM_MC_END();
5182 return VINF_SUCCESS;
5183
5184 case IEMMODE_32BIT:
5185 IEM_MC_BEGIN(4, 2);
5186 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5187 IEM_MC_ARG(uint32_t, u32Src, 1);
5188 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5189 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5191
5192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5193 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5194 IEM_MC_ASSIGN(cShiftArg, cShift);
5195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5196 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5197 IEM_MC_FETCH_EFLAGS(EFlags);
5198 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5199 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5200
5201 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5202 IEM_MC_COMMIT_EFLAGS(EFlags);
5203 IEM_MC_ADVANCE_RIP();
5204 IEM_MC_END();
5205 return VINF_SUCCESS;
5206
5207 case IEMMODE_64BIT:
5208 IEM_MC_BEGIN(4, 2);
5209 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5210 IEM_MC_ARG(uint64_t, u64Src, 1);
5211 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5212 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5214
5215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5216 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5217 IEM_MC_ASSIGN(cShiftArg, cShift);
5218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5219 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5220 IEM_MC_FETCH_EFLAGS(EFlags);
5221 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5222 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5223
5224 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5225 IEM_MC_COMMIT_EFLAGS(EFlags);
5226 IEM_MC_ADVANCE_RIP();
5227 IEM_MC_END();
5228 return VINF_SUCCESS;
5229
5230 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5231 }
5232 }
5233}
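
/* Note: the memory paths above pass 1 as the cbImm argument of
 * IEM_MC_CALC_RM_EFF_ADDR because the shift count byte still follows the
 * ModR/M bytes at that point; the effective address calculation needs to
 * know about trailing immediates so that RIP-relative operands resolve
 * against the end of the instruction. */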
5234
5235
5236/**
5237 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5238 */
5239FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5240{
5241 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5242 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5243
5244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5245 {
5246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5247
5248 switch (pVCpu->iem.s.enmEffOpSize)
5249 {
5250 case IEMMODE_16BIT:
5251 IEM_MC_BEGIN(4, 0);
5252 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5253 IEM_MC_ARG(uint16_t, u16Src, 1);
5254 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5255 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5256
5257 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5258 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5259 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5260 IEM_MC_REF_EFLAGS(pEFlags);
5261 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5262
5263 IEM_MC_ADVANCE_RIP();
5264 IEM_MC_END();
5265 return VINF_SUCCESS;
5266
5267 case IEMMODE_32BIT:
5268 IEM_MC_BEGIN(4, 0);
5269 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5270 IEM_MC_ARG(uint32_t, u32Src, 1);
5271 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5272 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5273
5274 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5275 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5276 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5277 IEM_MC_REF_EFLAGS(pEFlags);
5278 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5279
5280 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5281 IEM_MC_ADVANCE_RIP();
5282 IEM_MC_END();
5283 return VINF_SUCCESS;
5284
5285 case IEMMODE_64BIT:
5286 IEM_MC_BEGIN(4, 0);
5287 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5288 IEM_MC_ARG(uint64_t, u64Src, 1);
5289 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5290 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5291
5292 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5293 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5294 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5295 IEM_MC_REF_EFLAGS(pEFlags);
5296 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5297
5298 IEM_MC_ADVANCE_RIP();
5299 IEM_MC_END();
5300 return VINF_SUCCESS;
5301
5302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5303 }
5304 }
5305 else
5306 {
5307 switch (pVCpu->iem.s.enmEffOpSize)
5308 {
5309 case IEMMODE_16BIT:
5310 IEM_MC_BEGIN(4, 2);
5311 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5312 IEM_MC_ARG(uint16_t, u16Src, 1);
5313 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5314 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5316
5317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5319 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5320 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5321 IEM_MC_FETCH_EFLAGS(EFlags);
5322 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5323 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5324
5325 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5326 IEM_MC_COMMIT_EFLAGS(EFlags);
5327 IEM_MC_ADVANCE_RIP();
5328 IEM_MC_END();
5329 return VINF_SUCCESS;
5330
5331 case IEMMODE_32BIT:
5332 IEM_MC_BEGIN(4, 2);
5333 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5334 IEM_MC_ARG(uint32_t, u32Src, 1);
5335 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5336 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5338
5339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5341 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5342 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5343 IEM_MC_FETCH_EFLAGS(EFlags);
5344 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5345 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5346
5347 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5348 IEM_MC_COMMIT_EFLAGS(EFlags);
5349 IEM_MC_ADVANCE_RIP();
5350 IEM_MC_END();
5351 return VINF_SUCCESS;
5352
5353 case IEMMODE_64BIT:
5354 IEM_MC_BEGIN(4, 2);
5355 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5356 IEM_MC_ARG(uint64_t, u64Src, 1);
5357 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5358 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5360
5361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5363 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5364 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5365 IEM_MC_FETCH_EFLAGS(EFlags);
5366 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5367 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5368
5369 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5370 IEM_MC_COMMIT_EFLAGS(EFlags);
5371 IEM_MC_ADVANCE_RIP();
5372 IEM_MC_END();
5373 return VINF_SUCCESS;
5374
5375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5376 }
5377 }
5378}
5379
5380
5381
5382/** Opcode 0x0f 0xa4. */
5383FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5384{
5385 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5386 IEMOP_HLP_MIN_386();
5387 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5388}
5389
5390
5391/** Opcode 0x0f 0xa5. */
5392FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5393{
5394 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5395 IEMOP_HLP_MIN_386();
5396 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5397}
5398
5399
5400/** Opcode 0x0f 0xa8. */
5401FNIEMOP_DEF(iemOp_push_gs)
5402{
5403 IEMOP_MNEMONIC(push_gs, "push gs");
5404 IEMOP_HLP_MIN_386();
5405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5406 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5407}
5408
5409
5410/** Opcode 0x0f 0xa9. */
5411FNIEMOP_DEF(iemOp_pop_gs)
5412{
5413 IEMOP_MNEMONIC(pop_gs, "pop gs");
5414 IEMOP_HLP_MIN_386();
5415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5416 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5417}
5418
5419
5420/** Opcode 0x0f 0xaa. */
5421FNIEMOP_STUB(iemOp_rsm);
5422//IEMOP_HLP_MIN_386();
5423
5424
5425/** Opcode 0x0f 0xab. */
5426FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5427{
5428 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5429 IEMOP_HLP_MIN_386();
5430 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5431}
5432
5433
5434/** Opcode 0x0f 0xac. */
5435FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5436{
5437 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5438 IEMOP_HLP_MIN_386();
5439 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5440}
5441
5442
5443/** Opcode 0x0f 0xad. */
5444FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5445{
5446 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5447 IEMOP_HLP_MIN_386();
5448 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5449}
5450
5451
5452/** Opcode 0x0f 0xae mem/0. */
5453FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5454{
5455 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5456 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5457 return IEMOP_RAISE_INVALID_OPCODE();
5458
5459 IEM_MC_BEGIN(3, 1);
5460 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5461 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5462 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5465 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5466 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5467 IEM_MC_END();
5468 return VINF_SUCCESS;
5469}
5470
5471
5472/** Opcode 0x0f 0xae mem/1. */
5473FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5474{
5475 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5476 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5477 return IEMOP_RAISE_INVALID_OPCODE();
5478
5479 IEM_MC_BEGIN(3, 1);
5480 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5481 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5482 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5485 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5486 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5487 IEM_MC_END();
5488 return VINF_SUCCESS;
5489}
5490
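/* Both workers above only decode and compute the effective address; the
 * handling of the 512-byte FXSAVE area itself (presumably including the
 * alignment and feature checks as well as the actual state copy) is deferred
 * to iemCImpl_fxsave / iemCImpl_fxrstor. */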
5491
5492/** Opcode 0x0f 0xae mem/2. */
5493FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5494
5495/** Opcode 0x0f 0xae mem/3. */
5496FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5497
5498/** Opcode 0x0f 0xae mem/4. */
5499FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5500
5501/** Opcode 0x0f 0xae mem/5. */
5502FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5503
5504/** Opcode 0x0f 0xae mem/6. */
5505FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5506
5507/** Opcode 0x0f 0xae mem/7. */
5508FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5509
5510
5511/** Opcode 0x0f 0xae 11b/5. */
5512FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5513{
5514 RT_NOREF_PV(bRm);
5515 IEMOP_MNEMONIC(lfence, "lfence");
5516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5517 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5518 return IEMOP_RAISE_INVALID_OPCODE();
5519
5520 IEM_MC_BEGIN(0, 0);
5521 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5522 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5523 else
5524 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5525 IEM_MC_ADVANCE_RIP();
5526 IEM_MC_END();
5527 return VINF_SUCCESS;
5528}
5529
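/* A hedged sketch of the kind of fallback iemAImpl_alt_mem_fence is assumed
 * to provide on hosts without the SSE2 fence instructions - a locked
 * read-modify-write acts as a full memory barrier on x86:
 *     lock add dword [esp], 0
 * The same fallback is shared by all three fence emulations in this group. */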
5530
5531/** Opcode 0x0f 0xae 11b/6. */
5532FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5533{
5534 RT_NOREF_PV(bRm);
5535 IEMOP_MNEMONIC(mfence, "mfence");
5536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5537 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5538 return IEMOP_RAISE_INVALID_OPCODE();
5539
5540 IEM_MC_BEGIN(0, 0);
5541 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5542 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5543 else
5544 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5545 IEM_MC_ADVANCE_RIP();
5546 IEM_MC_END();
5547 return VINF_SUCCESS;
5548}
5549
5550
5551/** Opcode 0x0f 0xae 11b/7. */
5552FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5553{
5554 RT_NOREF_PV(bRm);
5555 IEMOP_MNEMONIC(sfence, "sfence");
5556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5557 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5558 return IEMOP_RAISE_INVALID_OPCODE();
5559
5560 IEM_MC_BEGIN(0, 0);
5561 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5562 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5563 else
5564 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5565 IEM_MC_ADVANCE_RIP();
5566 IEM_MC_END();
5567 return VINF_SUCCESS;
5568}
5569
5570
5571/** Opcode 0xf3 0x0f 0xae 11b/0. */
5572FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5573
5574/** Opcode 0xf3 0x0f 0xae 11b/1. */
5575FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5576
5577/** Opcode 0xf3 0x0f 0xae 11b/2. */
5578FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5579
5580/** Opcode 0xf3 0x0f 0xae 11b/3. */
5581FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5582
5583
5584/** Opcode 0x0f 0xae. */
5585FNIEMOP_DEF(iemOp_Grp15)
5586{
5587/** @todo continue here tomorrow! (see bs3-cpu-decoding-1.c32 r113507). */
5588 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5590 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5591 {
5592 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5593 {
5594 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5595 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5596 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5597 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5598 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5599 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5600 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5601 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5603 }
5604 }
5605 else
5606 {
5607 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5608 {
5609 case 0:
5610 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5611 {
5612 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5613 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5614 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5615 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5616 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5617 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5618 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5619 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5620 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5621 }
5622 break;
5623
5624 case IEM_OP_PRF_REPZ:
5625 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5626 {
5627 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5628 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5629 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5630 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5631 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5632 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5633 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5634 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5636 }
5637 break;
5638
5639 default:
5640 return IEMOP_RAISE_INVALID_OPCODE();
5641 }
5642 }
5643}
5644
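/* For reference, the ModRM fields the group 15 dispatcher above works with
 * (matching the X86_MODRM_* masks and shifts used throughout this file):
 *     mod = (bRm >> 6) & 3;   mod == 3 selects the register forms
 *     reg = (bRm >> 3) & 7;   opcode extension selecting the worker
 *     rm  =  bRm       & 7;   register/base encoding
 */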
5645
5646/** Opcode 0x0f 0xaf. */
5647FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5648{
5649 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5650 IEMOP_HLP_MIN_386();
5651 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5652 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5653}
5654
5655
5656/** Opcode 0x0f 0xb0. */
5657FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5658{
5659 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5660 IEMOP_HLP_MIN_486();
5661 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5662
5663 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5664 {
5665 IEMOP_HLP_DONE_DECODING();
5666 IEM_MC_BEGIN(4, 0);
5667 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5668 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5669 IEM_MC_ARG(uint8_t, u8Src, 2);
5670 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5671
5672 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5673 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5674 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5675 IEM_MC_REF_EFLAGS(pEFlags);
5676 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5677 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5678 else
5679 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5680
5681 IEM_MC_ADVANCE_RIP();
5682 IEM_MC_END();
5683 }
5684 else
5685 {
5686 IEM_MC_BEGIN(4, 3);
5687 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5688 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5689 IEM_MC_ARG(uint8_t, u8Src, 2);
5690 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5692 IEM_MC_LOCAL(uint8_t, u8Al);
5693
5694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5695 IEMOP_HLP_DONE_DECODING();
5696 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5697 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5698 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5699 IEM_MC_FETCH_EFLAGS(EFlags);
5700 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5701 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5702 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5703 else
5704 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5705
5706 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5707 IEM_MC_COMMIT_EFLAGS(EFlags);
5708 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5709 IEM_MC_ADVANCE_RIP();
5710 IEM_MC_END();
5711 }
5712 return VINF_SUCCESS;
5713}
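
/* Operational sketch of CMPXCHG (illustration only; the arithmetic flags are
 * set as for a CMP of the accumulator against the destination):
 *     if (al == *pu8Dst) { ZF = 1; *pu8Dst = u8Src;   }
 *     else               { ZF = 0; al      = *pu8Dst; }
 * The memory form above keeps AL in a local (u8Al) so the accumulator can be
 * written back after the mapped destination has been committed. */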
5714
5715/** Opcode 0x0f 0xb1. */
5716FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5717{
5718 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5719 IEMOP_HLP_MIN_486();
5720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5721
5722 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5723 {
5724 IEMOP_HLP_DONE_DECODING();
5725 switch (pVCpu->iem.s.enmEffOpSize)
5726 {
5727 case IEMMODE_16BIT:
5728 IEM_MC_BEGIN(4, 0);
5729 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5730 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5731 IEM_MC_ARG(uint16_t, u16Src, 2);
5732 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5733
5734 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5735 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5736 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5737 IEM_MC_REF_EFLAGS(pEFlags);
5738 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5739 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5740 else
5741 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5742
5743 IEM_MC_ADVANCE_RIP();
5744 IEM_MC_END();
5745 return VINF_SUCCESS;
5746
5747 case IEMMODE_32BIT:
5748 IEM_MC_BEGIN(4, 0);
5749 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5750 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5751 IEM_MC_ARG(uint32_t, u32Src, 2);
5752 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5753
5754 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5755 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5756 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5757 IEM_MC_REF_EFLAGS(pEFlags);
5758 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5759 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5760 else
5761 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5762
5763 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5764 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5765 IEM_MC_ADVANCE_RIP();
5766 IEM_MC_END();
5767 return VINF_SUCCESS;
5768
5769 case IEMMODE_64BIT:
5770 IEM_MC_BEGIN(4, 0);
5771 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5772 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
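                /* Note: on 32-bit hosts the 64-bit source operand is passed
                 * to the worker by reference; the assembly worker's calling
                 * convention is assumed to make the by-reference form the
                 * practical one there. */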
5773#ifdef RT_ARCH_X86
5774 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5775#else
5776 IEM_MC_ARG(uint64_t, u64Src, 2);
5777#endif
5778 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5779
5780 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5781 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5782 IEM_MC_REF_EFLAGS(pEFlags);
5783#ifdef RT_ARCH_X86
5784 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5785 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5786 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5787 else
5788 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5789#else
5790 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5791 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5792 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5793 else
5794 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5795#endif
5796
5797 IEM_MC_ADVANCE_RIP();
5798 IEM_MC_END();
5799 return VINF_SUCCESS;
5800
5801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5802 }
5803 }
5804 else
5805 {
5806 switch (pVCpu->iem.s.enmEffOpSize)
5807 {
5808 case IEMMODE_16BIT:
5809 IEM_MC_BEGIN(4, 3);
5810 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5811 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5812 IEM_MC_ARG(uint16_t, u16Src, 2);
5813 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5815 IEM_MC_LOCAL(uint16_t, u16Ax);
5816
5817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5818 IEMOP_HLP_DONE_DECODING();
5819 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5820 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5821 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5822 IEM_MC_FETCH_EFLAGS(EFlags);
5823 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5824 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5825 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5826 else
5827 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5828
5829 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5830 IEM_MC_COMMIT_EFLAGS(EFlags);
5831 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5832 IEM_MC_ADVANCE_RIP();
5833 IEM_MC_END();
5834 return VINF_SUCCESS;
5835
5836 case IEMMODE_32BIT:
5837 IEM_MC_BEGIN(4, 3);
5838 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5839 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5840 IEM_MC_ARG(uint32_t, u32Src, 2);
5841 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5843 IEM_MC_LOCAL(uint32_t, u32Eax);
5844
5845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5846 IEMOP_HLP_DONE_DECODING();
5847 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5848 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5849 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5850 IEM_MC_FETCH_EFLAGS(EFlags);
5851 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5852 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5853 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5854 else
5855 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5856
5857 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5858 IEM_MC_COMMIT_EFLAGS(EFlags);
5859 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5860 IEM_MC_ADVANCE_RIP();
5861 IEM_MC_END();
5862 return VINF_SUCCESS;
5863
5864 case IEMMODE_64BIT:
5865 IEM_MC_BEGIN(4, 3);
5866 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5867 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5868#ifdef RT_ARCH_X86
5869 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5870#else
5871 IEM_MC_ARG(uint64_t, u64Src, 2);
5872#endif
5873 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5875 IEM_MC_LOCAL(uint64_t, u64Rax);
5876
5877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5878 IEMOP_HLP_DONE_DECODING();
5879 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5880 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5881 IEM_MC_FETCH_EFLAGS(EFlags);
5882 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5883#ifdef RT_ARCH_X86
5884 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5885 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5886 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5887 else
5888 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5889#else
5890 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5891 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5892 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5893 else
5894 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5895#endif
5896
5897 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5898 IEM_MC_COMMIT_EFLAGS(EFlags);
5899 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5900 IEM_MC_ADVANCE_RIP();
5901 IEM_MC_END();
5902 return VINF_SUCCESS;
5903
5904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5905 }
5906 }
5907}
5908
5909
5910FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5911{
5912 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5913 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5914
5915 switch (pVCpu->iem.s.enmEffOpSize)
5916 {
5917 case IEMMODE_16BIT:
5918 IEM_MC_BEGIN(5, 1);
5919 IEM_MC_ARG(uint16_t, uSel, 0);
5920 IEM_MC_ARG(uint16_t, offSeg, 1);
5921 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5922 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5923 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5924 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5927 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5928 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5929 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5930 IEM_MC_END();
5931 return VINF_SUCCESS;
5932
5933 case IEMMODE_32BIT:
5934 IEM_MC_BEGIN(5, 1);
5935 IEM_MC_ARG(uint16_t, uSel, 0);
5936 IEM_MC_ARG(uint32_t, offSeg, 1);
5937 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5938 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5939 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5940 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5943 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5944 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5945 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5946 IEM_MC_END();
5947 return VINF_SUCCESS;
5948
5949 case IEMMODE_64BIT:
5950 IEM_MC_BEGIN(5, 1);
5951 IEM_MC_ARG(uint16_t, uSel, 0);
5952 IEM_MC_ARG(uint64_t, offSeg, 1);
5953 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5954 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5955 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5956 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5959 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
5960 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5961 else
5962 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5963 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5964 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5965 IEM_MC_END();
5966 return VINF_SUCCESS;
5967
5968 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5969 }
5970}
5971
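/* Memory layout assumed by the common worker above: the offset comes first,
 * followed by the 16-bit selector, i.e. roughly (hypothetical struct, for
 * illustration only):
 *     struct { uintXX_t off; uint16_t sel; }
 * which is why the selector fetch uses a displacement equal to the offset
 * size (2, 4 or 8). */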
5972
5973/** Opcode 0x0f 0xb2. */
5974FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5975{
5976 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5977 IEMOP_HLP_MIN_386();
5978 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5979 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5980 return IEMOP_RAISE_INVALID_OPCODE();
5981 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5982}
5983
5984
5985/** Opcode 0x0f 0xb3. */
5986FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5987{
5988 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5989 IEMOP_HLP_MIN_386();
5990 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5991}
5992
5993
5994/** Opcode 0x0f 0xb4. */
5995FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5996{
5997 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5998 IEMOP_HLP_MIN_386();
5999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6000 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6001 return IEMOP_RAISE_INVALID_OPCODE();
6002 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6003}
6004
6005
6006/** Opcode 0x0f 0xb5. */
6007FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6008{
6009 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6010 IEMOP_HLP_MIN_386();
6011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6012 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6013 return IEMOP_RAISE_INVALID_OPCODE();
6014 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6015}
6016
6017
6018/** Opcode 0x0f 0xb6. */
6019FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6020{
6021 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6022 IEMOP_HLP_MIN_386();
6023
6024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6025
6026 /*
6027 * If rm is denoting a register, no more instruction bytes.
6028 */
6029 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6030 {
6031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6032 switch (pVCpu->iem.s.enmEffOpSize)
6033 {
6034 case IEMMODE_16BIT:
6035 IEM_MC_BEGIN(0, 1);
6036 IEM_MC_LOCAL(uint16_t, u16Value);
6037 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6038 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6039 IEM_MC_ADVANCE_RIP();
6040 IEM_MC_END();
6041 return VINF_SUCCESS;
6042
6043 case IEMMODE_32BIT:
6044 IEM_MC_BEGIN(0, 1);
6045 IEM_MC_LOCAL(uint32_t, u32Value);
6046 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6047 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6048 IEM_MC_ADVANCE_RIP();
6049 IEM_MC_END();
6050 return VINF_SUCCESS;
6051
6052 case IEMMODE_64BIT:
6053 IEM_MC_BEGIN(0, 1);
6054 IEM_MC_LOCAL(uint64_t, u64Value);
6055 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6056 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6057 IEM_MC_ADVANCE_RIP();
6058 IEM_MC_END();
6059 return VINF_SUCCESS;
6060
6061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6062 }
6063 }
6064 else
6065 {
6066 /*
6067 * We're loading a register from memory.
6068 */
6069 switch (pVCpu->iem.s.enmEffOpSize)
6070 {
6071 case IEMMODE_16BIT:
6072 IEM_MC_BEGIN(0, 2);
6073 IEM_MC_LOCAL(uint16_t, u16Value);
6074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6077 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6078 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6079 IEM_MC_ADVANCE_RIP();
6080 IEM_MC_END();
6081 return VINF_SUCCESS;
6082
6083 case IEMMODE_32BIT:
6084 IEM_MC_BEGIN(0, 2);
6085 IEM_MC_LOCAL(uint32_t, u32Value);
6086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6089 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6090 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6091 IEM_MC_ADVANCE_RIP();
6092 IEM_MC_END();
6093 return VINF_SUCCESS;
6094
6095 case IEMMODE_64BIT:
6096 IEM_MC_BEGIN(0, 2);
6097 IEM_MC_LOCAL(uint64_t, u64Value);
6098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6101 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6102 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6103 IEM_MC_ADVANCE_RIP();
6104 IEM_MC_END();
6105 return VINF_SUCCESS;
6106
6107 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6108 }
6109 }
6110}
6111
6112
6113/** Opcode 0x0f 0xb7. */
6114FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6115{
6116 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6117 IEMOP_HLP_MIN_386();
6118
6119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6120
6121 /** @todo Not entirely sure how the operand size prefix is handled here,
6122 * assuming that it will be ignored. It would be nice to have a few
6123 * tests for this. */
6124 /*
6125 * If rm is denoting a register, no more instruction bytes.
6126 */
6127 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6128 {
6129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6130 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6131 {
6132 IEM_MC_BEGIN(0, 1);
6133 IEM_MC_LOCAL(uint32_t, u32Value);
6134 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6135 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6136 IEM_MC_ADVANCE_RIP();
6137 IEM_MC_END();
6138 }
6139 else
6140 {
6141 IEM_MC_BEGIN(0, 1);
6142 IEM_MC_LOCAL(uint64_t, u64Value);
6143 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6144 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6145 IEM_MC_ADVANCE_RIP();
6146 IEM_MC_END();
6147 }
6148 }
6149 else
6150 {
6151 /*
6152 * We're loading a register from memory.
6153 */
6154 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6155 {
6156 IEM_MC_BEGIN(0, 2);
6157 IEM_MC_LOCAL(uint32_t, u32Value);
6158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6161 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6162 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6163 IEM_MC_ADVANCE_RIP();
6164 IEM_MC_END();
6165 }
6166 else
6167 {
6168 IEM_MC_BEGIN(0, 2);
6169 IEM_MC_LOCAL(uint64_t, u64Value);
6170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6173 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6174 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6175 IEM_MC_ADVANCE_RIP();
6176 IEM_MC_END();
6177 }
6178 }
6179 return VINF_SUCCESS;
6180}
6181
6182
6183/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6184FNIEMOP_UD_STUB(iemOp_jmpe);
6185/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6186FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6187
6188
6189/** Opcode 0x0f 0xb9. */
6190FNIEMOP_DEF(iemOp_Grp10)
6191{
6192 Log(("iemOp_Grp10 -> #UD\n"));
6193 return IEMOP_RAISE_INVALID_OPCODE();
6194}
6195
6196
6197/** Opcode 0x0f 0xba. */
6198FNIEMOP_DEF(iemOp_Grp8)
6199{
6200 IEMOP_HLP_MIN_386();
6201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6202 PCIEMOPBINSIZES pImpl;
6203 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6204 {
6205 case 0: case 1: case 2: case 3:
6206 return IEMOP_RAISE_INVALID_OPCODE();
6207 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6208 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6209 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6210 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6212 }
6213 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6214
6215 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6216 {
6217 /* register destination. */
6218 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6220
6221 switch (pVCpu->iem.s.enmEffOpSize)
6222 {
6223 case IEMMODE_16BIT:
6224 IEM_MC_BEGIN(3, 0);
6225 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6226 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6227 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6228
6229 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6230 IEM_MC_REF_EFLAGS(pEFlags);
6231 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6232
6233 IEM_MC_ADVANCE_RIP();
6234 IEM_MC_END();
6235 return VINF_SUCCESS;
6236
6237 case IEMMODE_32BIT:
6238 IEM_MC_BEGIN(3, 0);
6239 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6240 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6241 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6242
6243 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6244 IEM_MC_REF_EFLAGS(pEFlags);
6245 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6246
6247 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6248 IEM_MC_ADVANCE_RIP();
6249 IEM_MC_END();
6250 return VINF_SUCCESS;
6251
6252 case IEMMODE_64BIT:
6253 IEM_MC_BEGIN(3, 0);
6254 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6255 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6256 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6257
6258 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6259 IEM_MC_REF_EFLAGS(pEFlags);
6260 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6261
6262 IEM_MC_ADVANCE_RIP();
6263 IEM_MC_END();
6264 return VINF_SUCCESS;
6265
6266 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6267 }
6268 }
6269 else
6270 {
6271 /* memory destination. */
6272
6273 uint32_t fAccess;
6274 if (pImpl->pfnLockedU16)
6275 fAccess = IEM_ACCESS_DATA_RW;
6276 else /* BT */
6277 fAccess = IEM_ACCESS_DATA_R;
6278
6279 /** @todo test negative bit offsets! */
6280 switch (pVCpu->iem.s.enmEffOpSize)
6281 {
6282 case IEMMODE_16BIT:
6283 IEM_MC_BEGIN(3, 1);
6284 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6285 IEM_MC_ARG(uint16_t, u16Src, 1);
6286 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6288
6289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6290 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6291 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6292 if (pImpl->pfnLockedU16)
6293 IEMOP_HLP_DONE_DECODING();
6294 else
6295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6296 IEM_MC_FETCH_EFLAGS(EFlags);
6297 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6298 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6299 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6300 else
6301 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6302 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6303
6304 IEM_MC_COMMIT_EFLAGS(EFlags);
6305 IEM_MC_ADVANCE_RIP();
6306 IEM_MC_END();
6307 return VINF_SUCCESS;
6308
6309 case IEMMODE_32BIT:
6310 IEM_MC_BEGIN(3, 1);
6311 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6312 IEM_MC_ARG(uint32_t, u32Src, 1);
6313 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6315
6316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6317 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6318 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6319 if (pImpl->pfnLockedU16)
6320 IEMOP_HLP_DONE_DECODING();
6321 else
6322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6323 IEM_MC_FETCH_EFLAGS(EFlags);
6324 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6325 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6326 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6327 else
6328 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6329 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6330
6331 IEM_MC_COMMIT_EFLAGS(EFlags);
6332 IEM_MC_ADVANCE_RIP();
6333 IEM_MC_END();
6334 return VINF_SUCCESS;
6335
6336 case IEMMODE_64BIT:
6337 IEM_MC_BEGIN(3, 1);
6338 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6339 IEM_MC_ARG(uint64_t, u64Src, 1);
6340 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6342
6343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6344 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6345 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6346 if (pImpl->pfnLockedU16)
6347 IEMOP_HLP_DONE_DECODING();
6348 else
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6350 IEM_MC_FETCH_EFLAGS(EFlags);
6351 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6352 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6353 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6354 else
6355 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6356 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6357
6358 IEM_MC_COMMIT_EFLAGS(EFlags);
6359 IEM_MC_ADVANCE_RIP();
6360 IEM_MC_END();
6361 return VINF_SUCCESS;
6362
6363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6364 }
6365 }
6366
6367}
6368
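/* Note on the immediate forms above: the bit offset is masked modulo the
 * operand width (u8Bit & 0x0f / 0x1f / 0x3f), so unlike the Gv-source bit
 * instructions the access can never fall outside the single word/dword/qword
 * that gets mapped. The memory paths pass 1 as the second argument to
 * IEM_MC_CALC_RM_EFF_ADDR because one immediate byte still follows the
 * ModRM encoding (assumed to matter for RIP-relative addressing). */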
6369
6370/** Opcode 0x0f 0xbb. */
6371FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6372{
6373 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6374 IEMOP_HLP_MIN_386();
6375 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6376}
6377
6378
6379/** Opcode 0x0f 0xbc. */
6380FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6381{
6382 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6383 IEMOP_HLP_MIN_386();
6384 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6385 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6386}
6387
6388
6389/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6390FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6391
6392
6393/** Opcode 0x0f 0xbd. */
6394FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6395{
6396 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6397 IEMOP_HLP_MIN_386();
6398 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6399 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6400}
6401
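/* Behavioural sketch of BSF/BSR (illustration only): a zero source sets ZF
 * and leaves the destination undefined (implementation specific); a non-zero
 * source clears ZF and stores a bit index, e.g. for the 32-bit case:
 *     bsf: iDst = count_trailing_zeros(uSrc);
 *     bsr: iDst = 31 - count_leading_zeros(uSrc);
 * count_*_zeros are pseudo-functions used here purely for illustration. */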
6402
6403/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6404FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6405
6406
6407/** Opcode 0x0f 0xbe. */
6408FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6409{
6410 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6411 IEMOP_HLP_MIN_386();
6412
6413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6414
6415 /*
6416 * If rm is denoting a register, no more instruction bytes.
6417 */
6418 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6419 {
6420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6421 switch (pVCpu->iem.s.enmEffOpSize)
6422 {
6423 case IEMMODE_16BIT:
6424 IEM_MC_BEGIN(0, 1);
6425 IEM_MC_LOCAL(uint16_t, u16Value);
6426 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6427 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6428 IEM_MC_ADVANCE_RIP();
6429 IEM_MC_END();
6430 return VINF_SUCCESS;
6431
6432 case IEMMODE_32BIT:
6433 IEM_MC_BEGIN(0, 1);
6434 IEM_MC_LOCAL(uint32_t, u32Value);
6435 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6436 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6437 IEM_MC_ADVANCE_RIP();
6438 IEM_MC_END();
6439 return VINF_SUCCESS;
6440
6441 case IEMMODE_64BIT:
6442 IEM_MC_BEGIN(0, 1);
6443 IEM_MC_LOCAL(uint64_t, u64Value);
6444 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6445 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6446 IEM_MC_ADVANCE_RIP();
6447 IEM_MC_END();
6448 return VINF_SUCCESS;
6449
6450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6451 }
6452 }
6453 else
6454 {
6455 /*
6456 * We're loading a register from memory.
6457 */
6458 switch (pVCpu->iem.s.enmEffOpSize)
6459 {
6460 case IEMMODE_16BIT:
6461 IEM_MC_BEGIN(0, 2);
6462 IEM_MC_LOCAL(uint16_t, u16Value);
6463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6466 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6467 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6468 IEM_MC_ADVANCE_RIP();
6469 IEM_MC_END();
6470 return VINF_SUCCESS;
6471
6472 case IEMMODE_32BIT:
6473 IEM_MC_BEGIN(0, 2);
6474 IEM_MC_LOCAL(uint32_t, u32Value);
6475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6478 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6479 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6480 IEM_MC_ADVANCE_RIP();
6481 IEM_MC_END();
6482 return VINF_SUCCESS;
6483
6484 case IEMMODE_64BIT:
6485 IEM_MC_BEGIN(0, 2);
6486 IEM_MC_LOCAL(uint64_t, u64Value);
6487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6490 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6491 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6492 IEM_MC_ADVANCE_RIP();
6493 IEM_MC_END();
6494 return VINF_SUCCESS;
6495
6496 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6497 }
6498 }
6499}
6500
6501
6502/** Opcode 0x0f 0xbf. */
6503FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6504{
6505 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6506 IEMOP_HLP_MIN_386();
6507
6508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6509
6510 /** @todo Not entirely sure how the operand size prefix is handled here,
6511 * assuming that it will be ignored. It would be nice to have a few
6512 * tests for this. */
6513 /*
6514 * If rm is denoting a register, no more instruction bytes.
6515 */
6516 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6517 {
6518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6519 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6520 {
6521 IEM_MC_BEGIN(0, 1);
6522 IEM_MC_LOCAL(uint32_t, u32Value);
6523 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6524 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6525 IEM_MC_ADVANCE_RIP();
6526 IEM_MC_END();
6527 }
6528 else
6529 {
6530 IEM_MC_BEGIN(0, 1);
6531 IEM_MC_LOCAL(uint64_t, u64Value);
6532 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6533 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6534 IEM_MC_ADVANCE_RIP();
6535 IEM_MC_END();
6536 }
6537 }
6538 else
6539 {
6540 /*
6541 * We're loading a register from memory.
6542 */
6543 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6544 {
6545 IEM_MC_BEGIN(0, 2);
6546 IEM_MC_LOCAL(uint32_t, u32Value);
6547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6550 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6551 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6552 IEM_MC_ADVANCE_RIP();
6553 IEM_MC_END();
6554 }
6555 else
6556 {
6557 IEM_MC_BEGIN(0, 2);
6558 IEM_MC_LOCAL(uint64_t, u64Value);
6559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6562 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6563 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6564 IEM_MC_ADVANCE_RIP();
6565 IEM_MC_END();
6566 }
6567 }
6568 return VINF_SUCCESS;
6569}
6570
6571
6572/** Opcode 0x0f 0xc0. */
6573FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6574{
6575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6576 IEMOP_HLP_MIN_486();
6577 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6578
6579 /*
6580 * If rm is denoting a register, no more instruction bytes.
6581 */
6582 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6583 {
6584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6585
6586 IEM_MC_BEGIN(3, 0);
6587 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6588 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6589 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6590
6591 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6592 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6593 IEM_MC_REF_EFLAGS(pEFlags);
6594 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6595
6596 IEM_MC_ADVANCE_RIP();
6597 IEM_MC_END();
6598 }
6599 else
6600 {
6601 /*
6602 * We're accessing memory.
6603 */
6604 IEM_MC_BEGIN(3, 3);
6605 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6606 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6607 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6608 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6610
6611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6612 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6613 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6614 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6615 IEM_MC_FETCH_EFLAGS(EFlags);
6616 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6617 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6618 else
6619 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6620
6621 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6622 IEM_MC_COMMIT_EFLAGS(EFlags);
6623 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6624 IEM_MC_ADVANCE_RIP();
6625 IEM_MC_END();
6626 return VINF_SUCCESS;
6627 }
6628 return VINF_SUCCESS;
6629}
6630
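/* Operational sketch of XADD (illustration only; flags as for ADD):
 *     uTmp   = *puDst;
 *     *puDst = uTmp + *puReg;
 *     *puReg = uTmp;
 * This is why the memory forms snapshot the register into a local copy
 * (u8RegCopy and friends) and store it back into the register only after the
 * mapped memory operand has been committed. */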
6631
6632/** Opcode 0x0f 0xc1. */
6633FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6634{
6635 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6636 IEMOP_HLP_MIN_486();
6637 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6638
6639 /*
6640 * If rm is denoting a register, no more instruction bytes.
6641 */
6642 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6643 {
6644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6645
6646 switch (pVCpu->iem.s.enmEffOpSize)
6647 {
6648 case IEMMODE_16BIT:
6649 IEM_MC_BEGIN(3, 0);
6650 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6651 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6652 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6653
6654 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6655 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6656 IEM_MC_REF_EFLAGS(pEFlags);
6657 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6658
6659 IEM_MC_ADVANCE_RIP();
6660 IEM_MC_END();
6661 return VINF_SUCCESS;
6662
6663 case IEMMODE_32BIT:
6664 IEM_MC_BEGIN(3, 0);
6665 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6666 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6667 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6668
6669 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6670 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6671 IEM_MC_REF_EFLAGS(pEFlags);
6672 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6673
6674 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6675 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6676 IEM_MC_ADVANCE_RIP();
6677 IEM_MC_END();
6678 return VINF_SUCCESS;
6679
6680 case IEMMODE_64BIT:
6681 IEM_MC_BEGIN(3, 0);
6682 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6683 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6684 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6685
6686 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6687 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6688 IEM_MC_REF_EFLAGS(pEFlags);
6689 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6690
6691 IEM_MC_ADVANCE_RIP();
6692 IEM_MC_END();
6693 return VINF_SUCCESS;
6694
6695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6696 }
6697 }
6698 else
6699 {
6700 /*
6701 * We're accessing memory.
6702 */
6703 switch (pVCpu->iem.s.enmEffOpSize)
6704 {
6705 case IEMMODE_16BIT:
6706 IEM_MC_BEGIN(3, 3);
6707 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6708 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6709 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6710 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6712
6713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6714 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6715 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6716 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6717 IEM_MC_FETCH_EFLAGS(EFlags);
6718 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6719 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6720 else
6721 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6722
6723 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6724 IEM_MC_COMMIT_EFLAGS(EFlags);
6725 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6726 IEM_MC_ADVANCE_RIP();
6727 IEM_MC_END();
6728 return VINF_SUCCESS;
6729
6730 case IEMMODE_32BIT:
6731 IEM_MC_BEGIN(3, 3);
6732 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6733 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6734 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6735 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6737
6738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6739 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6740 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6741 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6742 IEM_MC_FETCH_EFLAGS(EFlags);
6743 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6744 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6745 else
6746 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6747
6748 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6749 IEM_MC_COMMIT_EFLAGS(EFlags);
6750 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6751 IEM_MC_ADVANCE_RIP();
6752 IEM_MC_END();
6753 return VINF_SUCCESS;
6754
6755 case IEMMODE_64BIT:
6756 IEM_MC_BEGIN(3, 3);
6757 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6758 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6759 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6760 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6762
6763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6764 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6765 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6766 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6767 IEM_MC_FETCH_EFLAGS(EFlags);
6768 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6769 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6770 else
6771 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6772
6773 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6774 IEM_MC_COMMIT_EFLAGS(EFlags);
6775 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6776 IEM_MC_ADVANCE_RIP();
6777 IEM_MC_END();
6778 return VINF_SUCCESS;
6779
6780 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6781 }
6782 }
6783}
6784
6785
6786/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
6787FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
6788/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
6789FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
6790/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
6791FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
6792/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
6793FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
6794
6795
6796/** Opcode 0x0f 0xc3. */
6797FNIEMOP_DEF(iemOp_movnti_My_Gy)
6798{
6799 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6800
6801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6802
6803 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6804 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6805 {
6806 switch (pVCpu->iem.s.enmEffOpSize)
6807 {
6808 case IEMMODE_32BIT:
6809 IEM_MC_BEGIN(0, 2);
6810 IEM_MC_LOCAL(uint32_t, u32Value);
6811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6812
6813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6815 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6816 return IEMOP_RAISE_INVALID_OPCODE();
6817
6818 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6819 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6820 IEM_MC_ADVANCE_RIP();
6821 IEM_MC_END();
6822 break;
6823
6824 case IEMMODE_64BIT:
6825 IEM_MC_BEGIN(0, 2);
6826 IEM_MC_LOCAL(uint64_t, u64Value);
6827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6828
6829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6831 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6832 return IEMOP_RAISE_INVALID_OPCODE();
6833
6834 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6835 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6836 IEM_MC_ADVANCE_RIP();
6837 IEM_MC_END();
6838 break;
6839
6840 case IEMMODE_16BIT:
6841 /** @todo check this form. */
6842 return IEMOP_RAISE_INVALID_OPCODE();
6843 }
6844 }
6845 else
6846 return IEMOP_RAISE_INVALID_OPCODE();
6847 return VINF_SUCCESS;
6848}
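
/* The non-temporal hint of MOVNTI only affects cache usage; emulating it as
 * the ordinary store above is assumed to be architecturally acceptable. */
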
6849/* Opcode 0x66 0x0f 0xc3 - invalid */
6850/* Opcode 0xf3 0x0f 0xc3 - invalid */
6851/* Opcode 0xf2 0x0f 0xc3 - invalid */
6852
6853/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
6854FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
6855/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6856FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
6857/* Opcode 0xf3 0x0f 0xc4 - invalid */
6858/* Opcode 0xf2 0x0f 0xc4 - invalid */
6859
6860/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
6861FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
6862/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
6863FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
6864/* Opcode 0xf3 0x0f 0xc5 - invalid */
6865/* Opcode 0xf2 0x0f 0xc5 - invalid */
6866
6867/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
6868FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
6869/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6870FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
6871/* Opcode 0xf3 0x0f 0xc6 - invalid */
6872/* Opcode 0xf2 0x0f 0xc6 - invalid */
6873
6874
6875/** Opcode 0x0f 0xc7 !11/1. */
6876FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6877{
6878 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6879
6880 IEM_MC_BEGIN(4, 3);
6881 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6882 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6883 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6884 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6885 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6886 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6887 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6888
6889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6890 IEMOP_HLP_DONE_DECODING();
6891 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6892
6893 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6894 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6895 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6896
6897 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6898 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6899 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6900
6901 IEM_MC_FETCH_EFLAGS(EFlags);
6902 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6903 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6904 else
6905 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6906
6907 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6908 IEM_MC_COMMIT_EFLAGS(EFlags);
6909 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6910 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6911 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6912 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6913 IEM_MC_ENDIF();
6914 IEM_MC_ADVANCE_RIP();
6915
6916 IEM_MC_END();
6917 return VINF_SUCCESS;
6918}
6919
6920
6921/** Opcode REX.W 0x0f 0xc7 !11/1. */
6922FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6923{
6924 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
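/* CMPXCHG16B works like CMPXCHG8B, but on RDX:RAX / RCX:RBX and a 16-byte
   memory operand that must be 16-byte aligned, see the
   IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED check below. */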
6925 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6926 {
6927#if 0
6928 RT_NOREF(bRm);
6929 IEMOP_BITCH_ABOUT_STUB();
6930 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6931#else
6932 IEM_MC_BEGIN(4, 3);
6933 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6934 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6935 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6936 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6937 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6938 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6940
6941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6942 IEMOP_HLP_DONE_DECODING();
6943 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6944 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6945
6946 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6947 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6948 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6949
6950 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6951 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6952 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6953
6954 IEM_MC_FETCH_EFLAGS(EFlags);
6955# ifdef RT_ARCH_AMD64
6956 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6957 {
6958 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6959 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6960 else
6961 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6962 }
6963 else
6964# endif
6965 {
6966 /* Note! The fallback for 32-bit hosts and hosts without CX16 does multiple
6967 accesses that are not all atomic, which works fine in a uni-CPU guest
6968 configuration (ignoring DMA). If guest SMP is active we have no choice
6969 but to use a rendezvous callback here. Sigh. */
6970 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6971 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6972 else
6973 {
6974 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6975 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6976 }
6977 }
6978
6979 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6980 IEM_MC_COMMIT_EFLAGS(EFlags);
6981 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6982 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6983 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6984 IEM_MC_ENDIF();
6985 IEM_MC_ADVANCE_RIP();
6986
6987 IEM_MC_END();
6988 return VINF_SUCCESS;
6989#endif
6990 }
6991 Log(("cmpxchg16b -> #UD\n"));
6992 return IEMOP_RAISE_INVALID_OPCODE();
6993}
6994
6995
6996/** Opcode 0x0f 0xc7 11/6. */
6997FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6998
6999/** Opcode 0x0f 0xc7 !11/6. */
7000FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7001
7002/** Opcode 0x66 0x0f 0xc7 !11/6. */
7003FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7004
7005/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7006FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7007
7008/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7009FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7010
7011
7012/** Opcode 0x0f 0xc7. */
7013FNIEMOP_DEF(iemOp_Grp9)
7014{
7015 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
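/* The ModR/M reg field selects the group 9 member: /1 is CMPXCHG8B or
   CMPXCHG16B (REX.W), /6 is RDRAND (register form) or VMPTRLD/VMCLEAR/VMXON
   (memory form, selected by prefix), and /7 is VMPTRST. */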
7017 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7018 {
7019 case 0: case 2: case 3: case 4: case 5:
7020 return IEMOP_RAISE_INVALID_OPCODE();
7021 case 1:
7022 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7023 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7024 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7025 return IEMOP_RAISE_INVALID_OPCODE();
7026 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7027 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7028 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7029 case 6:
7030 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7031 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7032 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7033 {
7034 case 0:
7035 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7036 case IEM_OP_PRF_SIZE_OP:
7037 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7038 case IEM_OP_PRF_REPZ:
7039 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7040 default:
7041 return IEMOP_RAISE_INVALID_OPCODE();
7042 }
7043 case 7:
7044 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7045 {
7046 case 0:
7047 case IEM_OP_PRF_REPZ:
7048 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7049 default:
7050 return IEMOP_RAISE_INVALID_OPCODE();
7051 }
7052 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7053 }
7054}
7055
7056
7057/**
7058 * Common 'bswap register' helper.
7059 */
7060FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7061{
7062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7063 switch (pVCpu->iem.s.enmEffOpSize)
7064 {
7065 case IEMMODE_16BIT:
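/* Note: BSWAP with a 16-bit operand is undefined according to the SDM;
   observed hardware tends to zero the low word. A 32-bit reference is
   used here so the worker can store its result without touching the
   high dword. */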
7066 IEM_MC_BEGIN(1, 0);
7067 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7068 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7069 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7070 IEM_MC_ADVANCE_RIP();
7071 IEM_MC_END();
7072 return VINF_SUCCESS;
7073
7074 case IEMMODE_32BIT:
7075 IEM_MC_BEGIN(1, 0);
7076 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7077 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7078 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7079 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7080 IEM_MC_ADVANCE_RIP();
7081 IEM_MC_END();
7082 return VINF_SUCCESS;
7083
7084 case IEMMODE_64BIT:
7085 IEM_MC_BEGIN(1, 0);
7086 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7087 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7088 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7089 IEM_MC_ADVANCE_RIP();
7090 IEM_MC_END();
7091 return VINF_SUCCESS;
7092
7093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7094 }
7095}
7096
7097
7098/** Opcode 0x0f 0xc8. */
7099FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7100{
7101 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7102 /* Note! Intel's manuals state that R8-R15 can be accessed by using a REX.X
7103 prefix; it appears REX.B is actually the correct prefix. For a parallel
7104 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7105 IEMOP_HLP_MIN_486();
7106 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7107}
7108
7109
7110/** Opcode 0x0f 0xc9. */
7111FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7112{
7113 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7114 IEMOP_HLP_MIN_486();
7115 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7116}
7117
7118
7119/** Opcode 0x0f 0xca. */
7120FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7121{
7122 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7123 IEMOP_HLP_MIN_486();
7124 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7125}
7126
7127
7128/** Opcode 0x0f 0xcb. */
7129FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7130{
7131 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7132 IEMOP_HLP_MIN_486();
7133 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7134}
7135
7136
7137/** Opcode 0x0f 0xcc. */
7138FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7139{
7140 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7141 IEMOP_HLP_MIN_486();
7142 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7143}
7144
7145
7146/** Opcode 0x0f 0xcd. */
7147FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7148{
7149 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7150 IEMOP_HLP_MIN_486();
7151 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7152}
7153
7154
7155/** Opcode 0x0f 0xce. */
7156FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7157{
7158 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7159 IEMOP_HLP_MIN_486();
7160 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7161}
7162
7163
7164/** Opcode 0x0f 0xcf. */
7165FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7166{
7167 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7168 IEMOP_HLP_MIN_486();
7169 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7170}
7171
7172
7173/* Opcode 0x0f 0xd0 - invalid */
7174/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7175FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7176/* Opcode 0xf3 0x0f 0xd0 - invalid */
7177/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7178FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7179
7180/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7181FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7182/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7183FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7184/* Opcode 0xf3 0x0f 0xd1 - invalid */
7185/* Opcode 0xf2 0x0f 0xd1 - invalid */
7186
7187/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7188FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7189/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7190FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7191/* Opcode 0xf3 0x0f 0xd2 - invalid */
7192/* Opcode 0xf2 0x0f 0xd2 - invalid */
7193
7194/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7195FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7196/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7197FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7198/* Opcode 0xf3 0x0f 0xd3 - invalid */
7199/* Opcode 0xf2 0x0f 0xd3 - invalid */
7200
7201/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7202FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7203/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7204FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7205/* Opcode 0xf3 0x0f 0xd4 - invalid */
7206/* Opcode 0xf2 0x0f 0xd4 - invalid */
7207
7208/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7209FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7210/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7211FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7212/* Opcode 0xf3 0x0f 0xd5 - invalid */
7213/* Opcode 0xf2 0x0f 0xd5 - invalid */
7214
7215/* Opcode 0x0f 0xd6 - invalid */
7216/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7217FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7218/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7219FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7220/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7221FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7222#if 0
7223FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7224{
7225 /* Docs say register only. */
7226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7227
7228 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7229 {
7230 case IEM_OP_PRF_SIZE_OP: /* SSE */
7231 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7232 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7233 IEM_MC_BEGIN(2, 0);
7234 IEM_MC_ARG(uint64_t *, pDst, 0);
7235 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7236 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7237 IEM_MC_PREPARE_SSE_USAGE();
7238 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7239 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7240 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7241 IEM_MC_ADVANCE_RIP();
7242 IEM_MC_END();
7243 return VINF_SUCCESS;
7244
7245 case 0: /* MMX */
7246 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7247 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7248 IEM_MC_BEGIN(2, 0);
7249 IEM_MC_ARG(uint64_t *, pDst, 0);
7250 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7251 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7252 IEM_MC_PREPARE_FPU_USAGE();
7253 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7254 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7255 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7256 IEM_MC_ADVANCE_RIP();
7257 IEM_MC_END();
7258 return VINF_SUCCESS;
7259
7260 default:
7261 return IEMOP_RAISE_INVALID_OPCODE();
7262 }
7263}
7264#endif
7265
7266
7267/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7268FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7269{
7270 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7271 /** @todo testcase: Check that the instruction implicitly clears the high
7272 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7273 * and opcode modifications are made to work with the whole width (not
7274 * just 128). */
7275 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7276 /* Docs say register only. */
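/* PMOVMSKB gathers the most significant bit of each byte in the 64-bit MMX
   source into the low 8 bits of the destination general register. */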
7277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7279 {
7280 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7281 IEM_MC_BEGIN(2, 0);
7282 IEM_MC_ARG(uint64_t *, pDst, 0);
7283 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7284 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7285 IEM_MC_PREPARE_FPU_USAGE();
7286 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7287 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7288 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7289 IEM_MC_ADVANCE_RIP();
7290 IEM_MC_END();
7291 return VINF_SUCCESS;
7292 }
7293 return IEMOP_RAISE_INVALID_OPCODE();
7294}
7295
7296 /** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7297FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7298{
7299 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7300 /** @todo testcase: Check that the instruction implicitly clears the high
7301 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7302 * and opcode modifications are made to work with the whole width (not
7303 * just 128). */
7304 IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7305 /* Docs say register only. */
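/* The 128-bit form gathers the most significant bit of each of the 16 bytes
   in the XMM source into the low 16 bits of the destination register. */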
7306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7307 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7308 {
7309 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7310 IEM_MC_BEGIN(2, 0);
7311 IEM_MC_ARG(uint64_t *, pDst, 0);
7312 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7313 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7314 IEM_MC_PREPARE_SSE_USAGE();
7315 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7316 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7317 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7318 IEM_MC_ADVANCE_RIP();
7319 IEM_MC_END();
7320 return VINF_SUCCESS;
7321 }
7322 return IEMOP_RAISE_INVALID_OPCODE();
7323}
7324
7325/* Opcode 0xf3 0x0f 0xd7 - invalid */
7326/* Opcode 0xf2 0x0f 0xd7 - invalid */
7327
7328
7329/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7330FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7331/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7332FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7333/* Opcode 0xf3 0x0f 0xd8 - invalid */
7334/* Opcode 0xf2 0x0f 0xd8 - invalid */
7335
7336/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7337FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7338/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7339FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7340/* Opcode 0xf3 0x0f 0xd9 - invalid */
7341/* Opcode 0xf2 0x0f 0xd9 - invalid */
7342
7343/** Opcode 0x0f 0xda - pminub Pq, Qq */
7344FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7345/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7346FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7347/* Opcode 0xf3 0x0f 0xda - invalid */
7348/* Opcode 0xf2 0x0f 0xda - invalid */
7349
7350/** Opcode 0x0f 0xdb - pand Pq, Qq */
7351FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7352/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7353FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7354/* Opcode 0xf3 0x0f 0xdb - invalid */
7355/* Opcode 0xf2 0x0f 0xdb - invalid */
7356
7357/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7358FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7359/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7360FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7361/* Opcode 0xf3 0x0f 0xdc - invalid */
7362/* Opcode 0xf2 0x0f 0xdc - invalid */
7363
7364/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7365FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7366/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7367FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7368/* Opcode 0xf3 0x0f 0xdd - invalid */
7369/* Opcode 0xf2 0x0f 0xdd - invalid */
7370
7371/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7372FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7373/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7374FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7375/* Opcode 0xf3 0x0f 0xde - invalid */
7376/* Opcode 0xf2 0x0f 0xde - invalid */
7377
7378/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7379FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7380/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7381FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7382/* Opcode 0xf3 0x0f 0xdf - invalid */
7383/* Opcode 0xf2 0x0f 0xdf - invalid */
7384
7385/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7386FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7387/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7388FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7389/* Opcode 0xf3 0x0f 0xe0 - invalid */
7390/* Opcode 0xf2 0x0f 0xe0 - invalid */
7391
7392/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7393FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7394/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7395FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7396/* Opcode 0xf3 0x0f 0xe1 - invalid */
7397/* Opcode 0xf2 0x0f 0xe1 - invalid */
7398
7399/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7400FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7401/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7402FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7403/* Opcode 0xf3 0x0f 0xe2 - invalid */
7404/* Opcode 0xf2 0x0f 0xe2 - invalid */
7405
7406/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7407FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7408/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7409FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7410/* Opcode 0xf3 0x0f 0xe3 - invalid */
7411/* Opcode 0xf2 0x0f 0xe3 - invalid */
7412
7413/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7414FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7415/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7416FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7417/* Opcode 0xf3 0x0f 0xe4 - invalid */
7418/* Opcode 0xf2 0x0f 0xe4 - invalid */
7419
7420/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7421FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7422/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7423FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7424/* Opcode 0xf3 0x0f 0xe5 - invalid */
7425/* Opcode 0xf2 0x0f 0xe5 - invalid */
7426
7427/* Opcode 0x0f 0xe6 - invalid */
7428/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7429FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7430/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7431FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7432/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7433FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7434
7435
7436/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7437FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7438{
7439 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
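/* MOVNTQ stores an MMX register to memory with a non-temporal hint; only
   the memory form is defined, the register form raises #UD. */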
7440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7441 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7442 {
7443 /* Register, memory. */
7444 IEM_MC_BEGIN(0, 2);
7445 IEM_MC_LOCAL(uint64_t, uSrc);
7446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7447
7448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7450 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7451 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7452
7453 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7454 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7455
7456 IEM_MC_ADVANCE_RIP();
7457 IEM_MC_END();
7458 return VINF_SUCCESS;
7459 }
7460 /* The register, register encoding is invalid. */
7461 return IEMOP_RAISE_INVALID_OPCODE();
7462}
7463
7464/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7465FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7466{
7467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7468 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7469 {
7470 /* Register, memory. */
7471 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
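/* MOVNTDQ is a non-temporal store of an XMM register; the effective address
   must be 16-byte aligned, hence the aligned store helper below. */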
7472 IEM_MC_BEGIN(0, 2);
7473 IEM_MC_LOCAL(uint128_t, uSrc);
7474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7475
7476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7478 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7479 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7480
7481 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7482 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7483
7484 IEM_MC_ADVANCE_RIP();
7485 IEM_MC_END();
7486 return VINF_SUCCESS;
7487 }
7488
7489 /* The register, register encoding is invalid. */
7490 return IEMOP_RAISE_INVALID_OPCODE();
7491}
7492
7493/* Opcode 0xf3 0x0f 0xe7 - invalid */
7494/* Opcode 0xf2 0x0f 0xe7 - invalid */
7495
7496
7497/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7498FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7499/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7500FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7501/* Opcode 0xf3 0x0f 0xe8 - invalid */
7502/* Opcode 0xf2 0x0f 0xe8 - invalid */
7503
7504/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7505FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7506/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7507FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7508/* Opcode 0xf3 0x0f 0xe9 - invalid */
7509/* Opcode 0xf2 0x0f 0xe9 - invalid */
7510
7511/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7512FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7513/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7514FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7515/* Opcode 0xf3 0x0f 0xea - invalid */
7516/* Opcode 0xf2 0x0f 0xea - invalid */
7517
7518/** Opcode 0x0f 0xeb - por Pq, Qq */
7519FNIEMOP_STUB(iemOp_por_Pq_Qq);
7520/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7521FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7522/* Opcode 0xf3 0x0f 0xeb - invalid */
7523/* Opcode 0xf2 0x0f 0xeb - invalid */
7524
7525/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7526FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7527/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7528FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7529/* Opcode 0xf3 0x0f 0xec - invalid */
7530/* Opcode 0xf2 0x0f 0xec - invalid */
7531
7532/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7533FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7534/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7535FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7536/* Opcode 0xf3 0x0f 0xed - invalid */
7537/* Opcode 0xf2 0x0f 0xed - invalid */
7538
7539/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7540FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7541/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7542FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7543/* Opcode 0xf3 0x0f 0xee - invalid */
7544/* Opcode 0xf2 0x0f 0xee - invalid */
7545
7546
7547/** Opcode 0x0f 0xef - pxor Pq, Qq */
7548FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7549{
7550 IEMOP_MNEMONIC(pxor, "pxor");
7551 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7552}
7553
7554/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7555FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7556{
7557 IEMOP_MNEMONIC(vpxor, "vpxor");
7558 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7559}
7560
7561/* Opcode 0xf3 0x0f 0xef - invalid */
7562/* Opcode 0xf2 0x0f 0xef - invalid */
7563
7564/* Opcode 0x0f 0xf0 - invalid */
7565/* Opcode 0x66 0x0f 0xf0 - invalid */
7566/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7567FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7568
7569/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7570FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7571/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7572FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7573/* Opcode 0xf2 0x0f 0xf1 - invalid */
7574
7575/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7576FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7577/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7578FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7579/* Opcode 0xf2 0x0f 0xf2 - invalid */
7580
7581/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7582FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7583/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7584FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7585/* Opcode 0xf2 0x0f 0xf3 - invalid */
7586
7587/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7588FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7589/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7590FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7591/* Opcode 0xf2 0x0f 0xf4 - invalid */
7592
7593/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7594FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7595/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7596FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7597/* Opcode 0xf2 0x0f 0xf5 - invalid */
7598
7599/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7600FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7601/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7602FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7603/* Opcode 0xf2 0x0f 0xf6 - invalid */
7604
7605/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7606FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7607/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7608FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7609/* Opcode 0xf2 0x0f 0xf7 - invalid */
7610
7611/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7612FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7613/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7614FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7615/* Opcode 0xf2 0x0f 0xf8 - invalid */
7616
7617/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7618FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7619/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7620FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7621/* Opcode 0xf2 0x0f 0xf9 - invalid */
7622
7623/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7624FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7625/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7626FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7627/* Opcode 0xf2 0x0f 0xfa - invalid */
7628
7629/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7630FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7631/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7632FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7633/* Opcode 0xf2 0x0f 0xfb - invalid */
7634
7635/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7636FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7637/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7638FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7639/* Opcode 0xf2 0x0f 0xfc - invalid */
7640
7641/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7642FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7643/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7644FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7645/* Opcode 0xf2 0x0f 0xfd - invalid */
7646
7647/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7648FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7649/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7650FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7651/* Opcode 0xf2 0x0f 0xfe - invalid */
7652
7653
7654/** Opcode **** 0x0f 0xff - UD0 */
7655FNIEMOP_DEF(iemOp_ud0)
7656{
7657 IEMOP_MNEMONIC(ud0, "ud0");
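/* Intel documents UD0 as having a ModR/M byte, so on Intel CPUs it (and any
   displacement/SIB bytes) must be consumed for the instruction length to
   come out right before #UD is raised. */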
7658 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7659 {
7660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7661#ifndef TST_IEM_CHECK_MC
7662 RTGCPTR GCPtrEff;
7663 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7664 if (rcStrict != VINF_SUCCESS)
7665 return rcStrict;
7666#endif
7667 IEMOP_HLP_DONE_DECODING();
7668 }
7669 return IEMOP_RAISE_INVALID_OPCODE();
7670}
7671
7672
7673
7674/**
7675 * Two byte opcode map, first byte 0x0f.
7676 *
7677 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7678 * check if it needs updating as well when making changes.
7679 */
7680IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7681{
7682 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
7683 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7684 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7685 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7686 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7687 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7688 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7689 /* 0x06 */ IEMOP_X4(iemOp_clts),
7690 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7691 /* 0x08 */ IEMOP_X4(iemOp_invd),
7692 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7693 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7694 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7695 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7696 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7697 /* 0x0e */ IEMOP_X4(iemOp_femms),
7698 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7699
7700 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7701 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7702 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7703 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7704 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7705 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7706 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7707 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7708 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7709 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7710 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7711 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7712 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7713 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7714 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7715 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7716
7717 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7718 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7719 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7720 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7721 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7722 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7723 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7724 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7725 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7726 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7727 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7728 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7729 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7730 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7731 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7732 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7733
7734 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7735 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7736 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7737 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7738 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7739 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7740 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7741 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7742 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7743 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7744 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7745 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7746 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7747 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7748 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7749 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7750
7751 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7752 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7753 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7754 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7755 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7756 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7757 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7758 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7759 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7760 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7761 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7762 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7763 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7764 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7765 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7766 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7767
7768 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7769 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7770 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7771 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7772 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7773 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7774 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7775 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7776 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7777 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7778 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7779 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7780 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7781 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7782 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7783 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7784
7785 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7786 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7787 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7788 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7789 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7790 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7791 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7792 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7793 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7794 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7795 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7796 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7797 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7798 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7799 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7800 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7801
7802 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7803 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7804 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7805 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7806 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7807 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7808 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7809 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7810
7811 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7812 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7813 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7814 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7815 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7816 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7817 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7818 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7819
7820 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7821 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7822 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7823 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7824 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7825 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7826 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7827 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7828 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7829 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7830 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7831 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7832 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7833 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7834 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7835 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7836
7837 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7838 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7839 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7840 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7841 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7842 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7843 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7844 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7845 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7846 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7847 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7848 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7849 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7850 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7851 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7852 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7853
7854 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7855 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7856 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7857 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7858 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7859 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7860 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7861 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7862 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7863 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7864 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7865 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7866 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7867 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7868 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7869 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7870
7871 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7872 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7873 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7874 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7875 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7876 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7877 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7878 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7879 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7880 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7881 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7882 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7883 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7884 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7885 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7886 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7887
7888 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7889 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7890 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7891 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7892 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7893 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7894 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7895 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7896 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7897 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7898 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7899 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7900 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7901 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7902 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7903 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7904
7905 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7906 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7907 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7908 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7909 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7910 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7911 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7912 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7913 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7914 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7915 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7916 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7917 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7918 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7919 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7920 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7921
7922 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7923 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7924 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7925 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7926 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7927 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7928 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7929 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7930 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7931 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7932 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7933 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7934 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7935 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7936 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7937 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7938
7939 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7940 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7941 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7942 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7943 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7944 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7945 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7946 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7947 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7948 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7949 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7950 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7951 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7952 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7953 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7954 /* 0xff */ IEMOP_X4(iemOp_ud0),
7955};
7956AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
7957
7958
7959/**
7960 * VEX opcode map \#1.
7961 *
7962 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
7963 * it needs updating too when making changes.
7964 */
7965IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
7966{
7967 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
7968 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7969 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7970 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7971 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7972 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7973 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7974 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7975 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7976 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7977 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7978 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7979 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
7980 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7981 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7982 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7983 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7984
7985 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7986 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7987 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7988 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7989 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7990 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7991 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7992 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7993 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7994 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7995 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7996 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7997 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7998 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7999 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
8000 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
8001
8002 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
8003 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
8004 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
8005 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
8006 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
8007 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
8008 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
8009 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
8010 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8011 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8012 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8013 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8014 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8015 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8016 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8017 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8018
8019 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
8020 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
8021 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
8022 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
8023 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
8024 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
8025 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
8026 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
8027 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8028 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8029 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8030 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8031 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8032 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8033 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8034 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8035
8036 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
8037 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
8038 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
8039 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
8040 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
8041 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
8042 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
8043 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
8044 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
8045 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
8046 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
8047 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
8048 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
8049 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
8050 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
8051 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
8052
8053 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8054 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8055 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8056 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8057 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8058 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8059 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8060 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8061 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8062 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8063 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8064 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8065 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8066 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8067 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8068 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8069
8070 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8071 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8072 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8073 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8074 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8075 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8076 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8077 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8078 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8079 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8080 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8081 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8082 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8083 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8084 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8085 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8086
8087 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8088 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8089 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8090 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8091 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8092 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8093 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8094 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8095 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
8096 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
8097 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
8098 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
8099 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8100 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8101 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8102 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8103
8104 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
8105 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
8106 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
8107 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
8108 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
8109 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
8110 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
8111 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
8112 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
8113 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
8114 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
8115 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
8116 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
8117 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
8118 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
8119 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
8120
8121 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
8122 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
8123 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
8124 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
8125 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
8126 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
8127 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
8128 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
8129 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
8130 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
8131 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
8132 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
8133 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
8134 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
8135 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
8136 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
8137
8138 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8139 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8140 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8141 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8142 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8143 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8144 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8145 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8146 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8147 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8148 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
8149 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
8150 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
8151 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
8152 /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
8153 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
8154
8155 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8156 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8157 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8158 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8159 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8160 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8161 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8162 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8163 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8164 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8165 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
8166 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
8167 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
8168 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
8169 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
8170 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
8171
8172 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8173 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8174 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8175 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8176 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8177 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8178 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8179 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8180 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8181 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8182 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
8183 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
8184 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
8185 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
8186 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
8187 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
8188
8189 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8190 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8191 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8192 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8193 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8194 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8195 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8196 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8197 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8198 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8199 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8200 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8201 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8202 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8203 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8204 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8205
8206 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8207 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8208 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8209 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8210 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8211 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8212 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8213 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8214 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8215 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8216 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8217 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8218 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8219 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8220 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8221 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8222
8223 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8224 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8225 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8226 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8227 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8228 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8229 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8230 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8231 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8232 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8233 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8234 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8235 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8236 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8237 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8238 /* 0xff */ IEMOP_X4(iemOp_ud0),
8239};
8240AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8241/** @} */
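/*
 * Illustrative sketch only, not code taken from this file: the 1024-entry map
 * above is laid out as 256 opcodes x 4 entries, one entry per mandatory-prefix
 * variant in the order none, 0x66, 0xF3, 0xF2 (hence the AssertCompile of
 * 256 * 4 = 1024).  Assuming the prefix decoder records the last mandatory
 * prefix in a 0..3 index in that same order, in a field along the lines of
 * pVCpu->iem.s.idxPrefix, dispatching a two byte opcode would look roughly
 * like this:
 *
 * @code
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 * @endcode
 */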
8242
8243