VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@65778

Last change on this file since 65778 was 65778, checked in by vboxsync, 8 years ago

IEM: Corrected invalid opcode decoding in groups 12, 13, and 14.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 305.9 KB
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 65778 2017-02-13 17:38:28Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */


/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

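    /* Mod == 3 is the register form; otherwise the selector is written to memory. */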
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5 - common worker for verr and verw. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
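    /* The reg field of the ModRM byte selects the group member. */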
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
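    /* Requires XSAVE/XRSTOR support in the guest CPU profile; raises #UD otherwise. */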
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
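                /* Older CPUs return the reserved MSW bits as set: OR 0xffe0 on 386, 0xfff0 on 286; 486 and later store CR0 unmodified. */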
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
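    /* Memory forms (mod != 3) dispatch on the reg field via the table; register
       forms encode extra instructions (vmcall, monitor, xgetbv, swapgs, ...) in reg+rm. */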
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

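            /* The 32-bit and 64-bit forms share one implementation. */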
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);

/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
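        /* Only the low quadword is copied; the destination's high quadword is preserved. */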
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

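        /* movaps requires a 16-byte aligned operand; the aligned fetch raises #GP(0) if it is not. */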
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT


/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
{
    IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

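        /* The non-temporal hint is not modelled; this is a plain aligned 128-bit store. */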
        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}

/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}

/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */


/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param   a_Cnd       The conditional "microcode" operation.
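 *
 * @remarks In 64-bit mode a 32-bit CMOVcc writes the destination register even
 *          when the condition is false (the upper half is cleared), hence the
 *          IEM_MC_ELSE() branches in the 32-bit cases below.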
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1809
1810
1811
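/*
 * Sidebar: plain C sketch of two of the EFLAGS conditions the CMOVcc
 * decoders below feed to CMOV_X.  Purely illustrative helpers, not part of
 * the IEM interface; the X86_EFL_xxx masks are the ones used throughout
 * this file.
 */
DECLINLINE(bool) iemCmovSketchCondBe(uint32_t fEFlags)
{
    /* cmovbe: move when CF or ZF is set (unsigned below or equal). */
    return (fEFlags & (X86_EFL_CF | X86_EFL_ZF)) != 0;
}

DECLINLINE(bool) iemCmovSketchCondL(uint32_t fEFlags)
{
    /* cmovl: move when SF != OF (signed less than). */
    return RT_BOOL(fEFlags & X86_EFL_SF) != RT_BOOL(fEFlags & X86_EFL_OF);
}
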
1812/** Opcode 0x0f 0x40. */
1813FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1814{
1815 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
1816 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1817}
1818
1819
1820/** Opcode 0x0f 0x41. */
1821FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1822{
1823 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
1824 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1825}
1826
1827
1828/** Opcode 0x0f 0x42. */
1829FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
1830{
1831 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
1832 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
1833}
1834
1835
1836/** Opcode 0x0f 0x43. */
1837FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
1838{
1839 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
1840 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
1841}
1842
1843
1844/** Opcode 0x0f 0x44. */
1845FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
1846{
1847 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
1848 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
1849}
1850
1851
1852/** Opcode 0x0f 0x45. */
1853FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
1854{
1855 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
1856 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
1857}
1858
1859
1860/** Opcode 0x0f 0x46. */
1861FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
1862{
1863 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
1864 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1865}
1866
1867
1868/** Opcode 0x0f 0x47. */
1869FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
1870{
1871 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
1872 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1873}
1874
1875
1876/** Opcode 0x0f 0x48. */
1877FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
1878{
1879 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
1880 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
1881}
1882
1883
1884/** Opcode 0x0f 0x49. */
1885FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
1886{
1887 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
1888 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
1889}
1890
1891
1892/** Opcode 0x0f 0x4a. */
1893FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
1894{
1895 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
1896 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
1897}
1898
1899
1900/** Opcode 0x0f 0x4b. */
1901FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
1902{
1903 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
1904 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
1905}
1906
1907
1908/** Opcode 0x0f 0x4c. */
1909FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
1910{
1911 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
1912 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
1913}
1914
1915
1916/** Opcode 0x0f 0x4d. */
1917FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
1918{
1919 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
1920 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
1921}
1922
1923
1924/** Opcode 0x0f 0x4e. */
1925FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
1926{
1927 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
1928 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1929}
1930
1931
1932/** Opcode 0x0f 0x4f. */
1933FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
1934{
1935 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
1936 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1937}
1938
1939#undef CMOV_X
1940
1941/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
1942FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1943/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
1944FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1945/* Opcode 0xf3 0x0f 0x50 - invalid */
1946/* Opcode 0xf2 0x0f 0x50 - invalid */
1947
1948/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
1949FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1950/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
1951FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1952/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
1953FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1954/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1955FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1956
1957/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
1958FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1959/* Opcode 0x66 0x0f 0x52 - invalid */
1960/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
1961FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1962/* Opcode 0xf2 0x0f 0x52 - invalid */
1963
1964/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
1965FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1966/* Opcode 0x66 0x0f 0x53 - invalid */
1967/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
1968FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1969/* Opcode 0xf2 0x0f 0x53 - invalid */
1970
1971/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
1972FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1973/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
1974FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1975/* Opcode 0xf3 0x0f 0x54 - invalid */
1976/* Opcode 0xf2 0x0f 0x54 - invalid */
1977
1978/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
1979FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1980/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
1981FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1982/* Opcode 0xf3 0x0f 0x55 - invalid */
1983/* Opcode 0xf2 0x0f 0x55 - invalid */
1984
1985/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
1986FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1987/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
1988FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1989/* Opcode 0xf3 0x0f 0x56 - invalid */
1990/* Opcode 0xf2 0x0f 0x56 - invalid */
1991
1992/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
1993FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
1994/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
1995FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
1996/* Opcode 0xf3 0x0f 0x57 - invalid */
1997/* Opcode 0xf2 0x0f 0x57 - invalid */
1998
1999/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2000FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2001/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2002FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2003/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2004FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2005/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2006FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2007
2008/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2009FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2010/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2011FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2012/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2013FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2014/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2015FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2016
2017/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2018FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2019/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2020FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2021/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2022FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2023/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2024FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2025
2026/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2027FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2028/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2029FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2030/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2031FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2032/* Opcode 0xf2 0x0f 0x5b - invalid */
2033
2034/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2035FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2036/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2037FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2038/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2039FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2040/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2041FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2042
2043/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2044FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2045/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2046FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2047/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2048FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2049/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2050FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2051
2052/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2053FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2054/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2055FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2056/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2057FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2058/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2059FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2060
2061/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2062FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2063/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2064FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2065/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2066FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2067/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2068FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2069
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2125
2126
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2186
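/*
 * Sidebar: illustrative sketch of what a "low halves to full register"
 * operation does, using 64-bit punpcklbw as the example.  Hypothetical
 * helper for documentation; the real work happens in the g_iemAImpl_xxx
 * workers passed to the functions above.
 */
DECLINLINE(uint64_t) iemPunpcklbwSketchU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);     /* even result bytes: dst */
        uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); /* odd result bytes: src */
    }
    return uResult;
}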
2187
2188/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2189FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2190{
2191 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2193}
2194
/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2201
2202/* Opcode 0xf3 0x0f 0x60 - invalid */
2203
2204
/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2211
2212/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2213FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2214{
2215 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2217}
2218
2219/* Opcode 0xf3 0x0f 0x61 - invalid */
2220
2221
2222/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2223FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2224{
2225 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2226 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2227}
2228
2229/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2230FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2231{
2232 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2233 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2234}
2235
2236/* Opcode 0xf3 0x0f 0x62 - invalid */
2237
2238
2239
2240/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2241FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2242/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2243FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2244/* Opcode 0xf3 0x0f 0x63 - invalid */
2245
2246/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2247FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2248/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2249FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2250/* Opcode 0xf3 0x0f 0x64 - invalid */
2251
2252/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2253FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2254/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2255FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2256/* Opcode 0xf3 0x0f 0x65 - invalid */
2257
2258/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2259FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2260/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2261FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2262/* Opcode 0xf3 0x0f 0x66 - invalid */
2263
2264/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2265FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2268/* Opcode 0xf3 0x0f 0x67 - invalid */
2269
2270
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access.
 *
 * Exceptions type 4.
 */
2281FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2282{
2283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2284 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2286 {
2287 /*
2288 * Register, register.
2289 */
2290 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2291 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2293 IEM_MC_BEGIN(2, 0);
2294 IEM_MC_ARG(uint64_t *, pDst, 0);
2295 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2296 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2297 IEM_MC_PREPARE_FPU_USAGE();
2298 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2299 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2300 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2301 IEM_MC_ADVANCE_RIP();
2302 IEM_MC_END();
2303 }
2304 else
2305 {
2306 /*
2307 * Register, memory.
2308 */
2309 IEM_MC_BEGIN(2, 2);
2310 IEM_MC_ARG(uint64_t *, pDst, 0);
2311 IEM_MC_LOCAL(uint64_t, uSrc);
2312 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2314
2315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2317 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2318 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2319
2320 IEM_MC_PREPARE_FPU_USAGE();
2321 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2322 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2323
2324 IEM_MC_ADVANCE_RIP();
2325 IEM_MC_END();
2326 }
2327 return VINF_SUCCESS;
2328}
2329
2330
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 128-bit aligned access where the instruction may read the full 128
 * bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint128_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint128_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2386
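/*
 * Sidebar: counterpart sketch for the "high halves to full register" workers
 * above, using 64-bit punpckhbw; the upper 32 bits of each operand are
 * interleaved.  Hypothetical helper for documentation only.
 */
DECLINLINE(uint64_t) iemPunpckhbwSketchU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (32 + i * 8)) & 0xff) << (i * 16);     /* even result bytes: dst */
        uResult |= ((uSrc >> (32 + i * 8)) & 0xff) << (i * 16 + 8); /* odd result bytes: src */
    }
    return uResult;
}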
2387
2388/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2389FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2390{
2391 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2392 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2393}
2394
2395/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2396FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2397{
2398 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2399 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2400}
2401/* Opcode 0xf3 0x0f 0x68 - invalid */
2402
2403
2404/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2405FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2406{
2407 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2409}
2410
/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2418/* Opcode 0xf3 0x0f 0x69 - invalid */
2419
2420
2421/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2422FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2423{
2424 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2425 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2426}
2427
/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
{
    IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2434/* Opcode 0xf3 0x0f 0x6a - invalid */
2435
2436
2437/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2438FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2439/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2440FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2441/* Opcode 0xf3 0x0f 0x6b - invalid */
2442
2443
2444/* Opcode 0x0f 0x6c - invalid */
2445
2446/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2447FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2448{
2449 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2450 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2451}
2452
2453/* Opcode 0xf3 0x0f 0x6c - invalid */
2454/* Opcode 0xf2 0x0f 0x6c - invalid */
2455
2456
2457/* Opcode 0x0f 0x6d - invalid */
2458
/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
{
    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2465
2466/* Opcode 0xf3 0x0f 0x6d - invalid */
2467
2468
2469/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2470FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2471{
2472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2473 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2474 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2475 else
2476 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2478 {
2479 /* MMX, greg */
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_BEGIN(0, 1);
2482 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2483 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2484 IEM_MC_LOCAL(uint64_t, u64Tmp);
2485 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2486 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2487 else
2488 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2489 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2490 IEM_MC_ADVANCE_RIP();
2491 IEM_MC_END();
2492 }
2493 else
2494 {
2495 /* MMX, [mem] */
2496 IEM_MC_BEGIN(0, 2);
2497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2498 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2501 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2502 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2503 {
2504 IEM_MC_LOCAL(uint64_t, u64Tmp);
2505 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2506 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2507 }
2508 else
2509 {
2510 IEM_MC_LOCAL(uint32_t, u32Tmp);
2511 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2512 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2513 }
2514 IEM_MC_ADVANCE_RIP();
2515 IEM_MC_END();
2516 }
2517 return VINF_SUCCESS;
2518}
2519
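/*
 * Sidebar: the movd/movq split above keys purely off REX.W.  A minimal
 * sketch of the selection and of the zero extension the 32-bit form applies
 * (hypothetical helper, not part of the IEM interface):
 */
DECLINLINE(uint64_t) iemMovdMovqSketch(uint64_t uSrc64, bool fRexW)
{
    /* REX.W set: full 64-bit movq; clear: movd zero-extends the low 32 bits. */
    return fRexW ? uSrc64 : (uint64_t)(uint32_t)uSrc64;
}
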
2520/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2521FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2522{
2523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
    else
        IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2528 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2529 {
2530 /* XMM, greg*/
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_BEGIN(0, 1);
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2535 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2536 {
2537 IEM_MC_LOCAL(uint64_t, u64Tmp);
2538 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2539 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2540 }
2541 else
2542 {
2543 IEM_MC_LOCAL(uint32_t, u32Tmp);
2544 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2545 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2546 }
2547 IEM_MC_ADVANCE_RIP();
2548 IEM_MC_END();
2549 }
2550 else
2551 {
2552 /* XMM, [mem] */
2553 IEM_MC_BEGIN(0, 2);
2554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2558 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2559 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2560 {
2561 IEM_MC_LOCAL(uint64_t, u64Tmp);
2562 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2563 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2564 }
2565 else
2566 {
2567 IEM_MC_LOCAL(uint32_t, u32Tmp);
2568 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2570 }
2571 IEM_MC_ADVANCE_RIP();
2572 IEM_MC_END();
2573 }
2574 return VINF_SUCCESS;
2575}
2576
2577/* Opcode 0xf3 0x0f 0x6e - invalid */
2578
2579
2580/** Opcode 0x0f 0x6f - movq Pq, Qq */
2581FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2582{
2583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2584 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2586 {
2587 /*
2588 * Register, register.
2589 */
2590 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2591 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2593 IEM_MC_BEGIN(0, 1);
2594 IEM_MC_LOCAL(uint64_t, u64Tmp);
2595 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2596 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2597 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2598 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2599 IEM_MC_ADVANCE_RIP();
2600 IEM_MC_END();
2601 }
2602 else
2603 {
2604 /*
2605 * Register, memory.
2606 */
2607 IEM_MC_BEGIN(0, 2);
2608 IEM_MC_LOCAL(uint64_t, u64Tmp);
2609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2610
2611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2613 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2614 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2615 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2616 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2617
2618 IEM_MC_ADVANCE_RIP();
2619 IEM_MC_END();
2620 }
2621 return VINF_SUCCESS;
2622}
2623
2624/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2625FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2626{
2627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2628 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2630 {
2631 /*
2632 * Register, register.
2633 */
2634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2635 IEM_MC_BEGIN(0, 0);
2636 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2637 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2638 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2639 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2640 IEM_MC_ADVANCE_RIP();
2641 IEM_MC_END();
2642 }
2643 else
2644 {
2645 /*
2646 * Register, memory.
2647 */
2648 IEM_MC_BEGIN(0, 2);
2649 IEM_MC_LOCAL(uint128_t, u128Tmp);
2650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2651
2652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2656 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2657 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2658
2659 IEM_MC_ADVANCE_RIP();
2660 IEM_MC_END();
2661 }
2662 return VINF_SUCCESS;
2663}
2664
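/*
 * Sidebar: the movdqa path above relies on IEM_MC_FETCH_MEM_U128_ALIGN_SSE
 * to fault on a misaligned operand.  A sketch of the check, assuming the
 * usual 16-byte SSE alignment rule (#GP(0) on failure); illustrative only:
 */
DECLINLINE(bool) iemIsSseAlignedSketch(RTGCPTR GCPtrMem)
{
    return (GCPtrMem & 15) == 0; /* 128-bit operands must be 16-byte aligned. */
}
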
2665/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2666FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2667{
2668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2669 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2671 {
2672 /*
2673 * Register, register.
2674 */
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_BEGIN(0, 0);
2677 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2678 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2679 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2680 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2681 IEM_MC_ADVANCE_RIP();
2682 IEM_MC_END();
2683 }
2684 else
2685 {
2686 /*
2687 * Register, memory.
2688 */
2689 IEM_MC_BEGIN(0, 2);
2690 IEM_MC_LOCAL(uint128_t, u128Tmp);
2691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2692
2693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2696 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2697 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2698 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2699
2700 IEM_MC_ADVANCE_RIP();
2701 IEM_MC_END();
2702 }
2703 return VINF_SUCCESS;
2704}
2705
2706
2707/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2708FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2709{
2710 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2713 {
2714 /*
2715 * Register, register.
2716 */
2717 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2719
2720 IEM_MC_BEGIN(3, 0);
2721 IEM_MC_ARG(uint64_t *, pDst, 0);
2722 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2723 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2725 IEM_MC_PREPARE_FPU_USAGE();
2726 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2727 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2728 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2729 IEM_MC_ADVANCE_RIP();
2730 IEM_MC_END();
2731 }
2732 else
2733 {
2734 /*
2735 * Register, memory.
2736 */
2737 IEM_MC_BEGIN(3, 2);
2738 IEM_MC_ARG(uint64_t *, pDst, 0);
2739 IEM_MC_LOCAL(uint64_t, uSrc);
2740 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2742
2743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2744 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2745 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2747 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2748
2749 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2750 IEM_MC_PREPARE_FPU_USAGE();
2751 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2752 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2753
2754 IEM_MC_ADVANCE_RIP();
2755 IEM_MC_END();
2756 }
2757 return VINF_SUCCESS;
2758}
2759
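/*
 * Sidebar: an illustrative sketch of the word shuffle iemAImpl_pshufw
 * performs for the decoder above; each 2-bit field of the immediate picks
 * the source word for one destination word.  Hypothetical helper, not the
 * actual assembly worker.
 */
DECLINLINE(uint64_t) iemPshufwSketch(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iSel = (bImm >> (i * 2)) & 3; /* source word index */
        uResult |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (i * 16);
    }
    return uResult;
}
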
2760/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2761FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2762{
2763 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2765 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2766 {
2767 /*
2768 * Register, register.
2769 */
2770 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2772
2773 IEM_MC_BEGIN(3, 0);
2774 IEM_MC_ARG(uint128_t *, pDst, 0);
2775 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2776 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2777 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2778 IEM_MC_PREPARE_SSE_USAGE();
2779 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2780 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2781 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2782 IEM_MC_ADVANCE_RIP();
2783 IEM_MC_END();
2784 }
2785 else
2786 {
2787 /*
2788 * Register, memory.
2789 */
2790 IEM_MC_BEGIN(3, 2);
2791 IEM_MC_ARG(uint128_t *, pDst, 0);
2792 IEM_MC_LOCAL(uint128_t, uSrc);
2793 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2795
2796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2797 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2800 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2801
2802 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2803 IEM_MC_PREPARE_SSE_USAGE();
2804 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2805 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2806
2807 IEM_MC_ADVANCE_RIP();
2808 IEM_MC_END();
2809 }
2810 return VINF_SUCCESS;
2811}
2812
2813/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
2814FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2815{
2816 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2818 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2819 {
2820 /*
2821 * Register, register.
2822 */
2823 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2825
2826 IEM_MC_BEGIN(3, 0);
2827 IEM_MC_ARG(uint128_t *, pDst, 0);
2828 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2829 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2830 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2831 IEM_MC_PREPARE_SSE_USAGE();
2832 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2833 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2834 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2835 IEM_MC_ADVANCE_RIP();
2836 IEM_MC_END();
2837 }
2838 else
2839 {
2840 /*
2841 * Register, memory.
2842 */
2843 IEM_MC_BEGIN(3, 2);
2844 IEM_MC_ARG(uint128_t *, pDst, 0);
2845 IEM_MC_LOCAL(uint128_t, uSrc);
2846 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2848
2849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2850 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2851 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2853 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2854
2855 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2856 IEM_MC_PREPARE_SSE_USAGE();
2857 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2858 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2859
2860 IEM_MC_ADVANCE_RIP();
2861 IEM_MC_END();
2862 }
2863 return VINF_SUCCESS;
2864}
2865
2866/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
2867FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2868{
2869 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2872 {
2873 /*
2874 * Register, register.
2875 */
2876 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2878
2879 IEM_MC_BEGIN(3, 0);
2880 IEM_MC_ARG(uint128_t *, pDst, 0);
2881 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2882 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2883 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2884 IEM_MC_PREPARE_SSE_USAGE();
2885 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2886 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2887 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2888 IEM_MC_ADVANCE_RIP();
2889 IEM_MC_END();
2890 }
2891 else
2892 {
2893 /*
2894 * Register, memory.
2895 */
2896 IEM_MC_BEGIN(3, 2);
2897 IEM_MC_ARG(uint128_t *, pDst, 0);
2898 IEM_MC_LOCAL(uint128_t, uSrc);
2899 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2901
2902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2903 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2904 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2906 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2907
2908 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2909 IEM_MC_PREPARE_SSE_USAGE();
2910 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2911 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2912
2913 IEM_MC_ADVANCE_RIP();
2914 IEM_MC_END();
2915 }
2916 return VINF_SUCCESS;
2917}
2918
2919
2920/** Opcode 0x0f 0x71 11/2. */
2921FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2922
2923/** Opcode 0x66 0x0f 0x71 11/2. */
2924FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2925
2926/** Opcode 0x0f 0x71 11/4. */
2927FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2928
2929/** Opcode 0x66 0x0f 0x71 11/4. */
2930FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2931
2932/** Opcode 0x0f 0x71 11/6. */
2933FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2934
2935/** Opcode 0x66 0x0f 0x71 11/6. */
2936FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2937
2938
2939/**
2940 * Group 12 jump table for register variant.
2941 */
2942IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[8*4] =
2943{
2944 /** @todo decode imm8? */
2945 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2946 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2947 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2948 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2949 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2950 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2951 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2952 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2953};
2954
2955
2956/** Opcode 0x0f 0x71. */
2957FNIEMOP_DEF(iemOp_Grp12)
2958{
2959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2960 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2961 /* register, register */
2962 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2963 + pVCpu->iem.s.idxPrefix], bRm);
2964 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2965}
2966
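/*
 * Sidebar: sketch of how iemOp_Grp12 (and the sibling group decoders below)
 * index the 8x4 tables above - four columns per /r value, one per mandatory
 * prefix.  The column order (none, 0x66, 0xf3, 0xf2) matching idxPrefix is
 * an assumption read off the table layout; illustrative only.
 */
DECLINLINE(unsigned) iemGrpTableIndexSketch(uint8_t bRm, uint8_t idxPrefix)
{
    unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* /0../7 */
    return iReg * 4 + idxPrefix;                                              /* 0..31 */
}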
2967
2968/** Opcode 0x0f 0x72 11/2. */
2969FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
2970
2971/** Opcode 0x66 0x0f 0x72 11/2. */
2972FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
2973
2974/** Opcode 0x0f 0x72 11/4. */
2975FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
2976
2977/** Opcode 0x66 0x0f 0x72 11/4. */
2978FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
2979
2980/** Opcode 0x0f 0x72 11/6. */
2981FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
2982
2983/** Opcode 0x66 0x0f 0x72 11/6. */
2984FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
2985
2986
2987/**
2988 * Group 13 jump table for register variant.
2989 */
2990IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[8*4] =
2991{
2992 /** @todo decode imm8? */
2993 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2994 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2995 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2996 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2997 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2998 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2999 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3000 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3001};
3002
3003/** Opcode 0x0f 0x72. */
3004FNIEMOP_DEF(iemOp_Grp13)
3005{
3006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3007 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3008 /* register, register */
3009 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3010 + pVCpu->iem.s.idxPrefix], bRm);
3011 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3012}
3013
3014
3015/** Opcode 0x0f 0x73 11/2. */
3016FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3017
3018/** Opcode 0x66 0x0f 0x73 11/2. */
3019FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3020
3021/** Opcode 0x66 0x0f 0x73 11/3. */
3022FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3023
3024/** Opcode 0x0f 0x73 11/6. */
3025FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3026
3027/** Opcode 0x66 0x0f 0x73 11/6. */
3028FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3029
3030/** Opcode 0x66 0x0f 0x73 11/7. */
3031FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3032
3033/**
3034 * Group 14 jump table for register variant.
3035 */
3036IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[8*4] =
3037{
3038 /** @todo decode imm8? */
3039 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3040 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3041 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3042 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3043 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3044 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3045 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3046 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3047};
3048
3049
3050/** Opcode 0x0f 0x73. */
3051FNIEMOP_DEF(iemOp_Grp14)
3052{
3053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3054 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3055 /* register, register */
3056 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3057 + pVCpu->iem.s.idxPrefix], bRm);
3058 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3059}
3060
3061
3062/**
3063 * Common worker for MMX instructions on the form:
3064 * pxxx mm1, mm2/mem64
3065 */
3066FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3067{
3068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3069 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3070 {
3071 /*
3072 * Register, register.
3073 */
3074 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3075 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3077 IEM_MC_BEGIN(2, 0);
3078 IEM_MC_ARG(uint64_t *, pDst, 0);
3079 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3080 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3081 IEM_MC_PREPARE_FPU_USAGE();
3082 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3083 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3084 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3085 IEM_MC_ADVANCE_RIP();
3086 IEM_MC_END();
3087 }
3088 else
3089 {
3090 /*
3091 * Register, memory.
3092 */
3093 IEM_MC_BEGIN(2, 2);
3094 IEM_MC_ARG(uint64_t *, pDst, 0);
3095 IEM_MC_LOCAL(uint64_t, uSrc);
3096 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3098
3099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3101 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3102 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3103
3104 IEM_MC_PREPARE_FPU_USAGE();
3105 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3106 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3107
3108 IEM_MC_ADVANCE_RIP();
3109 IEM_MC_END();
3110 }
3111 return VINF_SUCCESS;
3112}
3113
3114
3115/**
3116 * Common worker for SSE2 instructions on the forms:
3117 * pxxx xmm1, xmm2/mem128
3118 *
3119 * Proper alignment of the 128-bit operand is enforced.
3120 * Exceptions type 4. SSE2 cpuid checks.
3121 */
3122FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3123{
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3126 {
3127 /*
3128 * Register, register.
3129 */
3130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3131 IEM_MC_BEGIN(2, 0);
3132 IEM_MC_ARG(uint128_t *, pDst, 0);
3133 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3134 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3135 IEM_MC_PREPARE_SSE_USAGE();
3136 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3137 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3138 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3139 IEM_MC_ADVANCE_RIP();
3140 IEM_MC_END();
3141 }
3142 else
3143 {
3144 /*
3145 * Register, memory.
3146 */
3147 IEM_MC_BEGIN(2, 2);
3148 IEM_MC_ARG(uint128_t *, pDst, 0);
3149 IEM_MC_LOCAL(uint128_t, uSrc);
3150 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3152
3153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3155 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3156 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3157
3158 IEM_MC_PREPARE_SSE_USAGE();
3159 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3160 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3161
3162 IEM_MC_ADVANCE_RIP();
3163 IEM_MC_END();
3164 }
3165 return VINF_SUCCESS;
3166}
3167
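/*
 * Sidebar: illustrative sketch of the byte compare behind the pcmpeqb
 * decoders below; each result byte becomes all ones when the corresponding
 * input bytes match, otherwise all zeros.  Hypothetical helper, not the
 * real g_iemAImpl_pcmpeqb worker.
 */
DECLINLINE(uint64_t) iemPcmpeqbSketchU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 8; i++)
        if (((uDst >> (i * 8)) & 0xff) == ((uSrc >> (i * 8)) & 0xff))
            uResult |= UINT64_C(0xff) << (i * 8);
    return uResult;
}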
3168
3169/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3170FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3171{
3172 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3173 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3174}
3175
3176/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3177FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3178{
3179 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3180 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3181}
3182
3183/* Opcode 0xf3 0x0f 0x74 - invalid */
3184/* Opcode 0xf2 0x0f 0x74 - invalid */
3185
3186
3187/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3188FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3189{
3190 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3191 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3192}
3193
3194/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3195FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3196{
3197 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3198 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3199}
3200
3201/* Opcode 0xf3 0x0f 0x75 - invalid */
3202/* Opcode 0xf2 0x0f 0x75 - invalid */
3203
3204
3205/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3206FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3207{
3208 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3209 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3210}
3211
3212/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3213FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3214{
3215 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3216 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3217}
3218
3219/* Opcode 0xf3 0x0f 0x76 - invalid */
3220/* Opcode 0xf2 0x0f 0x76 - invalid */
3221
3222
3223/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3224FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3225/* Opcode 0x66 0x0f 0x77 - invalid */
3226/* Opcode 0xf3 0x0f 0x77 - invalid */
3227/* Opcode 0xf2 0x0f 0x77 - invalid */
3228
3229/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3230FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3231/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3232FNIEMOP_STUB(iemOp_AmdGrp17);
3233/* Opcode 0xf3 0x0f 0x78 - invalid */
3234/* Opcode 0xf2 0x0f 0x78 - invalid */
3235
3236/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3237FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3238/* Opcode 0x66 0x0f 0x79 - invalid */
3239/* Opcode 0xf3 0x0f 0x79 - invalid */
3240/* Opcode 0xf2 0x0f 0x79 - invalid */
3241
3242/* Opcode 0x0f 0x7a - invalid */
3243/* Opcode 0x66 0x0f 0x7a - invalid */
3244/* Opcode 0xf3 0x0f 0x7a - invalid */
3245/* Opcode 0xf2 0x0f 0x7a - invalid */
3246
3247/* Opcode 0x0f 0x7b - invalid */
3248/* Opcode 0x66 0x0f 0x7b - invalid */
3249/* Opcode 0xf3 0x0f 0x7b - invalid */
3250/* Opcode 0xf2 0x0f 0x7b - invalid */
3251
3252/* Opcode 0x0f 0x7c - invalid */
3253/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3254FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3255/* Opcode 0xf3 0x0f 0x7c - invalid */
3256/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3257FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3258
3259/* Opcode 0x0f 0x7d - invalid */
3260/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3261FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3262/* Opcode 0xf3 0x0f 0x7d - invalid */
3263/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3264FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3265
3266
3267/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3268FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3269{
3270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3271 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3272 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3273 else
3274 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3275 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3276 {
3277 /* greg, MMX */
3278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3279 IEM_MC_BEGIN(0, 1);
3280 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3281 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3282 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3283 {
3284 IEM_MC_LOCAL(uint64_t, u64Tmp);
3285 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3286 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3287 }
3288 else
3289 {
3290 IEM_MC_LOCAL(uint32_t, u32Tmp);
3291 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3292 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3293 }
3294 IEM_MC_ADVANCE_RIP();
3295 IEM_MC_END();
3296 }
3297 else
3298 {
3299 /* [mem], MMX */
3300 IEM_MC_BEGIN(0, 2);
3301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3302 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3305 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3306 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3307 {
3308 IEM_MC_LOCAL(uint64_t, u64Tmp);
3309 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3310 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3311 }
3312 else
3313 {
3314 IEM_MC_LOCAL(uint32_t, u32Tmp);
3315 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3316 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3317 }
3318 IEM_MC_ADVANCE_RIP();
3319 IEM_MC_END();
3320 }
3321 return VINF_SUCCESS;
3322}
3323
3324/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3325FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3326{
3327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovq_Eq_Vq, "vmovq Eq,Vq");
    else
        IEMOP_MNEMONIC(vmovd_Ed_Vd, "vmovd Ed,Vd");
3332 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3333 {
3334 /* greg, XMM */
3335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3336 IEM_MC_BEGIN(0, 1);
3337 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3338 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3339 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3340 {
3341 IEM_MC_LOCAL(uint64_t, u64Tmp);
3342 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3343 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3344 }
3345 else
3346 {
3347 IEM_MC_LOCAL(uint32_t, u32Tmp);
3348 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3349 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3350 }
3351 IEM_MC_ADVANCE_RIP();
3352 IEM_MC_END();
3353 }
3354 else
3355 {
3356 /* [mem], XMM */
3357 IEM_MC_BEGIN(0, 2);
3358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3359 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3362 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3363 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3364 {
3365 IEM_MC_LOCAL(uint64_t, u64Tmp);
3366 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3367 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3368 }
3369 else
3370 {
3371 IEM_MC_LOCAL(uint32_t, u32Tmp);
3372 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3373 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3374 }
3375 IEM_MC_ADVANCE_RIP();
3376 IEM_MC_END();
3377 }
3378 return VINF_SUCCESS;
3379}
3380
3381/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3382FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3383/* Opcode 0xf2 0x0f 0x7e - invalid */
3384
3385
3386/** Opcode 0x0f 0x7f - movq Qq, Pq */
3387FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3388{
3389 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3391 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3392 {
3393 /*
3394 * Register, register.
3395 */
3396 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3397 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3399 IEM_MC_BEGIN(0, 1);
3400 IEM_MC_LOCAL(uint64_t, u64Tmp);
3401 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3402 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3403 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3404 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3405 IEM_MC_ADVANCE_RIP();
3406 IEM_MC_END();
3407 }
3408 else
3409 {
3410 /*
3411 * Register, memory.
3412 */
3413 IEM_MC_BEGIN(0, 2);
3414 IEM_MC_LOCAL(uint64_t, u64Tmp);
3415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3416
3417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3419 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3420 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3421
3422 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3423 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3424
3425 IEM_MC_ADVANCE_RIP();
3426 IEM_MC_END();
3427 }
3428 return VINF_SUCCESS;
3429}
3430
3431/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3432FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3433{
3434 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3437 {
3438 /*
3439 * Register, register.
3440 */
3441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3442 IEM_MC_BEGIN(0, 0);
3443 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3444 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3445 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3446 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3447 IEM_MC_ADVANCE_RIP();
3448 IEM_MC_END();
3449 }
3450 else
3451 {
3452 /*
3453 * Register, memory.
3454 */
3455 IEM_MC_BEGIN(0, 2);
3456 IEM_MC_LOCAL(uint128_t, u128Tmp);
3457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3458
3459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3461 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3462 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3463
3464 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
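/* movdqa requires a 16-byte aligned memory operand; the _ALIGN_SSE store
   raises #GP(0) on misalignment, unlike the plain store used by movdqu. */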
3465 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3466
3467 IEM_MC_ADVANCE_RIP();
3468 IEM_MC_END();
3469 }
3470 return VINF_SUCCESS;
3471}
3472
3473/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3474FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3475{
3476 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3479 {
3480 /*
3481 * Register, register.
3482 */
3483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3484 IEM_MC_BEGIN(0, 0);
3485 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3487 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3488 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3489 IEM_MC_ADVANCE_RIP();
3490 IEM_MC_END();
3491 }
3492 else
3493 {
3494 /*
3495 * Register, memory.
3496 */
3497 IEM_MC_BEGIN(0, 2);
3498 IEM_MC_LOCAL(uint128_t, u128Tmp);
3499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3500
3501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3503 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3504 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3505
3506 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3507 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3508
3509 IEM_MC_ADVANCE_RIP();
3510 IEM_MC_END();
3511 }
3512 return VINF_SUCCESS;
3513}
3514
3515/* Opcode 0xf2 0x0f 0x7f - invalid */
3516
3517
3518
3519/** Opcode 0x0f 0x80. */
3520FNIEMOP_DEF(iemOp_jo_Jv)
3521{
3522 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3523 IEMOP_HLP_MIN_386();
3524 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
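/* Jcc Jv takes a 16-bit or 32-bit displacement depending on the effective
   operand size; in 64-bit mode the operand size defaults to 64-bit (above),
   so the 32-bit path also covers long mode, sign-extending into RIP. */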
3525 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3526 {
3527 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3529
3530 IEM_MC_BEGIN(0, 0);
3531 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3532 IEM_MC_REL_JMP_S16(i16Imm);
3533 } IEM_MC_ELSE() {
3534 IEM_MC_ADVANCE_RIP();
3535 } IEM_MC_ENDIF();
3536 IEM_MC_END();
3537 }
3538 else
3539 {
3540 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3542
3543 IEM_MC_BEGIN(0, 0);
3544 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3545 IEM_MC_REL_JMP_S32(i32Imm);
3546 } IEM_MC_ELSE() {
3547 IEM_MC_ADVANCE_RIP();
3548 } IEM_MC_ENDIF();
3549 IEM_MC_END();
3550 }
3551 return VINF_SUCCESS;
3552}
3553
3554
3555/** Opcode 0x0f 0x81. */
3556FNIEMOP_DEF(iemOp_jno_Jv)
3557{
3558 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3559 IEMOP_HLP_MIN_386();
3560 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3561 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3562 {
3563 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3565
3566 IEM_MC_BEGIN(0, 0);
3567 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3568 IEM_MC_ADVANCE_RIP();
3569 } IEM_MC_ELSE() {
3570 IEM_MC_REL_JMP_S16(i16Imm);
3571 } IEM_MC_ENDIF();
3572 IEM_MC_END();
3573 }
3574 else
3575 {
3576 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3578
3579 IEM_MC_BEGIN(0, 0);
3580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3581 IEM_MC_ADVANCE_RIP();
3582 } IEM_MC_ELSE() {
3583 IEM_MC_REL_JMP_S32(i32Imm);
3584 } IEM_MC_ENDIF();
3585 IEM_MC_END();
3586 }
3587 return VINF_SUCCESS;
3588}
3589
3590
3591/** Opcode 0x0f 0x82. */
3592FNIEMOP_DEF(iemOp_jc_Jv)
3593{
3594 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3595 IEMOP_HLP_MIN_386();
3596 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3597 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3598 {
3599 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3601
3602 IEM_MC_BEGIN(0, 0);
3603 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3604 IEM_MC_REL_JMP_S16(i16Imm);
3605 } IEM_MC_ELSE() {
3606 IEM_MC_ADVANCE_RIP();
3607 } IEM_MC_ENDIF();
3608 IEM_MC_END();
3609 }
3610 else
3611 {
3612 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3614
3615 IEM_MC_BEGIN(0, 0);
3616 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3617 IEM_MC_REL_JMP_S32(i32Imm);
3618 } IEM_MC_ELSE() {
3619 IEM_MC_ADVANCE_RIP();
3620 } IEM_MC_ENDIF();
3621 IEM_MC_END();
3622 }
3623 return VINF_SUCCESS;
3624}
3625
3626
3627/** Opcode 0x0f 0x83. */
3628FNIEMOP_DEF(iemOp_jnc_Jv)
3629{
3630 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3631 IEMOP_HLP_MIN_386();
3632 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3633 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3634 {
3635 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3637
3638 IEM_MC_BEGIN(0, 0);
3639 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3640 IEM_MC_ADVANCE_RIP();
3641 } IEM_MC_ELSE() {
3642 IEM_MC_REL_JMP_S16(i16Imm);
3643 } IEM_MC_ENDIF();
3644 IEM_MC_END();
3645 }
3646 else
3647 {
3648 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3650
3651 IEM_MC_BEGIN(0, 0);
3652 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3653 IEM_MC_ADVANCE_RIP();
3654 } IEM_MC_ELSE() {
3655 IEM_MC_REL_JMP_S32(i32Imm);
3656 } IEM_MC_ENDIF();
3657 IEM_MC_END();
3658 }
3659 return VINF_SUCCESS;
3660}
3661
3662
3663/** Opcode 0x0f 0x84. */
3664FNIEMOP_DEF(iemOp_je_Jv)
3665{
3666 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3667 IEMOP_HLP_MIN_386();
3668 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3669 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3670 {
3671 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3673
3674 IEM_MC_BEGIN(0, 0);
3675 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3676 IEM_MC_REL_JMP_S16(i16Imm);
3677 } IEM_MC_ELSE() {
3678 IEM_MC_ADVANCE_RIP();
3679 } IEM_MC_ENDIF();
3680 IEM_MC_END();
3681 }
3682 else
3683 {
3684 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3686
3687 IEM_MC_BEGIN(0, 0);
3688 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3689 IEM_MC_REL_JMP_S32(i32Imm);
3690 } IEM_MC_ELSE() {
3691 IEM_MC_ADVANCE_RIP();
3692 } IEM_MC_ENDIF();
3693 IEM_MC_END();
3694 }
3695 return VINF_SUCCESS;
3696}
3697
3698
3699/** Opcode 0x0f 0x85. */
3700FNIEMOP_DEF(iemOp_jne_Jv)
3701{
3702 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3703 IEMOP_HLP_MIN_386();
3704 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3705 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3706 {
3707 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3709
3710 IEM_MC_BEGIN(0, 0);
3711 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3712 IEM_MC_ADVANCE_RIP();
3713 } IEM_MC_ELSE() {
3714 IEM_MC_REL_JMP_S16(i16Imm);
3715 } IEM_MC_ENDIF();
3716 IEM_MC_END();
3717 }
3718 else
3719 {
3720 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3722
3723 IEM_MC_BEGIN(0, 0);
3724 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3725 IEM_MC_ADVANCE_RIP();
3726 } IEM_MC_ELSE() {
3727 IEM_MC_REL_JMP_S32(i32Imm);
3728 } IEM_MC_ENDIF();
3729 IEM_MC_END();
3730 }
3731 return VINF_SUCCESS;
3732}
3733
3734
3735/** Opcode 0x0f 0x86. */
3736FNIEMOP_DEF(iemOp_jbe_Jv)
3737{
3738 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3739 IEMOP_HLP_MIN_386();
3740 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3741 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3742 {
3743 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3745
3746 IEM_MC_BEGIN(0, 0);
3747 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3748 IEM_MC_REL_JMP_S16(i16Imm);
3749 } IEM_MC_ELSE() {
3750 IEM_MC_ADVANCE_RIP();
3751 } IEM_MC_ENDIF();
3752 IEM_MC_END();
3753 }
3754 else
3755 {
3756 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3758
3759 IEM_MC_BEGIN(0, 0);
3760 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3761 IEM_MC_REL_JMP_S32(i32Imm);
3762 } IEM_MC_ELSE() {
3763 IEM_MC_ADVANCE_RIP();
3764 } IEM_MC_ENDIF();
3765 IEM_MC_END();
3766 }
3767 return VINF_SUCCESS;
3768}
3769
3770
3771/** Opcode 0x0f 0x87. */
3772FNIEMOP_DEF(iemOp_jnbe_Jv)
3773{
3774 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3775 IEMOP_HLP_MIN_386();
3776 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3777 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3778 {
3779 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3781
3782 IEM_MC_BEGIN(0, 0);
3783 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3784 IEM_MC_ADVANCE_RIP();
3785 } IEM_MC_ELSE() {
3786 IEM_MC_REL_JMP_S16(i16Imm);
3787 } IEM_MC_ENDIF();
3788 IEM_MC_END();
3789 }
3790 else
3791 {
3792 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3794
3795 IEM_MC_BEGIN(0, 0);
3796 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3797 IEM_MC_ADVANCE_RIP();
3798 } IEM_MC_ELSE() {
3799 IEM_MC_REL_JMP_S32(i32Imm);
3800 } IEM_MC_ENDIF();
3801 IEM_MC_END();
3802 }
3803 return VINF_SUCCESS;
3804}
3805
3806
3807/** Opcode 0x0f 0x88. */
3808FNIEMOP_DEF(iemOp_js_Jv)
3809{
3810 IEMOP_MNEMONIC(js_Jv, "js Jv");
3811 IEMOP_HLP_MIN_386();
3812 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3813 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3814 {
3815 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3817
3818 IEM_MC_BEGIN(0, 0);
3819 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3820 IEM_MC_REL_JMP_S16(i16Imm);
3821 } IEM_MC_ELSE() {
3822 IEM_MC_ADVANCE_RIP();
3823 } IEM_MC_ENDIF();
3824 IEM_MC_END();
3825 }
3826 else
3827 {
3828 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3830
3831 IEM_MC_BEGIN(0, 0);
3832 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3833 IEM_MC_REL_JMP_S32(i32Imm);
3834 } IEM_MC_ELSE() {
3835 IEM_MC_ADVANCE_RIP();
3836 } IEM_MC_ENDIF();
3837 IEM_MC_END();
3838 }
3839 return VINF_SUCCESS;
3840}
3841
3842
3843/** Opcode 0x0f 0x89. */
3844FNIEMOP_DEF(iemOp_jns_Jv)
3845{
3846 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3847 IEMOP_HLP_MIN_386();
3848 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3849 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3850 {
3851 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3853
3854 IEM_MC_BEGIN(0, 0);
3855 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3856 IEM_MC_ADVANCE_RIP();
3857 } IEM_MC_ELSE() {
3858 IEM_MC_REL_JMP_S16(i16Imm);
3859 } IEM_MC_ENDIF();
3860 IEM_MC_END();
3861 }
3862 else
3863 {
3864 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3866
3867 IEM_MC_BEGIN(0, 0);
3868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3869 IEM_MC_ADVANCE_RIP();
3870 } IEM_MC_ELSE() {
3871 IEM_MC_REL_JMP_S32(i32Imm);
3872 } IEM_MC_ENDIF();
3873 IEM_MC_END();
3874 }
3875 return VINF_SUCCESS;
3876}
3877
3878
3879/** Opcode 0x0f 0x8a. */
3880FNIEMOP_DEF(iemOp_jp_Jv)
3881{
3882 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3883 IEMOP_HLP_MIN_386();
3884 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3885 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3886 {
3887 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3889
3890 IEM_MC_BEGIN(0, 0);
3891 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3892 IEM_MC_REL_JMP_S16(i16Imm);
3893 } IEM_MC_ELSE() {
3894 IEM_MC_ADVANCE_RIP();
3895 } IEM_MC_ENDIF();
3896 IEM_MC_END();
3897 }
3898 else
3899 {
3900 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3902
3903 IEM_MC_BEGIN(0, 0);
3904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3905 IEM_MC_REL_JMP_S32(i32Imm);
3906 } IEM_MC_ELSE() {
3907 IEM_MC_ADVANCE_RIP();
3908 } IEM_MC_ENDIF();
3909 IEM_MC_END();
3910 }
3911 return VINF_SUCCESS;
3912}
3913
3914
3915/** Opcode 0x0f 0x8b. */
3916FNIEMOP_DEF(iemOp_jnp_Jv)
3917{
3918 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3919 IEMOP_HLP_MIN_386();
3920 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3921 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3922 {
3923 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3925
3926 IEM_MC_BEGIN(0, 0);
3927 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3928 IEM_MC_ADVANCE_RIP();
3929 } IEM_MC_ELSE() {
3930 IEM_MC_REL_JMP_S16(i16Imm);
3931 } IEM_MC_ENDIF();
3932 IEM_MC_END();
3933 }
3934 else
3935 {
3936 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3938
3939 IEM_MC_BEGIN(0, 0);
3940 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3941 IEM_MC_ADVANCE_RIP();
3942 } IEM_MC_ELSE() {
3943 IEM_MC_REL_JMP_S32(i32Imm);
3944 } IEM_MC_ENDIF();
3945 IEM_MC_END();
3946 }
3947 return VINF_SUCCESS;
3948}
3949
3950
3951/** Opcode 0x0f 0x8c. */
3952FNIEMOP_DEF(iemOp_jl_Jv)
3953{
3954 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3955 IEMOP_HLP_MIN_386();
3956 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3957 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3958 {
3959 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3961
3962 IEM_MC_BEGIN(0, 0);
3963 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3964 IEM_MC_REL_JMP_S16(i16Imm);
3965 } IEM_MC_ELSE() {
3966 IEM_MC_ADVANCE_RIP();
3967 } IEM_MC_ENDIF();
3968 IEM_MC_END();
3969 }
3970 else
3971 {
3972 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3974
3975 IEM_MC_BEGIN(0, 0);
3976 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3977 IEM_MC_REL_JMP_S32(i32Imm);
3978 } IEM_MC_ELSE() {
3979 IEM_MC_ADVANCE_RIP();
3980 } IEM_MC_ENDIF();
3981 IEM_MC_END();
3982 }
3983 return VINF_SUCCESS;
3984}
3985
3986
3987/** Opcode 0x0f 0x8d. */
3988FNIEMOP_DEF(iemOp_jnl_Jv)
3989{
3990 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3991 IEMOP_HLP_MIN_386();
3992 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3993 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3994 {
3995 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3997
3998 IEM_MC_BEGIN(0, 0);
3999 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4000 IEM_MC_ADVANCE_RIP();
4001 } IEM_MC_ELSE() {
4002 IEM_MC_REL_JMP_S16(i16Imm);
4003 } IEM_MC_ENDIF();
4004 IEM_MC_END();
4005 }
4006 else
4007 {
4008 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4010
4011 IEM_MC_BEGIN(0, 0);
4012 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4013 IEM_MC_ADVANCE_RIP();
4014 } IEM_MC_ELSE() {
4015 IEM_MC_REL_JMP_S32(i32Imm);
4016 } IEM_MC_ENDIF();
4017 IEM_MC_END();
4018 }
4019 return VINF_SUCCESS;
4020}
4021
4022
4023/** Opcode 0x0f 0x8e. */
4024FNIEMOP_DEF(iemOp_jle_Jv)
4025{
4026 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4027 IEMOP_HLP_MIN_386();
4028 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4029 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4030 {
4031 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4033
4034 IEM_MC_BEGIN(0, 0);
4035 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4036 IEM_MC_REL_JMP_S16(i16Imm);
4037 } IEM_MC_ELSE() {
4038 IEM_MC_ADVANCE_RIP();
4039 } IEM_MC_ENDIF();
4040 IEM_MC_END();
4041 }
4042 else
4043 {
4044 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4046
4047 IEM_MC_BEGIN(0, 0);
4048 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4049 IEM_MC_REL_JMP_S32(i32Imm);
4050 } IEM_MC_ELSE() {
4051 IEM_MC_ADVANCE_RIP();
4052 } IEM_MC_ENDIF();
4053 IEM_MC_END();
4054 }
4055 return VINF_SUCCESS;
4056}
4057
4058
4059/** Opcode 0x0f 0x8f. */
4060FNIEMOP_DEF(iemOp_jnle_Jv)
4061{
4062 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4063 IEMOP_HLP_MIN_386();
4064 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4065 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4066 {
4067 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4069
4070 IEM_MC_BEGIN(0, 0);
4071 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4072 IEM_MC_ADVANCE_RIP();
4073 } IEM_MC_ELSE() {
4074 IEM_MC_REL_JMP_S16(i16Imm);
4075 } IEM_MC_ENDIF();
4076 IEM_MC_END();
4077 }
4078 else
4079 {
4080 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4082
4083 IEM_MC_BEGIN(0, 0);
4084 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4085 IEM_MC_ADVANCE_RIP();
4086 } IEM_MC_ELSE() {
4087 IEM_MC_REL_JMP_S32(i32Imm);
4088 } IEM_MC_ENDIF();
4089 IEM_MC_END();
4090 }
4091 return VINF_SUCCESS;
4092}
4093
4094
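/*
 * SETcc Eb (0x0f 0x90..0x9f): stores 1 in the byte operand when the
 * condition holds and 0 otherwise; only the ModRM.rm operand is used.
 */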
4095/** Opcode 0x0f 0x90. */
4096FNIEMOP_DEF(iemOp_seto_Eb)
4097{
4098 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4099 IEMOP_HLP_MIN_386();
4100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4101
4102 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4103 * any way. AMD says it's "unused", whatever that means. We're
4104 * ignoring for now. */
4105 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4106 {
4107 /* register target */
4108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4109 IEM_MC_BEGIN(0, 0);
4110 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4111 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4112 } IEM_MC_ELSE() {
4113 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4114 } IEM_MC_ENDIF();
4115 IEM_MC_ADVANCE_RIP();
4116 IEM_MC_END();
4117 }
4118 else
4119 {
4120 /* memory target */
4121 IEM_MC_BEGIN(0, 1);
4122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4125 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4126 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4127 } IEM_MC_ELSE() {
4128 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4129 } IEM_MC_ENDIF();
4130 IEM_MC_ADVANCE_RIP();
4131 IEM_MC_END();
4132 }
4133 return VINF_SUCCESS;
4134}
4135
4136
4137/** Opcode 0x0f 0x91. */
4138FNIEMOP_DEF(iemOp_setno_Eb)
4139{
4140 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4141 IEMOP_HLP_MIN_386();
4142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4143
4144 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4145 * any way. AMD says it's "unused", whatever that means. We're
4146 * ignoring for now. */
4147 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4148 {
4149 /* register target */
4150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4151 IEM_MC_BEGIN(0, 0);
4152 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4153 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4154 } IEM_MC_ELSE() {
4155 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4156 } IEM_MC_ENDIF();
4157 IEM_MC_ADVANCE_RIP();
4158 IEM_MC_END();
4159 }
4160 else
4161 {
4162 /* memory target */
4163 IEM_MC_BEGIN(0, 1);
4164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4167 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4168 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4169 } IEM_MC_ELSE() {
4170 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4171 } IEM_MC_ENDIF();
4172 IEM_MC_ADVANCE_RIP();
4173 IEM_MC_END();
4174 }
4175 return VINF_SUCCESS;
4176}
4177
4178
4179/** Opcode 0x0f 0x92. */
4180FNIEMOP_DEF(iemOp_setc_Eb)
4181{
4182 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4183 IEMOP_HLP_MIN_386();
4184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4185
4186 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4187 * any way. AMD says it's "unused", whatever that means. We're
4188 * ignoring for now. */
4189 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4190 {
4191 /* register target */
4192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4193 IEM_MC_BEGIN(0, 0);
4194 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4195 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4196 } IEM_MC_ELSE() {
4197 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4198 } IEM_MC_ENDIF();
4199 IEM_MC_ADVANCE_RIP();
4200 IEM_MC_END();
4201 }
4202 else
4203 {
4204 /* memory target */
4205 IEM_MC_BEGIN(0, 1);
4206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4209 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4210 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4211 } IEM_MC_ELSE() {
4212 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4213 } IEM_MC_ENDIF();
4214 IEM_MC_ADVANCE_RIP();
4215 IEM_MC_END();
4216 }
4217 return VINF_SUCCESS;
4218}
4219
4220
4221/** Opcode 0x0f 0x93. */
4222FNIEMOP_DEF(iemOp_setnc_Eb)
4223{
4224 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4225 IEMOP_HLP_MIN_386();
4226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4227
4228 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4229 * any way. AMD says it's "unused", whatever that means. We're
4230 * ignoring for now. */
4231 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4232 {
4233 /* register target */
4234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4235 IEM_MC_BEGIN(0, 0);
4236 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4237 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4238 } IEM_MC_ELSE() {
4239 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4240 } IEM_MC_ENDIF();
4241 IEM_MC_ADVANCE_RIP();
4242 IEM_MC_END();
4243 }
4244 else
4245 {
4246 /* memory target */
4247 IEM_MC_BEGIN(0, 1);
4248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4251 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4252 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4253 } IEM_MC_ELSE() {
4254 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4255 } IEM_MC_ENDIF();
4256 IEM_MC_ADVANCE_RIP();
4257 IEM_MC_END();
4258 }
4259 return VINF_SUCCESS;
4260}
4261
4262
4263/** Opcode 0x0f 0x94. */
4264FNIEMOP_DEF(iemOp_sete_Eb)
4265{
4266 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4267 IEMOP_HLP_MIN_386();
4268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4269
4270 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4271 * any way. AMD says it's "unused", whatever that means. We're
4272 * ignoring for now. */
4273 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4274 {
4275 /* register target */
4276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4277 IEM_MC_BEGIN(0, 0);
4278 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4279 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4280 } IEM_MC_ELSE() {
4281 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4282 } IEM_MC_ENDIF();
4283 IEM_MC_ADVANCE_RIP();
4284 IEM_MC_END();
4285 }
4286 else
4287 {
4288 /* memory target */
4289 IEM_MC_BEGIN(0, 1);
4290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4293 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4294 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4295 } IEM_MC_ELSE() {
4296 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4297 } IEM_MC_ENDIF();
4298 IEM_MC_ADVANCE_RIP();
4299 IEM_MC_END();
4300 }
4301 return VINF_SUCCESS;
4302}
4303
4304
4305/** Opcode 0x0f 0x95. */
4306FNIEMOP_DEF(iemOp_setne_Eb)
4307{
4308 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4309 IEMOP_HLP_MIN_386();
4310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4311
4312 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4313 * any way. AMD says it's "unused", whatever that means. We're
4314 * ignoring for now. */
4315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4316 {
4317 /* register target */
4318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4319 IEM_MC_BEGIN(0, 0);
4320 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4321 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4322 } IEM_MC_ELSE() {
4323 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4324 } IEM_MC_ENDIF();
4325 IEM_MC_ADVANCE_RIP();
4326 IEM_MC_END();
4327 }
4328 else
4329 {
4330 /* memory target */
4331 IEM_MC_BEGIN(0, 1);
4332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4335 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4336 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4337 } IEM_MC_ELSE() {
4338 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4339 } IEM_MC_ENDIF();
4340 IEM_MC_ADVANCE_RIP();
4341 IEM_MC_END();
4342 }
4343 return VINF_SUCCESS;
4344}
4345
4346
4347/** Opcode 0x0f 0x96. */
4348FNIEMOP_DEF(iemOp_setbe_Eb)
4349{
4350 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4351 IEMOP_HLP_MIN_386();
4352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4353
4354 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4355 * any way. AMD says it's "unused", whatever that means. We're
4356 * ignoring for now. */
4357 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4358 {
4359 /* register target */
4360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4361 IEM_MC_BEGIN(0, 0);
4362 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4363 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4364 } IEM_MC_ELSE() {
4365 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4366 } IEM_MC_ENDIF();
4367 IEM_MC_ADVANCE_RIP();
4368 IEM_MC_END();
4369 }
4370 else
4371 {
4372 /* memory target */
4373 IEM_MC_BEGIN(0, 1);
4374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4377 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4378 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4379 } IEM_MC_ELSE() {
4380 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4381 } IEM_MC_ENDIF();
4382 IEM_MC_ADVANCE_RIP();
4383 IEM_MC_END();
4384 }
4385 return VINF_SUCCESS;
4386}
4387
4388
4389/** Opcode 0x0f 0x97. */
4390FNIEMOP_DEF(iemOp_setnbe_Eb)
4391{
4392 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4393 IEMOP_HLP_MIN_386();
4394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4395
4396 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4397 * any way. AMD says it's "unused", whatever that means. We're
4398 * ignoring for now. */
4399 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4400 {
4401 /* register target */
4402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4403 IEM_MC_BEGIN(0, 0);
4404 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4405 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4406 } IEM_MC_ELSE() {
4407 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4408 } IEM_MC_ENDIF();
4409 IEM_MC_ADVANCE_RIP();
4410 IEM_MC_END();
4411 }
4412 else
4413 {
4414 /* memory target */
4415 IEM_MC_BEGIN(0, 1);
4416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4419 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4420 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4421 } IEM_MC_ELSE() {
4422 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4423 } IEM_MC_ENDIF();
4424 IEM_MC_ADVANCE_RIP();
4425 IEM_MC_END();
4426 }
4427 return VINF_SUCCESS;
4428}
4429
4430
4431/** Opcode 0x0f 0x98. */
4432FNIEMOP_DEF(iemOp_sets_Eb)
4433{
4434 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4435 IEMOP_HLP_MIN_386();
4436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4437
4438 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4439 * any way. AMD says it's "unused", whatever that means. We're
4440 * ignoring for now. */
4441 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4442 {
4443 /* register target */
4444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4445 IEM_MC_BEGIN(0, 0);
4446 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4447 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4448 } IEM_MC_ELSE() {
4449 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4450 } IEM_MC_ENDIF();
4451 IEM_MC_ADVANCE_RIP();
4452 IEM_MC_END();
4453 }
4454 else
4455 {
4456 /* memory target */
4457 IEM_MC_BEGIN(0, 1);
4458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4461 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4462 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4463 } IEM_MC_ELSE() {
4464 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4465 } IEM_MC_ENDIF();
4466 IEM_MC_ADVANCE_RIP();
4467 IEM_MC_END();
4468 }
4469 return VINF_SUCCESS;
4470}
4471
4472
4473/** Opcode 0x0f 0x99. */
4474FNIEMOP_DEF(iemOp_setns_Eb)
4475{
4476 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4477 IEMOP_HLP_MIN_386();
4478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4479
4480 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4481 * any way. AMD says it's "unused", whatever that means. We're
4482 * ignoring for now. */
4483 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4484 {
4485 /* register target */
4486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4487 IEM_MC_BEGIN(0, 0);
4488 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4489 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4490 } IEM_MC_ELSE() {
4491 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4492 } IEM_MC_ENDIF();
4493 IEM_MC_ADVANCE_RIP();
4494 IEM_MC_END();
4495 }
4496 else
4497 {
4498 /* memory target */
4499 IEM_MC_BEGIN(0, 1);
4500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4503 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4504 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4505 } IEM_MC_ELSE() {
4506 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4507 } IEM_MC_ENDIF();
4508 IEM_MC_ADVANCE_RIP();
4509 IEM_MC_END();
4510 }
4511 return VINF_SUCCESS;
4512}
4513
4514
4515/** Opcode 0x0f 0x9a. */
4516FNIEMOP_DEF(iemOp_setp_Eb)
4517{
4518 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4519 IEMOP_HLP_MIN_386();
4520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4521
4522 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4523 * any way. AMD says it's "unused", whatever that means. We're
4524 * ignoring for now. */
4525 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4526 {
4527 /* register target */
4528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4529 IEM_MC_BEGIN(0, 0);
4530 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4531 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4532 } IEM_MC_ELSE() {
4533 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4534 } IEM_MC_ENDIF();
4535 IEM_MC_ADVANCE_RIP();
4536 IEM_MC_END();
4537 }
4538 else
4539 {
4540 /* memory target */
4541 IEM_MC_BEGIN(0, 1);
4542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4545 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4546 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4547 } IEM_MC_ELSE() {
4548 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4549 } IEM_MC_ENDIF();
4550 IEM_MC_ADVANCE_RIP();
4551 IEM_MC_END();
4552 }
4553 return VINF_SUCCESS;
4554}
4555
4556
4557/** Opcode 0x0f 0x9b. */
4558FNIEMOP_DEF(iemOp_setnp_Eb)
4559{
4560 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4561 IEMOP_HLP_MIN_386();
4562 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4563
4564 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4565 * any way. AMD says it's "unused", whatever that means. We're
4566 * ignoring for now. */
4567 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4568 {
4569 /* register target */
4570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4571 IEM_MC_BEGIN(0, 0);
4572 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4573 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4574 } IEM_MC_ELSE() {
4575 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4576 } IEM_MC_ENDIF();
4577 IEM_MC_ADVANCE_RIP();
4578 IEM_MC_END();
4579 }
4580 else
4581 {
4582 /* memory target */
4583 IEM_MC_BEGIN(0, 1);
4584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4587 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4588 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4589 } IEM_MC_ELSE() {
4590 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4591 } IEM_MC_ENDIF();
4592 IEM_MC_ADVANCE_RIP();
4593 IEM_MC_END();
4594 }
4595 return VINF_SUCCESS;
4596}
4597
4598
4599/** Opcode 0x0f 0x9c. */
4600FNIEMOP_DEF(iemOp_setl_Eb)
4601{
4602 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4603 IEMOP_HLP_MIN_386();
4604 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4605
4606 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4607 * any way. AMD says it's "unused", whatever that means. We're
4608 * ignoring for now. */
4609 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4610 {
4611 /* register target */
4612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4613 IEM_MC_BEGIN(0, 0);
4614 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4615 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4616 } IEM_MC_ELSE() {
4617 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4618 } IEM_MC_ENDIF();
4619 IEM_MC_ADVANCE_RIP();
4620 IEM_MC_END();
4621 }
4622 else
4623 {
4624 /* memory target */
4625 IEM_MC_BEGIN(0, 1);
4626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4629 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4630 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4631 } IEM_MC_ELSE() {
4632 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4633 } IEM_MC_ENDIF();
4634 IEM_MC_ADVANCE_RIP();
4635 IEM_MC_END();
4636 }
4637 return VINF_SUCCESS;
4638}
4639
4640
4641/** Opcode 0x0f 0x9d. */
4642FNIEMOP_DEF(iemOp_setnl_Eb)
4643{
4644 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4645 IEMOP_HLP_MIN_386();
4646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4647
4648 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4649 * any way. AMD says it's "unused", whatever that means. We're
4650 * ignoring for now. */
4651 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4652 {
4653 /* register target */
4654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4655 IEM_MC_BEGIN(0, 0);
4656 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4657 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4658 } IEM_MC_ELSE() {
4659 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4660 } IEM_MC_ENDIF();
4661 IEM_MC_ADVANCE_RIP();
4662 IEM_MC_END();
4663 }
4664 else
4665 {
4666 /* memory target */
4667 IEM_MC_BEGIN(0, 1);
4668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4671 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4672 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4673 } IEM_MC_ELSE() {
4674 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4675 } IEM_MC_ENDIF();
4676 IEM_MC_ADVANCE_RIP();
4677 IEM_MC_END();
4678 }
4679 return VINF_SUCCESS;
4680}
4681
4682
4683/** Opcode 0x0f 0x9e. */
4684FNIEMOP_DEF(iemOp_setle_Eb)
4685{
4686 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4687 IEMOP_HLP_MIN_386();
4688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4689
4690 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4691 * any way. AMD says it's "unused", whatever that means. We're
4692 * ignoring for now. */
4693 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4694 {
4695 /* register target */
4696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4697 IEM_MC_BEGIN(0, 0);
4698 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4699 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4700 } IEM_MC_ELSE() {
4701 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4702 } IEM_MC_ENDIF();
4703 IEM_MC_ADVANCE_RIP();
4704 IEM_MC_END();
4705 }
4706 else
4707 {
4708 /* memory target */
4709 IEM_MC_BEGIN(0, 1);
4710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4713 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4714 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4715 } IEM_MC_ELSE() {
4716 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4717 } IEM_MC_ENDIF();
4718 IEM_MC_ADVANCE_RIP();
4719 IEM_MC_END();
4720 }
4721 return VINF_SUCCESS;
4722}
4723
4724
4725/** Opcode 0x0f 0x9f. */
4726FNIEMOP_DEF(iemOp_setnle_Eb)
4727{
4728 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4729 IEMOP_HLP_MIN_386();
4730 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4731
4732 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4733 * any way. AMD says it's "unused", whatever that means. We're
4734 * ignoring for now. */
4735 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4736 {
4737 /* register target */
4738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4739 IEM_MC_BEGIN(0, 0);
4740 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4741 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4742 } IEM_MC_ELSE() {
4743 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4744 } IEM_MC_ENDIF();
4745 IEM_MC_ADVANCE_RIP();
4746 IEM_MC_END();
4747 }
4748 else
4749 {
4750 /* memory target */
4751 IEM_MC_BEGIN(0, 1);
4752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4755 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4756 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4757 } IEM_MC_ELSE() {
4758 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4759 } IEM_MC_ENDIF();
4760 IEM_MC_ADVANCE_RIP();
4761 IEM_MC_END();
4762 }
4763 return VINF_SUCCESS;
4764}
4765
4766
4767/**
4768 * Common 'push segment-register' helper.
4769 */
4770FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4771{
4772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4773 if (iReg < X86_SREG_FS)
4774 IEMOP_HLP_NO_64BIT();
4775 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
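/* Pushing ES/CS/SS/DS is invalid in 64-bit mode, while FS and GS remain
   valid; the iReg < X86_SREG_FS check above enforces exactly that. */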
4776
4777 switch (pVCpu->iem.s.enmEffOpSize)
4778 {
4779 case IEMMODE_16BIT:
4780 IEM_MC_BEGIN(0, 1);
4781 IEM_MC_LOCAL(uint16_t, u16Value);
4782 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4783 IEM_MC_PUSH_U16(u16Value);
4784 IEM_MC_ADVANCE_RIP();
4785 IEM_MC_END();
4786 break;
4787
4788 case IEMMODE_32BIT:
4789 IEM_MC_BEGIN(0, 1);
4790 IEM_MC_LOCAL(uint32_t, u32Value);
4791 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4792 IEM_MC_PUSH_U32_SREG(u32Value);
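/* A dedicated push MC is used here since real CPUs may write only the
   low 16 bits of the 32-bit stack slot when pushing a segment register. */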
4793 IEM_MC_ADVANCE_RIP();
4794 IEM_MC_END();
4795 break;
4796
4797 case IEMMODE_64BIT:
4798 IEM_MC_BEGIN(0, 1);
4799 IEM_MC_LOCAL(uint64_t, u64Value);
4800 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4801 IEM_MC_PUSH_U64(u64Value);
4802 IEM_MC_ADVANCE_RIP();
4803 IEM_MC_END();
4804 break;
4805 }
4806
4807 return VINF_SUCCESS;
4808}
4809
4810
4811/** Opcode 0x0f 0xa0. */
4812FNIEMOP_DEF(iemOp_push_fs)
4813{
4814 IEMOP_MNEMONIC(push_fs, "push fs");
4815 IEMOP_HLP_MIN_386();
4816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4817 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4818}
4819
4820
4821/** Opcode 0x0f 0xa1. */
4822FNIEMOP_DEF(iemOp_pop_fs)
4823{
4824 IEMOP_MNEMONIC(pop_fs, "pop fs");
4825 IEMOP_HLP_MIN_386();
4826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4827 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4828}
4829
4830
4831/** Opcode 0x0f 0xa2. */
4832FNIEMOP_DEF(iemOp_cpuid)
4833{
4834 IEMOP_MNEMONIC(cpuid, "cpuid");
4835 IEMOP_HLP_MIN_486(); /* not all 486es. */
4836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4837 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4838}
4839
4840
4841/**
4842 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4843 * iemOp_bts_Ev_Gv.
4844 */
4845FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4846{
4847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4848 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4849
4850 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4851 {
4852 /* register destination. */
4853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4854 switch (pVCpu->iem.s.enmEffOpSize)
4855 {
4856 case IEMMODE_16BIT:
4857 IEM_MC_BEGIN(3, 0);
4858 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4859 IEM_MC_ARG(uint16_t, u16Src, 1);
4860 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4861
4862 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4863 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4864 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4865 IEM_MC_REF_EFLAGS(pEFlags);
4866 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4867
4868 IEM_MC_ADVANCE_RIP();
4869 IEM_MC_END();
4870 return VINF_SUCCESS;
4871
4872 case IEMMODE_32BIT:
4873 IEM_MC_BEGIN(3, 0);
4874 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4875 IEM_MC_ARG(uint32_t, u32Src, 1);
4876 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4877
4878 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4879 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4880 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4881 IEM_MC_REF_EFLAGS(pEFlags);
4882 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4883
4884 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4885 IEM_MC_ADVANCE_RIP();
4886 IEM_MC_END();
4887 return VINF_SUCCESS;
4888
4889 case IEMMODE_64BIT:
4890 IEM_MC_BEGIN(3, 0);
4891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4892 IEM_MC_ARG(uint64_t, u64Src, 1);
4893 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4894
4895 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4896 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4897 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4898 IEM_MC_REF_EFLAGS(pEFlags);
4899 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4900
4901 IEM_MC_ADVANCE_RIP();
4902 IEM_MC_END();
4903 return VINF_SUCCESS;
4904
4905 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4906 }
4907 }
4908 else
4909 {
4910 /* memory destination. */
4911
4912 uint32_t fAccess;
4913 if (pImpl->pfnLockedU16)
4914 fAccess = IEM_ACCESS_DATA_RW;
4915 else /* BT */
4916 fAccess = IEM_ACCESS_DATA_R;
4917
4918 /** @todo test negative bit offsets! */
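/* The bit offset in Gv is signed for memory operands: the low 4/5/6 bits
   select the bit within one operand-sized unit, and the remaining bits,
   arithmetically shifted down and scaled back up to bytes, adjust the
   effective address (hence negative offsets reach below GCPtrEffDst). */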
4919 switch (pVCpu->iem.s.enmEffOpSize)
4920 {
4921 case IEMMODE_16BIT:
4922 IEM_MC_BEGIN(3, 2);
4923 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4924 IEM_MC_ARG(uint16_t, u16Src, 1);
4925 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4927 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4928
4929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4930 if (pImpl->pfnLockedU16)
4931 IEMOP_HLP_DONE_DECODING();
4932 else
4933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4934 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4935 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4936 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4937 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4938 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4939 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4940 IEM_MC_FETCH_EFLAGS(EFlags);
4941
4942 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4943 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4944 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4945 else
4946 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4947 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4948
4949 IEM_MC_COMMIT_EFLAGS(EFlags);
4950 IEM_MC_ADVANCE_RIP();
4951 IEM_MC_END();
4952 return VINF_SUCCESS;
4953
4954 case IEMMODE_32BIT:
4955 IEM_MC_BEGIN(3, 2);
4956 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4957 IEM_MC_ARG(uint32_t, u32Src, 1);
4958 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4960 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4961
4962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4963 if (pImpl->pfnLockedU16)
4964 IEMOP_HLP_DONE_DECODING();
4965 else
4966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4967 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4968 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4969 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4970 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4971 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4972 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4973 IEM_MC_FETCH_EFLAGS(EFlags);
4974
4975 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4976 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4977 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4978 else
4979 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4980 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
4981
4982 IEM_MC_COMMIT_EFLAGS(EFlags);
4983 IEM_MC_ADVANCE_RIP();
4984 IEM_MC_END();
4985 return VINF_SUCCESS;
4986
4987 case IEMMODE_64BIT:
4988 IEM_MC_BEGIN(3, 2);
4989 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4990 IEM_MC_ARG(uint64_t, u64Src, 1);
4991 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4993 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4994
4995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4996 if (pImpl->pfnLockedU16)
4997 IEMOP_HLP_DONE_DECODING();
4998 else
4999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5000 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5001 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5002 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5003 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5004 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5005 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5006 IEM_MC_FETCH_EFLAGS(EFlags);
5007
5008 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5009 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5010 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5011 else
5012 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5013 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5014
5015 IEM_MC_COMMIT_EFLAGS(EFlags);
5016 IEM_MC_ADVANCE_RIP();
5017 IEM_MC_END();
5018 return VINF_SUCCESS;
5019
5020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5021 }
5022 }
5023}
5024
5025
5026/** Opcode 0x0f 0xa3. */
5027FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5028{
5029 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5030 IEMOP_HLP_MIN_386();
5031 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5032}
5033
5034
5035/**
5036 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5037 */
5038FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5039{
5040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5041 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5042
5043 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5044 {
5045 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5047
5048 switch (pVCpu->iem.s.enmEffOpSize)
5049 {
5050 case IEMMODE_16BIT:
5051 IEM_MC_BEGIN(4, 0);
5052 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5053 IEM_MC_ARG(uint16_t, u16Src, 1);
5054 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5055 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5056
5057 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5058 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5059 IEM_MC_REF_EFLAGS(pEFlags);
5060 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5061
5062 IEM_MC_ADVANCE_RIP();
5063 IEM_MC_END();
5064 return VINF_SUCCESS;
5065
5066 case IEMMODE_32BIT:
5067 IEM_MC_BEGIN(4, 0);
5068 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5069 IEM_MC_ARG(uint32_t, u32Src, 1);
5070 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5071 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5072
5073 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5074 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5075 IEM_MC_REF_EFLAGS(pEFlags);
5076 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5077
5078 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5079 IEM_MC_ADVANCE_RIP();
5080 IEM_MC_END();
5081 return VINF_SUCCESS;
5082
5083 case IEMMODE_64BIT:
5084 IEM_MC_BEGIN(4, 0);
5085 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5086 IEM_MC_ARG(uint64_t, u64Src, 1);
5087 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5088 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5089
5090 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5091 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5092 IEM_MC_REF_EFLAGS(pEFlags);
5093 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5094
5095 IEM_MC_ADVANCE_RIP();
5096 IEM_MC_END();
5097 return VINF_SUCCESS;
5098
5099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5100 }
5101 }
5102 else
5103 {
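/* The Ib shift count trails the ModRM bytes, so the effective address
   calculation is told about one immediate byte (third argument) to keep
   RIP-relative addressing correct in 64-bit mode. */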
5104 switch (pVCpu->iem.s.enmEffOpSize)
5105 {
5106 case IEMMODE_16BIT:
5107 IEM_MC_BEGIN(4, 2);
5108 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5109 IEM_MC_ARG(uint16_t, u16Src, 1);
5110 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5111 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5113
5114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5115 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5116 IEM_MC_ASSIGN(cShiftArg, cShift);
5117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5118 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5119 IEM_MC_FETCH_EFLAGS(EFlags);
5120 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5121 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5122
5123 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5124 IEM_MC_COMMIT_EFLAGS(EFlags);
5125 IEM_MC_ADVANCE_RIP();
5126 IEM_MC_END();
5127 return VINF_SUCCESS;
5128
5129 case IEMMODE_32BIT:
5130 IEM_MC_BEGIN(4, 2);
5131 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5132 IEM_MC_ARG(uint32_t, u32Src, 1);
5133 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5134 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5136
5137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5138 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5139 IEM_MC_ASSIGN(cShiftArg, cShift);
5140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5141 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5142 IEM_MC_FETCH_EFLAGS(EFlags);
5143 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5144 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5145
5146 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5147 IEM_MC_COMMIT_EFLAGS(EFlags);
5148 IEM_MC_ADVANCE_RIP();
5149 IEM_MC_END();
5150 return VINF_SUCCESS;
5151
5152 case IEMMODE_64BIT:
5153 IEM_MC_BEGIN(4, 2);
5154 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5155 IEM_MC_ARG(uint64_t, u64Src, 1);
5156 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5157 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5159
5160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5161 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5162 IEM_MC_ASSIGN(cShiftArg, cShift);
5163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5164 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5165 IEM_MC_FETCH_EFLAGS(EFlags);
5166 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5167 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5168
5169 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5170 IEM_MC_COMMIT_EFLAGS(EFlags);
5171 IEM_MC_ADVANCE_RIP();
5172 IEM_MC_END();
5173 return VINF_SUCCESS;
5174
5175 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5176 }
5177 }
5178}
5179
5180
5181/**
5182 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5183 */
5184FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5185{
5186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5187 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5188
5189 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5190 {
5191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5192
5193 switch (pVCpu->iem.s.enmEffOpSize)
5194 {
5195 case IEMMODE_16BIT:
5196 IEM_MC_BEGIN(4, 0);
5197 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5198 IEM_MC_ARG(uint16_t, u16Src, 1);
5199 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5200 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5201
5202 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5203 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5204 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5205 IEM_MC_REF_EFLAGS(pEFlags);
5206 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5207
5208 IEM_MC_ADVANCE_RIP();
5209 IEM_MC_END();
5210 return VINF_SUCCESS;
5211
5212 case IEMMODE_32BIT:
5213 IEM_MC_BEGIN(4, 0);
5214 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5215 IEM_MC_ARG(uint32_t, u32Src, 1);
5216 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5217 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5218
5219 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5220 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5221 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5222 IEM_MC_REF_EFLAGS(pEFlags);
5223 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5224
5225 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5226 IEM_MC_ADVANCE_RIP();
5227 IEM_MC_END();
5228 return VINF_SUCCESS;
5229
5230 case IEMMODE_64BIT:
5231 IEM_MC_BEGIN(4, 0);
5232 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5233 IEM_MC_ARG(uint64_t, u64Src, 1);
5234 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5235 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5236
5237 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5238 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5239 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5240 IEM_MC_REF_EFLAGS(pEFlags);
5241 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5242
5243 IEM_MC_ADVANCE_RIP();
5244 IEM_MC_END();
5245 return VINF_SUCCESS;
5246
5247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5248 }
5249 }
5250 else
5251 {
5252 switch (pVCpu->iem.s.enmEffOpSize)
5253 {
5254 case IEMMODE_16BIT:
5255 IEM_MC_BEGIN(4, 2);
5256 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5257 IEM_MC_ARG(uint16_t, u16Src, 1);
5258 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5259 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5261
5262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5264 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5265 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5266 IEM_MC_FETCH_EFLAGS(EFlags);
5267 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5268 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5269
5270 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5271 IEM_MC_COMMIT_EFLAGS(EFlags);
5272 IEM_MC_ADVANCE_RIP();
5273 IEM_MC_END();
5274 return VINF_SUCCESS;
5275
5276 case IEMMODE_32BIT:
5277 IEM_MC_BEGIN(4, 2);
5278 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5279 IEM_MC_ARG(uint32_t, u32Src, 1);
5280 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5281 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5283
5284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5286 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5287 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5288 IEM_MC_FETCH_EFLAGS(EFlags);
5289 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5290 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5291
5292 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5293 IEM_MC_COMMIT_EFLAGS(EFlags);
5294 IEM_MC_ADVANCE_RIP();
5295 IEM_MC_END();
5296 return VINF_SUCCESS;
5297
5298 case IEMMODE_64BIT:
5299 IEM_MC_BEGIN(4, 2);
5300 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5301 IEM_MC_ARG(uint64_t, u64Src, 1);
5302 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5303 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5305
5306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5308 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5309 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5310 IEM_MC_FETCH_EFLAGS(EFlags);
5311 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5312 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5313
5314 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5315 IEM_MC_COMMIT_EFLAGS(EFlags);
5316 IEM_MC_ADVANCE_RIP();
5317 IEM_MC_END();
5318 return VINF_SUCCESS;
5319
5320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5321 }
5322 }
5323}
5324
5325
5326
5327/** Opcode 0x0f 0xa4. */
5328FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5329{
5330 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5331 IEMOP_HLP_MIN_386();
5332 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5333}
5334
5335
5336/** Opcode 0x0f 0xa5. */
5337FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5338{
5339 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5340 IEMOP_HLP_MIN_386();
5341 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5342}
5343
5344
5345/** Opcode 0x0f 0xa8. */
5346FNIEMOP_DEF(iemOp_push_gs)
5347{
5348 IEMOP_MNEMONIC(push_gs, "push gs");
5349 IEMOP_HLP_MIN_386();
5350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5351 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5352}
5353
5354
5355/** Opcode 0x0f 0xa9. */
5356FNIEMOP_DEF(iemOp_pop_gs)
5357{
5358 IEMOP_MNEMONIC(pop_gs, "pop gs");
5359 IEMOP_HLP_MIN_386();
5360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5361 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5362}
5363
5364
5365/** Opcode 0x0f 0xaa. */
5366FNIEMOP_STUB(iemOp_rsm);
5367//IEMOP_HLP_MIN_386();
5368
5369
5370/** Opcode 0x0f 0xab. */
5371FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5372{
5373 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5374 IEMOP_HLP_MIN_386();
5375 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5376}
5377
5378
5379/** Opcode 0x0f 0xac. */
5380FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5381{
5382 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5383 IEMOP_HLP_MIN_386();
5384 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5385}
5386
5387
5388/** Opcode 0x0f 0xad. */
5389FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5390{
5391 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5392 IEMOP_HLP_MIN_386();
5393 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5394}
5395
5396
5397/** Opcode 0x0f 0xae mem/0. */
5398FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5399{
5400 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5401 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5402 return IEMOP_RAISE_INVALID_OPCODE();
5403
5404 IEM_MC_BEGIN(3, 1);
5405 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5406 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5407 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5410 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5411 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5412 IEM_MC_END();
5413 return VINF_SUCCESS;
5414}
5415
5416
5417/** Opcode 0x0f 0xae mem/1. */
5418FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5419{
5420 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5421 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5422 return IEMOP_RAISE_INVALID_OPCODE();
5423
5424 IEM_MC_BEGIN(3, 1);
5425 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5426 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5427 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5430 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5431 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5432 IEM_MC_END();
5433 return VINF_SUCCESS;
5434}
5435
5436
5437/** Opcode 0x0f 0xae mem/2. */
5438FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5439
5440/** Opcode 0x0f 0xae mem/3. */
5441FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5442
5443/** Opcode 0x0f 0xae mem/4. */
5444FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5445
5446/** Opcode 0x0f 0xae mem/5. */
5447FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5448
5449/** Opcode 0x0f 0xae mem/6. */
5450FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5451
5452/** Opcode 0x0f 0xae mem/7. */
5453FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5454
5455
5456/** Opcode 0x0f 0xae 11b/5. */
5457FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5458{
5459 RT_NOREF_PV(bRm);
5460 IEMOP_MNEMONIC(lfence, "lfence");
5461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5462 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5463 return IEMOP_RAISE_INVALID_OPCODE();
5464
5465 IEM_MC_BEGIN(0, 0);
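    /* If the host lacks SSE2, fall back on a substitute fence; presumably a
       locked operation that provides at least as strong ordering. */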
5466 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5467 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5468 else
5469 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5470 IEM_MC_ADVANCE_RIP();
5471 IEM_MC_END();
5472 return VINF_SUCCESS;
5473}
5474
5475
5476/** Opcode 0x0f 0xae 11b/6. */
5477FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5478{
5479 RT_NOREF_PV(bRm);
5480 IEMOP_MNEMONIC(mfence, "mfence");
5481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5482 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5483 return IEMOP_RAISE_INVALID_OPCODE();
5484
5485 IEM_MC_BEGIN(0, 0);
5486 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5487 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5488 else
5489 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5490 IEM_MC_ADVANCE_RIP();
5491 IEM_MC_END();
5492 return VINF_SUCCESS;
5493}
5494
5495
5496/** Opcode 0x0f 0xae 11b/7. */
5497FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5498{
5499 RT_NOREF_PV(bRm);
5500 IEMOP_MNEMONIC(sfence, "sfence");
5501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5502 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5503 return IEMOP_RAISE_INVALID_OPCODE();
5504
5505 IEM_MC_BEGIN(0, 0);
5506 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5507 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5508 else
5509 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5510 IEM_MC_ADVANCE_RIP();
5511 IEM_MC_END();
5512 return VINF_SUCCESS;
5513}
5514
5515
5516/** Opcode 0xf3 0x0f 0xae 11b/0. */
5517FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5518
5519/** Opcode 0xf3 0x0f 0xae 11b/1. */
5520FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5521
5522/** Opcode 0xf3 0x0f 0xae 11b/2. */
5523FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5524
5525/** Opcode 0xf3 0x0f 0xae 11b/3. */
5526FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5527
5528
5529/** Opcode 0x0f 0xae. */
5530FNIEMOP_DEF(iemOp_Grp15)
5531{
5532 IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
5533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5534 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5535 {
5536 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5537 {
5538 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5539 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5540 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5541 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5542 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5543 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5544 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt, bRm);
5545 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5546 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5547 }
5548 }
5549 else
5550 {
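        /* Register forms (mod == 3): the mandatory prefix picks between the
           fences (no prefix) and the FS/GS base accessors (F3). */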
5551 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5552 {
5553 case 0:
5554 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5555 {
5556 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5557 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5558 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5559 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5560 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5561 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5562 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5563 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5565 }
5566 break;
5567
5568 case IEM_OP_PRF_REPZ:
5569 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5570 {
5571 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5572 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5573 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5574 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5575 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5576 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5577 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5578 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5580 }
5581 break;
5582
5583 default:
5584 return IEMOP_RAISE_INVALID_OPCODE();
5585 }
5586 }
5587}
5588
5589
5590/** Opcode 0x0f 0xaf. */
5591FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5592{
5593 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5594 IEMOP_HLP_MIN_386();
5595 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5596 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5597}
5598
5599
5600/** Opcode 0x0f 0xb0. */
5601FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5602{
5603 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5604 IEMOP_HLP_MIN_486();
5605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5606
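    /* CMPXCHG: if AL == [dest] then ZF=1 and [dest] := src, otherwise ZF=0
       and AL := [dest]; the other arithmetic flags are set as by CMP. */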
5607 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5608 {
5609 IEMOP_HLP_DONE_DECODING();
5610 IEM_MC_BEGIN(4, 0);
5611 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5612 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5613 IEM_MC_ARG(uint8_t, u8Src, 2);
5614 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5615
5616 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5617 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5618 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5619 IEM_MC_REF_EFLAGS(pEFlags);
5620 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5621 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5622 else
5623 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5624
5625 IEM_MC_ADVANCE_RIP();
5626 IEM_MC_END();
5627 }
5628 else
5629 {
5630 IEM_MC_BEGIN(4, 3);
5631 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5632 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5633 IEM_MC_ARG(uint8_t, u8Src, 2);
5634 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5635 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5636 IEM_MC_LOCAL(uint8_t, u8Al);
5637
5638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5639 IEMOP_HLP_DONE_DECODING();
5640 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5641 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5642 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5643 IEM_MC_FETCH_EFLAGS(EFlags);
5644 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5645 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5646 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5647 else
5648 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5649
5650 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5651 IEM_MC_COMMIT_EFLAGS(EFlags);
5652 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5653 IEM_MC_ADVANCE_RIP();
5654 IEM_MC_END();
5655 }
5656 return VINF_SUCCESS;
5657}
5658
5659/** Opcode 0x0f 0xb1. */
5660FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5661{
5662 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5663 IEMOP_HLP_MIN_486();
5664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5665
5666 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5667 {
5668 IEMOP_HLP_DONE_DECODING();
5669 switch (pVCpu->iem.s.enmEffOpSize)
5670 {
5671 case IEMMODE_16BIT:
5672 IEM_MC_BEGIN(4, 0);
5673 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5674 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5675 IEM_MC_ARG(uint16_t, u16Src, 2);
5676 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5677
5678 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5679 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5680 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5681 IEM_MC_REF_EFLAGS(pEFlags);
5682 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5683 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5684 else
5685 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5686
5687 IEM_MC_ADVANCE_RIP();
5688 IEM_MC_END();
5689 return VINF_SUCCESS;
5690
5691 case IEMMODE_32BIT:
5692 IEM_MC_BEGIN(4, 0);
5693 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5694 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5695 IEM_MC_ARG(uint32_t, u32Src, 2);
5696 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5697
5698 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5699 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5700 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5701 IEM_MC_REF_EFLAGS(pEFlags);
5702 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5703 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5704 else
5705 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5706
5707 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5708 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5709 IEM_MC_ADVANCE_RIP();
5710 IEM_MC_END();
5711 return VINF_SUCCESS;
5712
5713 case IEMMODE_64BIT:
5714 IEM_MC_BEGIN(4, 0);
5715 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5716 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
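                /* On 32-bit hosts the assembly helper takes the 64-bit source
                   by reference rather than by value. */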
5717#ifdef RT_ARCH_X86
5718 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5719#else
5720 IEM_MC_ARG(uint64_t, u64Src, 2);
5721#endif
5722 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5723
5724 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5725 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5726 IEM_MC_REF_EFLAGS(pEFlags);
5727#ifdef RT_ARCH_X86
5728 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5729 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5730 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5731 else
5732 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5733#else
5734 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5735 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5736 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5737 else
5738 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5739#endif
5740
5741 IEM_MC_ADVANCE_RIP();
5742 IEM_MC_END();
5743 return VINF_SUCCESS;
5744
5745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5746 }
5747 }
5748 else
5749 {
5750 switch (pVCpu->iem.s.enmEffOpSize)
5751 {
5752 case IEMMODE_16BIT:
5753 IEM_MC_BEGIN(4, 3);
5754 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5755 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5756 IEM_MC_ARG(uint16_t, u16Src, 2);
5757 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5759 IEM_MC_LOCAL(uint16_t, u16Ax);
5760
5761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5762 IEMOP_HLP_DONE_DECODING();
5763 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5764 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5765 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5766 IEM_MC_FETCH_EFLAGS(EFlags);
5767 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5768 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5769 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5770 else
5771 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5772
5773 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5774 IEM_MC_COMMIT_EFLAGS(EFlags);
5775 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5776 IEM_MC_ADVANCE_RIP();
5777 IEM_MC_END();
5778 return VINF_SUCCESS;
5779
5780 case IEMMODE_32BIT:
5781 IEM_MC_BEGIN(4, 3);
5782 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5783 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5784 IEM_MC_ARG(uint32_t, u32Src, 2);
5785 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5787 IEM_MC_LOCAL(uint32_t, u32Eax);
5788
5789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5790 IEMOP_HLP_DONE_DECODING();
5791 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5792 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5793 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5794 IEM_MC_FETCH_EFLAGS(EFlags);
5795 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5796 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5797 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5798 else
5799 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5800
5801 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5802 IEM_MC_COMMIT_EFLAGS(EFlags);
5803 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5804 IEM_MC_ADVANCE_RIP();
5805 IEM_MC_END();
5806 return VINF_SUCCESS;
5807
5808 case IEMMODE_64BIT:
5809 IEM_MC_BEGIN(4, 3);
5810 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5811 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5812#ifdef RT_ARCH_X86
5813 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5814#else
5815 IEM_MC_ARG(uint64_t, u64Src, 2);
5816#endif
5817 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5819 IEM_MC_LOCAL(uint64_t, u64Rax);
5820
5821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5822 IEMOP_HLP_DONE_DECODING();
5823 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5824 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5825 IEM_MC_FETCH_EFLAGS(EFlags);
5826 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5827#ifdef RT_ARCH_X86
5828 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5829 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5830 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5831 else
5832 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5833#else
5834 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5835 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5836 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5837 else
5838 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5839#endif
5840
5841 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5842 IEM_MC_COMMIT_EFLAGS(EFlags);
5843 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5844 IEM_MC_ADVANCE_RIP();
5845 IEM_MC_END();
5846 return VINF_SUCCESS;
5847
5848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5849 }
5850 }
5851}
5852
5853
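/**
 * Common worker for LSS, LFS and LGS: fetches a far pointer (a 16-bit selector
 * following a 16/32/64-bit offset) from memory and hands it to
 * iemCImpl_load_SReg_Greg for loading into the segment register and the
 * general register.
 */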
5854FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5855{
5856 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5857 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5858
5859 switch (pVCpu->iem.s.enmEffOpSize)
5860 {
5861 case IEMMODE_16BIT:
5862 IEM_MC_BEGIN(5, 1);
5863 IEM_MC_ARG(uint16_t, uSel, 0);
5864 IEM_MC_ARG(uint16_t, offSeg, 1);
5865 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5866 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5867 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5868 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5871 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5872 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5873 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5874 IEM_MC_END();
5875 return VINF_SUCCESS;
5876
5877 case IEMMODE_32BIT:
5878 IEM_MC_BEGIN(5, 1);
5879 IEM_MC_ARG(uint16_t, uSel, 0);
5880 IEM_MC_ARG(uint32_t, offSeg, 1);
5881 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5882 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5883 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5884 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5887 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5888 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5889 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5890 IEM_MC_END();
5891 return VINF_SUCCESS;
5892
5893 case IEMMODE_64BIT:
5894 IEM_MC_BEGIN(5, 1);
5895 IEM_MC_ARG(uint16_t, uSel, 0);
5896 IEM_MC_ARG(uint64_t, offSeg, 1);
5897 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5898 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5899 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5900 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5903 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
5904 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5905 else
5906 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5907 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5908 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5909 IEM_MC_END();
5910 return VINF_SUCCESS;
5911
5912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5913 }
5914}
5915
5916
5917/** Opcode 0x0f 0xb2. */
5918FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5919{
5920 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5921 IEMOP_HLP_MIN_386();
5922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5923 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5924 return IEMOP_RAISE_INVALID_OPCODE();
5925 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5926}
5927
5928
5929/** Opcode 0x0f 0xb3. */
5930FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5931{
5932 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5933 IEMOP_HLP_MIN_386();
5934 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5935}
5936
5937
5938/** Opcode 0x0f 0xb4. */
5939FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5940{
5941 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5942 IEMOP_HLP_MIN_386();
5943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5944 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5945 return IEMOP_RAISE_INVALID_OPCODE();
5946 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5947}
5948
5949
5950/** Opcode 0x0f 0xb5. */
5951FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5952{
5953 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5954 IEMOP_HLP_MIN_386();
5955 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5956 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5957 return IEMOP_RAISE_INVALID_OPCODE();
5958 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5959}
5960
5961
5962/** Opcode 0x0f 0xb6. */
5963FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5964{
5965 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5966 IEMOP_HLP_MIN_386();
5967
5968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5969
5970 /*
5971 * If rm is denoting a register, no more instruction bytes.
5972 */
5973 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5974 {
5975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5976 switch (pVCpu->iem.s.enmEffOpSize)
5977 {
5978 case IEMMODE_16BIT:
5979 IEM_MC_BEGIN(0, 1);
5980 IEM_MC_LOCAL(uint16_t, u16Value);
5981 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5982 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5983 IEM_MC_ADVANCE_RIP();
5984 IEM_MC_END();
5985 return VINF_SUCCESS;
5986
5987 case IEMMODE_32BIT:
5988 IEM_MC_BEGIN(0, 1);
5989 IEM_MC_LOCAL(uint32_t, u32Value);
5990 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5991 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5992 IEM_MC_ADVANCE_RIP();
5993 IEM_MC_END();
5994 return VINF_SUCCESS;
5995
5996 case IEMMODE_64BIT:
5997 IEM_MC_BEGIN(0, 1);
5998 IEM_MC_LOCAL(uint64_t, u64Value);
5999 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6000 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6001 IEM_MC_ADVANCE_RIP();
6002 IEM_MC_END();
6003 return VINF_SUCCESS;
6004
6005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6006 }
6007 }
6008 else
6009 {
6010 /*
6011 * We're loading a register from memory.
6012 */
6013 switch (pVCpu->iem.s.enmEffOpSize)
6014 {
6015 case IEMMODE_16BIT:
6016 IEM_MC_BEGIN(0, 2);
6017 IEM_MC_LOCAL(uint16_t, u16Value);
6018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6021 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6022 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6023 IEM_MC_ADVANCE_RIP();
6024 IEM_MC_END();
6025 return VINF_SUCCESS;
6026
6027 case IEMMODE_32BIT:
6028 IEM_MC_BEGIN(0, 2);
6029 IEM_MC_LOCAL(uint32_t, u32Value);
6030 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6033 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6034 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6035 IEM_MC_ADVANCE_RIP();
6036 IEM_MC_END();
6037 return VINF_SUCCESS;
6038
6039 case IEMMODE_64BIT:
6040 IEM_MC_BEGIN(0, 2);
6041 IEM_MC_LOCAL(uint64_t, u64Value);
6042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6045 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6046 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6047 IEM_MC_ADVANCE_RIP();
6048 IEM_MC_END();
6049 return VINF_SUCCESS;
6050
6051 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6052 }
6053 }
6054}
6055
6056
6057/** Opcode 0x0f 0xb7. */
6058FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6059{
6060 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6061 IEMOP_HLP_MIN_386();
6062
6063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6064
6065 /** @todo Not entirely sure how the operand size prefix is handled here,
6066 * assuming that it will be ignored. Would be nice to have a few
6067 * tests for this. */
6068 /*
6069 * If rm is denoting a register, no more instruction bytes.
6070 */
6071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6072 {
6073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6074 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6075 {
6076 IEM_MC_BEGIN(0, 1);
6077 IEM_MC_LOCAL(uint32_t, u32Value);
6078 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6079 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6080 IEM_MC_ADVANCE_RIP();
6081 IEM_MC_END();
6082 }
6083 else
6084 {
6085 IEM_MC_BEGIN(0, 1);
6086 IEM_MC_LOCAL(uint64_t, u64Value);
6087 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6088 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6089 IEM_MC_ADVANCE_RIP();
6090 IEM_MC_END();
6091 }
6092 }
6093 else
6094 {
6095 /*
6096 * We're loading a register from memory.
6097 */
6098 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6099 {
6100 IEM_MC_BEGIN(0, 2);
6101 IEM_MC_LOCAL(uint32_t, u32Value);
6102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6105 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6106 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6107 IEM_MC_ADVANCE_RIP();
6108 IEM_MC_END();
6109 }
6110 else
6111 {
6112 IEM_MC_BEGIN(0, 2);
6113 IEM_MC_LOCAL(uint64_t, u64Value);
6114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6117 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6118 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6119 IEM_MC_ADVANCE_RIP();
6120 IEM_MC_END();
6121 }
6122 }
6123 return VINF_SUCCESS;
6124}
6125
6126
6127/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6128FNIEMOP_UD_STUB(iemOp_jmpe);
6129/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6130FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6131
6132
6133/** Opcode 0x0f 0xb9. */
6134FNIEMOP_DEF(iemOp_Grp10)
6135{
6136 Log(("iemOp_Grp10 -> #UD\n"));
6137 return IEMOP_RAISE_INVALID_OPCODE();
6138}
6139
6140
6141/** Opcode 0x0f 0xba. */
6142FNIEMOP_DEF(iemOp_Grp8)
6143{
6144 IEMOP_HLP_MIN_386();
6145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6146 PCIEMOPBINSIZES pImpl;
6147 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6148 {
6149 case 0: case 1: case 2: case 3:
6150 return IEMOP_RAISE_INVALID_OPCODE();
6151 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6152 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6153 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6154 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6156 }
6157 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6158
6159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6160 {
6161 /* register destination. */
6162 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6164
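        /* The immediate bit offset is masked to the operand width below
           (modulo 16, 32 or 64). */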
6165 switch (pVCpu->iem.s.enmEffOpSize)
6166 {
6167 case IEMMODE_16BIT:
6168 IEM_MC_BEGIN(3, 0);
6169 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6170 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6171 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6172
6173 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6174 IEM_MC_REF_EFLAGS(pEFlags);
6175 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6176
6177 IEM_MC_ADVANCE_RIP();
6178 IEM_MC_END();
6179 return VINF_SUCCESS;
6180
6181 case IEMMODE_32BIT:
6182 IEM_MC_BEGIN(3, 0);
6183 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6184 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6185 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6186
6187 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6188 IEM_MC_REF_EFLAGS(pEFlags);
6189 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6190
6191 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6192 IEM_MC_ADVANCE_RIP();
6193 IEM_MC_END();
6194 return VINF_SUCCESS;
6195
6196 case IEMMODE_64BIT:
6197 IEM_MC_BEGIN(3, 0);
6198 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6199 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6200 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6201
6202 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6203 IEM_MC_REF_EFLAGS(pEFlags);
6204 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6205
6206 IEM_MC_ADVANCE_RIP();
6207 IEM_MC_END();
6208 return VINF_SUCCESS;
6209
6210 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6211 }
6212 }
6213 else
6214 {
6215 /* memory destination. */
6216
6217 uint32_t fAccess;
6218 if (pImpl->pfnLockedU16)
6219 fAccess = IEM_ACCESS_DATA_RW;
6220 else /* BT */
6221 fAccess = IEM_ACCESS_DATA_R;
6222
6223 /** @todo test negative bit offsets! */
6224 switch (pVCpu->iem.s.enmEffOpSize)
6225 {
6226 case IEMMODE_16BIT:
6227 IEM_MC_BEGIN(3, 1);
6228 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6229 IEM_MC_ARG(uint16_t, u16Src, 1);
6230 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6232
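                /* Note: the trailing 1 tells the effective address calculation
                   that one immediate byte follows the ModR/M bytes, which is
                   needed to get RIP-relative addressing right. */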
6233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6234 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6235 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6236 if (pImpl->pfnLockedU16)
6237 IEMOP_HLP_DONE_DECODING();
6238 else
6239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6240 IEM_MC_FETCH_EFLAGS(EFlags);
6241 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6242 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6243 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6244 else
6245 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6246 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6247
6248 IEM_MC_COMMIT_EFLAGS(EFlags);
6249 IEM_MC_ADVANCE_RIP();
6250 IEM_MC_END();
6251 return VINF_SUCCESS;
6252
6253 case IEMMODE_32BIT:
6254 IEM_MC_BEGIN(3, 1);
6255 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6256 IEM_MC_ARG(uint32_t, u32Src, 1);
6257 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6259
6260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6261 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6262 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6263 if (pImpl->pfnLockedU16)
6264 IEMOP_HLP_DONE_DECODING();
6265 else
6266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6267 IEM_MC_FETCH_EFLAGS(EFlags);
6268 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6269 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6270 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6271 else
6272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6273 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6274
6275 IEM_MC_COMMIT_EFLAGS(EFlags);
6276 IEM_MC_ADVANCE_RIP();
6277 IEM_MC_END();
6278 return VINF_SUCCESS;
6279
6280 case IEMMODE_64BIT:
6281 IEM_MC_BEGIN(3, 1);
6282 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6283 IEM_MC_ARG(uint64_t, u64Src, 1);
6284 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6286
6287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6288 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6289 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6290 if (pImpl->pfnLockedU16)
6291 IEMOP_HLP_DONE_DECODING();
6292 else
6293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6294 IEM_MC_FETCH_EFLAGS(EFlags);
6295 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6296 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6297 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6298 else
6299 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6300 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6301
6302 IEM_MC_COMMIT_EFLAGS(EFlags);
6303 IEM_MC_ADVANCE_RIP();
6304 IEM_MC_END();
6305 return VINF_SUCCESS;
6306
6307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6308 }
6309 }
6310
6311}
6312
6313
6314/** Opcode 0x0f 0xbb. */
6315FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6316{
6317 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6318 IEMOP_HLP_MIN_386();
6319 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6320}
6321
6322
6323/** Opcode 0x0f 0xbc. */
6324FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6325{
6326 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6327 IEMOP_HLP_MIN_386();
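    /* All flags other than ZF are documented as undefined for BSF, so tell
       the verifier to ignore them. */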
6328 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6329 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6330}
6331
6332
6333/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6334FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6335
6336
6337/** Opcode 0x0f 0xbd. */
6338FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6339{
6340 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6341 IEMOP_HLP_MIN_386();
6342 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6343 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6344}
6345
6346
6347/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6348FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6349
6350
6351/** Opcode 0x0f 0xbe. */
6352FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6353{
6354 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6355 IEMOP_HLP_MIN_386();
6356
6357 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6358
6359 /*
6360 * If rm is denoting a register, no more instruction bytes.
6361 */
6362 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6363 {
6364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6365 switch (pVCpu->iem.s.enmEffOpSize)
6366 {
6367 case IEMMODE_16BIT:
6368 IEM_MC_BEGIN(0, 1);
6369 IEM_MC_LOCAL(uint16_t, u16Value);
6370 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6371 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6372 IEM_MC_ADVANCE_RIP();
6373 IEM_MC_END();
6374 return VINF_SUCCESS;
6375
6376 case IEMMODE_32BIT:
6377 IEM_MC_BEGIN(0, 1);
6378 IEM_MC_LOCAL(uint32_t, u32Value);
6379 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6380 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6381 IEM_MC_ADVANCE_RIP();
6382 IEM_MC_END();
6383 return VINF_SUCCESS;
6384
6385 case IEMMODE_64BIT:
6386 IEM_MC_BEGIN(0, 1);
6387 IEM_MC_LOCAL(uint64_t, u64Value);
6388 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6389 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6390 IEM_MC_ADVANCE_RIP();
6391 IEM_MC_END();
6392 return VINF_SUCCESS;
6393
6394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6395 }
6396 }
6397 else
6398 {
6399 /*
6400 * We're loading a register from memory.
6401 */
6402 switch (pVCpu->iem.s.enmEffOpSize)
6403 {
6404 case IEMMODE_16BIT:
6405 IEM_MC_BEGIN(0, 2);
6406 IEM_MC_LOCAL(uint16_t, u16Value);
6407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6410 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6411 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6412 IEM_MC_ADVANCE_RIP();
6413 IEM_MC_END();
6414 return VINF_SUCCESS;
6415
6416 case IEMMODE_32BIT:
6417 IEM_MC_BEGIN(0, 2);
6418 IEM_MC_LOCAL(uint32_t, u32Value);
6419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6422 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6423 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6424 IEM_MC_ADVANCE_RIP();
6425 IEM_MC_END();
6426 return VINF_SUCCESS;
6427
6428 case IEMMODE_64BIT:
6429 IEM_MC_BEGIN(0, 2);
6430 IEM_MC_LOCAL(uint64_t, u64Value);
6431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6434 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6435 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6436 IEM_MC_ADVANCE_RIP();
6437 IEM_MC_END();
6438 return VINF_SUCCESS;
6439
6440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6441 }
6442 }
6443}
6444
6445
6446/** Opcode 0x0f 0xbf. */
6447FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6448{
6449 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6450 IEMOP_HLP_MIN_386();
6451
6452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6453
6454 /** @todo Not entirely sure how the operand size prefix is handled here,
6455 * assuming that it will be ignored. Would be nice to have a few
6456 * tests for this. */
6457 /*
6458 * If rm is denoting a register, no more instruction bytes.
6459 */
6460 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6461 {
6462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6463 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6464 {
6465 IEM_MC_BEGIN(0, 1);
6466 IEM_MC_LOCAL(uint32_t, u32Value);
6467 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6468 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6469 IEM_MC_ADVANCE_RIP();
6470 IEM_MC_END();
6471 }
6472 else
6473 {
6474 IEM_MC_BEGIN(0, 1);
6475 IEM_MC_LOCAL(uint64_t, u64Value);
6476 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6477 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6478 IEM_MC_ADVANCE_RIP();
6479 IEM_MC_END();
6480 }
6481 }
6482 else
6483 {
6484 /*
6485 * We're loading a register from memory.
6486 */
6487 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6488 {
6489 IEM_MC_BEGIN(0, 2);
6490 IEM_MC_LOCAL(uint32_t, u32Value);
6491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6494 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6495 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6496 IEM_MC_ADVANCE_RIP();
6497 IEM_MC_END();
6498 }
6499 else
6500 {
6501 IEM_MC_BEGIN(0, 2);
6502 IEM_MC_LOCAL(uint64_t, u64Value);
6503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6506 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6507 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6508 IEM_MC_ADVANCE_RIP();
6509 IEM_MC_END();
6510 }
6511 }
6512 return VINF_SUCCESS;
6513}
6514
6515
6516/** Opcode 0x0f 0xc0. */
6517FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6518{
6519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6520 IEMOP_HLP_MIN_486();
6521 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6522
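    /* XADD: exchanges the destination with the source register, storing the
       sum of the two in the destination; the flags are set by the addition. */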
6523 /*
6524 * If rm is denoting a register, no more instruction bytes.
6525 */
6526 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6527 {
6528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6529
6530 IEM_MC_BEGIN(3, 0);
6531 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6532 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6533 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6534
6535 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6536 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6537 IEM_MC_REF_EFLAGS(pEFlags);
6538 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6539
6540 IEM_MC_ADVANCE_RIP();
6541 IEM_MC_END();
6542 }
6543 else
6544 {
6545 /*
6546 * We're accessing memory.
6547 */
6548 IEM_MC_BEGIN(3, 3);
6549 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6550 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6551 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6552 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6554
6555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6556 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6557 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6558 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6559 IEM_MC_FETCH_EFLAGS(EFlags);
6560 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6561 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6562 else
6563 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6564
6565 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6566 IEM_MC_COMMIT_EFLAGS(EFlags);
6567 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6568 IEM_MC_ADVANCE_RIP();
6569 IEM_MC_END();
6570 return VINF_SUCCESS;
6571 }
6572 return VINF_SUCCESS;
6573}
6574
6575
6576/** Opcode 0x0f 0xc1. */
6577FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6578{
6579 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6580 IEMOP_HLP_MIN_486();
6581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6582
6583 /*
6584 * If rm is denoting a register, no more instruction bytes.
6585 */
6586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6587 {
6588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6589
6590 switch (pVCpu->iem.s.enmEffOpSize)
6591 {
6592 case IEMMODE_16BIT:
6593 IEM_MC_BEGIN(3, 0);
6594 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6595 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6596 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6597
6598 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6599 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6600 IEM_MC_REF_EFLAGS(pEFlags);
6601 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6602
6603 IEM_MC_ADVANCE_RIP();
6604 IEM_MC_END();
6605 return VINF_SUCCESS;
6606
6607 case IEMMODE_32BIT:
6608 IEM_MC_BEGIN(3, 0);
6609 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6610 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6611 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6612
6613 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6614 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6615 IEM_MC_REF_EFLAGS(pEFlags);
6616 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6617
6618 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6619 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6620 IEM_MC_ADVANCE_RIP();
6621 IEM_MC_END();
6622 return VINF_SUCCESS;
6623
6624 case IEMMODE_64BIT:
6625 IEM_MC_BEGIN(3, 0);
6626 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6627 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6628 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6629
6630 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6631 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6632 IEM_MC_REF_EFLAGS(pEFlags);
6633 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6634
6635 IEM_MC_ADVANCE_RIP();
6636 IEM_MC_END();
6637 return VINF_SUCCESS;
6638
6639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6640 }
6641 }
6642 else
6643 {
6644 /*
6645 * We're accessing memory.
6646 */
6647 switch (pVCpu->iem.s.enmEffOpSize)
6648 {
6649 case IEMMODE_16BIT:
6650 IEM_MC_BEGIN(3, 3);
6651 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6652 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6653 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6654 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6656
6657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6658 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6659 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6660 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6661 IEM_MC_FETCH_EFLAGS(EFlags);
6662 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6663 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6664 else
6665 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6666
6667 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6668 IEM_MC_COMMIT_EFLAGS(EFlags);
6669 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6670 IEM_MC_ADVANCE_RIP();
6671 IEM_MC_END();
6672 return VINF_SUCCESS;
6673
6674 case IEMMODE_32BIT:
6675 IEM_MC_BEGIN(3, 3);
6676 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6677 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6678 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6679 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6681
6682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6683 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6684 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6685 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6686 IEM_MC_FETCH_EFLAGS(EFlags);
6687 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6688 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6689 else
6690 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6691
6692 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6693 IEM_MC_COMMIT_EFLAGS(EFlags);
6694 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6695 IEM_MC_ADVANCE_RIP();
6696 IEM_MC_END();
6697 return VINF_SUCCESS;
6698
6699 case IEMMODE_64BIT:
6700 IEM_MC_BEGIN(3, 3);
6701 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6702 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6703 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6704 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6706
6707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6708 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6709 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6710 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6711 IEM_MC_FETCH_EFLAGS(EFlags);
6712 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6713 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6714 else
6715 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6716
6717 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6718 IEM_MC_COMMIT_EFLAGS(EFlags);
6719 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6720 IEM_MC_ADVANCE_RIP();
6721 IEM_MC_END();
6722 return VINF_SUCCESS;
6723
6724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6725 }
6726 }
6727}
6728
6729
6730/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
6731FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
6732/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
6733FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
6734/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
6735FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
6736/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
6737FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
6738
6739
6740/** Opcode 0x0f 0xc3. */
6741FNIEMOP_DEF(iemOp_movnti_My_Gy)
6742{
6743 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6744
6745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6746
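    /* MOVNTI is only a non-temporal store hint; emulating it as an ordinary
       store is an acceptable implementation of the hint. */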
6747 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6748 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6749 {
6750 switch (pVCpu->iem.s.enmEffOpSize)
6751 {
6752 case IEMMODE_32BIT:
6753 IEM_MC_BEGIN(0, 2);
6754 IEM_MC_LOCAL(uint32_t, u32Value);
6755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6756
6757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6759 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6760 return IEMOP_RAISE_INVALID_OPCODE();
6761
6762 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6763 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6764 IEM_MC_ADVANCE_RIP();
6765 IEM_MC_END();
6766 break;
6767
6768 case IEMMODE_64BIT:
6769 IEM_MC_BEGIN(0, 2);
6770 IEM_MC_LOCAL(uint64_t, u64Value);
6771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6772
6773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6775 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6776 return IEMOP_RAISE_INVALID_OPCODE();
6777
6778 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6779 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6780 IEM_MC_ADVANCE_RIP();
6781 IEM_MC_END();
6782 break;
6783
6784 case IEMMODE_16BIT:
6785 /** @todo check this form. */
6786 return IEMOP_RAISE_INVALID_OPCODE();
6787 }
6788 }
6789 else
6790 return IEMOP_RAISE_INVALID_OPCODE();
6791 return VINF_SUCCESS;
6792}
6793/* Opcode 0x66 0x0f 0xc3 - invalid */
6794/* Opcode 0xf3 0x0f 0xc3 - invalid */
6795/* Opcode 0xf2 0x0f 0xc3 - invalid */
6796
6797/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
6798FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
6799/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6800FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
6801/* Opcode 0xf3 0x0f 0xc4 - invalid */
6802/* Opcode 0xf2 0x0f 0xc4 - invalid */
6803
6804/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
6805FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
6806/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
6807FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
6808/* Opcode 0xf3 0x0f 0xc5 - invalid */
6809/* Opcode 0xf2 0x0f 0xc5 - invalid */
6810
6811/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
6812FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
6813/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6814FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
6815/* Opcode 0xf3 0x0f 0xc6 - invalid */
6816/* Opcode 0xf2 0x0f 0xc6 - invalid */
6817
6818
6819/** Opcode 0x0f 0xc7 !11/1. */
6820FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6821{
6822 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6823
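    /* CMPXCHG8B: if EDX:EAX == [mem64] then ZF=1 and [mem64] := ECX:EBX,
       otherwise ZF=0 and EDX:EAX := [mem64]. */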
6824 IEM_MC_BEGIN(4, 3);
6825 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6826 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6827 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6828 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6829 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6830 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6832
6833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6834 IEMOP_HLP_DONE_DECODING();
6835 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6836
6837 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6838 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6839 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6840
6841 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6842 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6843 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6844
6845 IEM_MC_FETCH_EFLAGS(EFlags);
6846 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6847 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6848 else
6849 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6850
6851 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6852 IEM_MC_COMMIT_EFLAGS(EFlags);
6853 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6854 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6855 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6856 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6857 IEM_MC_ENDIF();
6858 IEM_MC_ADVANCE_RIP();
6859
6860 IEM_MC_END();
6861 return VINF_SUCCESS;
6862}
6863
6864
6865/** Opcode REX.W 0x0f 0xc7 !11/1. */
6866FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6867{
6868 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
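    /* Like CMPXCHG8B, but on RDX:RAX / RCX:RBX against a 16-byte aligned
       [mem128]; requires REX.W and the CPUID CX16 feature. */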
6869 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6870 {
6871#if 0
6872 RT_NOREF(bRm);
6873 IEMOP_BITCH_ABOUT_STUB();
6874 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6875#else
6876 IEM_MC_BEGIN(4, 3);
6877 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6878 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6879 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6880 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6881 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6882 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6884
6885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6886 IEMOP_HLP_DONE_DECODING();
6887 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6888 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6889
6890 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6891 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6892 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6893
6894 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6895 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6896 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6897
6898 IEM_MC_FETCH_EFLAGS(EFlags);
6899# ifdef RT_ARCH_AMD64
6900 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6901 {
6902 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6903 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6904 else
6905 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6906 }
6907 else
6908# endif
6909 {
6910            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
6911                     accesses that are not atomic, which works fine in a uni-CPU guest
6912                     configuration (ignoring DMA). If guest SMP is active we have no choice
6913                     but to use a rendezvous callback here. Sigh. */
6914 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6915 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6916 else
6917 {
6918 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6919 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6920 }
6921 }
6922
6923 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6924 IEM_MC_COMMIT_EFLAGS(EFlags);
6925 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6926 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6927 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6928 IEM_MC_ENDIF();
6929 IEM_MC_ADVANCE_RIP();
6930
6931 IEM_MC_END();
6932 return VINF_SUCCESS;
6933#endif
6934 }
6935 Log(("cmpxchg16b -> #UD\n"));
6936 return IEMOP_RAISE_INVALID_OPCODE();
6937}
6938
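/*
 * A hypothetical sketch of what the non-atomic iemAImpl_cmpxchg16b_fallback
 * path amounts to (the name and shape below are illustrative).  Being a pair
 * of ordinary compares and stores, it is only safe for a single-vCPU guest,
 * which is why the SMP case above is routed through the rendezvous CIMPL.
 */
#if 0 /* illustrative only */
static void iemSketch_cmpxchg16b_fallback(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx,
                                          PCRTUINT128U pu128RbxRcx, uint32_t *pEFlags)
{
    if (   pu128Dst->s.Lo == pu128RaxRdx->s.Lo
        && pu128Dst->s.Hi == pu128RaxRdx->s.Hi)
    {
        pu128Dst->s.Lo = pu128RbxRcx->s.Lo;   /* success: write RCX:RBX to memory */
        pu128Dst->s.Hi = pu128RbxRcx->s.Hi;
        *pEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu128RaxRdx->s.Lo = pu128Dst->s.Lo;   /* failure: load memory into RDX:RAX */
        pu128RaxRdx->s.Hi = pu128Dst->s.Hi;
        *pEFlags &= ~X86_EFL_ZF;
    }
}
#endif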
6939
6940/** Opcode 0x0f 0xc7 11/6. */
6941FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6942
6943/** Opcode 0x0f 0xc7 !11/6. */
6944FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6945
6946/** Opcode 0x66 0x0f 0xc7 !11/6. */
6947FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6948
6949/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6950FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6951
6952/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6953FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6954
6955
6956/** Opcode 0x0f 0xc7. */
6957FNIEMOP_DEF(iemOp_Grp9)
6958{
6959 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6961 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6962 {
6963 case 0: case 2: case 3: case 4: case 5:
6964 return IEMOP_RAISE_INVALID_OPCODE();
6965 case 1:
6966 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6967 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6968 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6969 return IEMOP_RAISE_INVALID_OPCODE();
6970 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6971 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6972 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6973 case 6:
6974 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6975 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6976 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6977 {
6978 case 0:
6979 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6980 case IEM_OP_PRF_SIZE_OP:
6981 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6982 case IEM_OP_PRF_REPZ:
6983 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6984 default:
6985 return IEMOP_RAISE_INVALID_OPCODE();
6986 }
6987 case 7:
6988 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6989 {
6990 case 0:
6991 case IEM_OP_PRF_REPZ:
6992 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6993 default:
6994 return IEMOP_RAISE_INVALID_OPCODE();
6995 }
6996 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6997 }
6998}
6999
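/*
 * The group decoding above keys everything off the ModR/M byte.  As an
 * illustrative sketch of the field extraction (the real code uses the
 * X86_MODRM_* shift/mask constants seen above; this helper is hypothetical):
 */
#if 0 /* illustrative only */
static void iemSketch_DecodeModRm(uint8_t bRm, uint8_t *pMod, uint8_t *pReg, uint8_t *pRm)
{
    *pMod = bRm >> 6;           /* 3 means register operand, 0..2 mean memory */
    *pReg = (bRm >> 3) & 7;     /* the /digit selector used by groups like Grp9 */
    *pRm  = bRm & 7;            /* register number or memory base encoding */
}
#endif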
7000
7001/**
7002 * Common 'bswap register' helper.
7003 */
7004FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7005{
7006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7007 switch (pVCpu->iem.s.enmEffOpSize)
7008 {
7009 case IEMMODE_16BIT:
7010 IEM_MC_BEGIN(1, 0);
7011 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7012 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7013 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7014 IEM_MC_ADVANCE_RIP();
7015 IEM_MC_END();
7016 return VINF_SUCCESS;
7017
7018 case IEMMODE_32BIT:
7019 IEM_MC_BEGIN(1, 0);
7020 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7021 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7022 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7023 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7024 IEM_MC_ADVANCE_RIP();
7025 IEM_MC_END();
7026 return VINF_SUCCESS;
7027
7028 case IEMMODE_64BIT:
7029 IEM_MC_BEGIN(1, 0);
7030 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7031 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7032 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7033 IEM_MC_ADVANCE_RIP();
7034 IEM_MC_END();
7035 return VINF_SUCCESS;
7036
7037 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7038 }
7039}
7040
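/*
 * A minimal sketch of the byte reversal the iemAImpl_bswap_u32 worker performs
 * (illustrative name and implementation; the 16-bit form is left entirely to
 * its worker since BSWAP with a 16-bit operand is undefined on real CPUs):
 */
#if 0 /* illustrative only */
static uint32_t iemSketch_bswap_u32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif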
7041
7042/** Opcode 0x0f 0xc8. */
7043FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7044{
7045 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7046    /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7047             prefix; it appears REX.B is actually the correct prefix.  For a parallel
7048             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7049 IEMOP_HLP_MIN_486();
7050 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7051}
7052
7053
7054/** Opcode 0x0f 0xc9. */
7055FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7056{
7057 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7058 IEMOP_HLP_MIN_486();
7059 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7060}
7061
7062
7063/** Opcode 0x0f 0xca. */
7064FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7065{
7066    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7067 IEMOP_HLP_MIN_486();
7068 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7069}
7070
7071
7072/** Opcode 0x0f 0xcb. */
7073FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7074{
7075    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7076 IEMOP_HLP_MIN_486();
7077 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7078}
7079
7080
7081/** Opcode 0x0f 0xcc. */
7082FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7083{
7084 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7085 IEMOP_HLP_MIN_486();
7086 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7087}
7088
7089
7090/** Opcode 0x0f 0xcd. */
7091FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7092{
7093 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7094 IEMOP_HLP_MIN_486();
7095 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7096}
7097
7098
7099/** Opcode 0x0f 0xce. */
7100FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7101{
7102 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7103 IEMOP_HLP_MIN_486();
7104 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7105}
7106
7107
7108/** Opcode 0x0f 0xcf. */
7109FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7110{
7111 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7112 IEMOP_HLP_MIN_486();
7113 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7114}
7115
7116
7117/* Opcode 0x0f 0xd0 - invalid */
7118/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7119FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7120/* Opcode 0xf3 0x0f 0xd0 - invalid */
7121/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7122FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7123
7124/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7125FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7126/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7127FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7128/* Opcode 0xf3 0x0f 0xd1 - invalid */
7129/* Opcode 0xf2 0x0f 0xd1 - invalid */
7130
7131/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7132FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7133/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7134FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7135/* Opcode 0xf3 0x0f 0xd2 - invalid */
7136/* Opcode 0xf2 0x0f 0xd2 - invalid */
7137
7138/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7139FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7140/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7141FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7142/* Opcode 0xf3 0x0f 0xd3 - invalid */
7143/* Opcode 0xf2 0x0f 0xd3 - invalid */
7144
7145/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7146FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7147/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7148FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7149/* Opcode 0xf3 0x0f 0xd4 - invalid */
7150/* Opcode 0xf2 0x0f 0xd4 - invalid */
7151
7152/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7153FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7154/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7155FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7156/* Opcode 0xf3 0x0f 0xd5 - invalid */
7157/* Opcode 0xf2 0x0f 0xd5 - invalid */
7158
7159/* Opcode 0x0f 0xd6 - invalid */
7160/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7161FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7162/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7163FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7164/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7165FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7166#if 0
7167FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7168{
7169 /* Docs says register only. */
7170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7171
7172 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7173 {
7174 case IEM_OP_PRF_SIZE_OP: /* SSE */
7175 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7176 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7177 IEM_MC_BEGIN(2, 0);
7178 IEM_MC_ARG(uint64_t *, pDst, 0);
7179 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7180 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7181 IEM_MC_PREPARE_SSE_USAGE();
7182 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7183 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7184 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7185 IEM_MC_ADVANCE_RIP();
7186 IEM_MC_END();
7187 return VINF_SUCCESS;
7188
7189 case 0: /* MMX */
7190            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7191 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7192 IEM_MC_BEGIN(2, 0);
7193 IEM_MC_ARG(uint64_t *, pDst, 0);
7194 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7195 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7196 IEM_MC_PREPARE_FPU_USAGE();
7197 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7198 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7199 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7200 IEM_MC_ADVANCE_RIP();
7201 IEM_MC_END();
7202 return VINF_SUCCESS;
7203
7204 default:
7205 return IEMOP_RAISE_INVALID_OPCODE();
7206 }
7207}
7208#endif
7209
7210
7211/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7212FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7213{
7214    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7215    /** @todo testcase: Check that the instruction implicitly clears the high
7216     *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
7217     *        and the opcode is modified to work with the whole width (not
7218     *        just 128 bits). */
7219    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7220 /* Docs says register only. */
7221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7222 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7223 {
7224 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7225 IEM_MC_BEGIN(2, 0);
7226 IEM_MC_ARG(uint64_t *, pDst, 0);
7227 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7228 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7229 IEM_MC_PREPARE_FPU_USAGE();
7230 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7231 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7232 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7233 IEM_MC_ADVANCE_RIP();
7234 IEM_MC_END();
7235 return VINF_SUCCESS;
7236 }
7237 return IEMOP_RAISE_INVALID_OPCODE();
7238}
7239
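/*
 * An illustrative sketch of the 64-bit PMOVMSKB operation implemented by the
 * iemAImpl_pmovmskb_u64 worker (hypothetical name and shape): gather the most
 * significant bit of each of the eight source bytes into bits 7:0 of the
 * destination, zeroing the rest.
 */
#if 0 /* illustrative only */
static uint64_t iemSketch_pmovmskb_u64(uint64_t uSrc)
{
    uint64_t uDst = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        uDst |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return uDst;
}
#endif
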
7240 /** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7241FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7242{
7243    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7244    /** @todo testcase: Check that the instruction implicitly clears the high
7245     *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
7246     *        and the opcode is modified to work with the whole width (not
7247     *        just 128 bits). */
7248    IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd, Ux");
7249 /* Docs says register only. */
7250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7251 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7252 {
7253 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7254 IEM_MC_BEGIN(2, 0);
7255 IEM_MC_ARG(uint64_t *, pDst, 0);
7256 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7257 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7258 IEM_MC_PREPARE_SSE_USAGE();
7259 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7260 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7261 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7262 IEM_MC_ADVANCE_RIP();
7263 IEM_MC_END();
7264 return VINF_SUCCESS;
7265 }
7266 return IEMOP_RAISE_INVALID_OPCODE();
7267}
7268
7269/* Opcode 0xf3 0x0f 0xd7 - invalid */
7270/* Opcode 0xf2 0x0f 0xd7 - invalid */
7271
7272
7273/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7274FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7275/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7276FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7277/* Opcode 0xf3 0x0f 0xd8 - invalid */
7278/* Opcode 0xf2 0x0f 0xd8 - invalid */
7279
7280/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7281FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7282/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7283FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7284/* Opcode 0xf3 0x0f 0xd9 - invalid */
7285/* Opcode 0xf2 0x0f 0xd9 - invalid */
7286
7287/** Opcode 0x0f 0xda - pminub Pq, Qq */
7288FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7289/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7290FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7291/* Opcode 0xf3 0x0f 0xda - invalid */
7292/* Opcode 0xf2 0x0f 0xda - invalid */
7293
7294/** Opcode 0x0f 0xdb - pand Pq, Qq */
7295FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7296/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7297FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7298/* Opcode 0xf3 0x0f 0xdb - invalid */
7299/* Opcode 0xf2 0x0f 0xdb - invalid */
7300
7301/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7302FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7303/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7304FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7305/* Opcode 0xf3 0x0f 0xdc - invalid */
7306/* Opcode 0xf2 0x0f 0xdc - invalid */
7307
7308/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7309FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7310/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7311FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7312/* Opcode 0xf3 0x0f 0xdd - invalid */
7313/* Opcode 0xf2 0x0f 0xdd - invalid */
7314
7315/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7316FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7317/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7318FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7319/* Opcode 0xf3 0x0f 0xde - invalid */
7320/* Opcode 0xf2 0x0f 0xde - invalid */
7321
7322/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7323FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7324/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7325FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7326/* Opcode 0xf3 0x0f 0xdf - invalid */
7327/* Opcode 0xf2 0x0f 0xdf - invalid */
7328
7329/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7330FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7331/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7332FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7333/* Opcode 0xf3 0x0f 0xe0 - invalid */
7334/* Opcode 0xf2 0x0f 0xe0 - invalid */
7335
7336/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7337FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7338/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7339FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7340/* Opcode 0xf3 0x0f 0xe1 - invalid */
7341/* Opcode 0xf2 0x0f 0xe1 - invalid */
7342
7343/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7344FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7345/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7346FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7347/* Opcode 0xf3 0x0f 0xe2 - invalid */
7348/* Opcode 0xf2 0x0f 0xe2 - invalid */
7349
7350/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7351FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7352/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7353FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7354/* Opcode 0xf3 0x0f 0xe3 - invalid */
7355/* Opcode 0xf2 0x0f 0xe3 - invalid */
7356
7357/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7358FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7359/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7360FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7361/* Opcode 0xf3 0x0f 0xe4 - invalid */
7362/* Opcode 0xf2 0x0f 0xe4 - invalid */
7363
7364/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7365FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7366/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7367FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7368/* Opcode 0xf3 0x0f 0xe5 - invalid */
7369/* Opcode 0xf2 0x0f 0xe5 - invalid */
7370
7371/* Opcode 0x0f 0xe6 - invalid */
7372/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7373FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7374/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7375FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7376/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7377FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7378
7379
7380/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7381FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7382{
7383 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7385 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7386 {
7387 /* Register, memory. */
7388 IEM_MC_BEGIN(0, 2);
7389 IEM_MC_LOCAL(uint64_t, uSrc);
7390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7391
7392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7394 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7395 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7396
7397 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7398 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7399
7400 IEM_MC_ADVANCE_RIP();
7401 IEM_MC_END();
7402 return VINF_SUCCESS;
7403 }
7404 /* The register, register encoding is invalid. */
7405 return IEMOP_RAISE_INVALID_OPCODE();
7406}
7407
7408/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7409FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7410{
7411 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7412 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7413 {
7414 /* Register, memory. */
7415 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7416 IEM_MC_BEGIN(0, 2);
7417 IEM_MC_LOCAL(uint128_t, uSrc);
7418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7419
7420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7422 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7423 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7424
7425 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7426 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7427
7428 IEM_MC_ADVANCE_RIP();
7429 IEM_MC_END();
7430 return VINF_SUCCESS;
7431 }
7432
7433 /* The register, register encoding is invalid. */
7434 return IEMOP_RAISE_INVALID_OPCODE();
7435}
7436
7437/* Opcode 0xf3 0x0f 0xe7 - invalid */
7438/* Opcode 0xf2 0x0f 0xe7 - invalid */
7439
7440
7441/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7442FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7443/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7444FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7445/* Opcode 0xf3 0x0f 0xe8 - invalid */
7446/* Opcode 0xf2 0x0f 0xe8 - invalid */
7447
7448/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7449FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7450/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7451FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7452/* Opcode 0xf3 0x0f 0xe9 - invalid */
7453/* Opcode 0xf2 0x0f 0xe9 - invalid */
7454
7455/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7456FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7457/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7458FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7459/* Opcode 0xf3 0x0f 0xea - invalid */
7460/* Opcode 0xf2 0x0f 0xea - invalid */
7461
7462/** Opcode 0x0f 0xeb - por Pq, Qq */
7463FNIEMOP_STUB(iemOp_por_Pq_Qq);
7464/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7465FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7466/* Opcode 0xf3 0x0f 0xeb - invalid */
7467/* Opcode 0xf2 0x0f 0xeb - invalid */
7468
7469/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7470FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7471/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7472FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7473/* Opcode 0xf3 0x0f 0xec - invalid */
7474/* Opcode 0xf2 0x0f 0xec - invalid */
7475
7476/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7477FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7478/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7479FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7480/* Opcode 0xf3 0x0f 0xed - invalid */
7481/* Opcode 0xf2 0x0f 0xed - invalid */
7482
7483/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7484FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7485/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7486FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7487/* Opcode 0xf3 0x0f 0xee - invalid */
7488/* Opcode 0xf2 0x0f 0xee - invalid */
7489
7490
7491/** Opcode 0x0f 0xef - pxor Pq, Qq */
7492FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7493{
7494 IEMOP_MNEMONIC(pxor, "pxor");
7495 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7496}
7497
7498/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7499FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7500{
7501 IEMOP_MNEMONIC(vpxor, "vpxor");
7502 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7503}
7504
7505/* Opcode 0xf3 0x0f 0xef - invalid */
7506/* Opcode 0xf2 0x0f 0xef - invalid */
7507
7508/* Opcode 0x0f 0xf0 - invalid */
7509/* Opcode 0x66 0x0f 0xf0 - invalid */
7510/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7511FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7512
7513/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7514FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7515/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7516FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7517/* Opcode 0xf2 0x0f 0xf1 - invalid */
7518
7519/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7520FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7521/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7522FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7523/* Opcode 0xf2 0x0f 0xf2 - invalid */
7524
7525/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7526FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7527/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7528FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7529/* Opcode 0xf2 0x0f 0xf3 - invalid */
7530
7531/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7532FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7533/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7534FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7535/* Opcode 0xf2 0x0f 0xf4 - invalid */
7536
7537/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7538FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7539/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7540FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7541/* Opcode 0xf2 0x0f 0xf5 - invalid */
7542
7543/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7544FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7545/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7546FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7547/* Opcode 0xf2 0x0f 0xf6 - invalid */
7548
7549/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7550FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7551/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7552FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7553/* Opcode 0xf2 0x0f 0xf7 - invalid */
7554
7555/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7556FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7557/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7558FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7559/* Opcode 0xf2 0x0f 0xf8 - invalid */
7560
7561/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7562FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7563/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7564FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7565/* Opcode 0xf2 0x0f 0xf9 - invalid */
7566
7567/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7568FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7569/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7570FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7571/* Opcode 0xf2 0x0f 0xfa - invalid */
7572
7573/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7574FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7575/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7576FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7577/* Opcode 0xf2 0x0f 0xfb - invalid */
7578
7579/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7580FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7581/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7582FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7583/* Opcode 0xf2 0x0f 0xfc - invalid */
7584
7585/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7586FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7587/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7588FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7589/* Opcode 0xf2 0x0f 0xfd - invalid */
7590
7591/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7592FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7593/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7594FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7595/* Opcode 0xf2 0x0f 0xfe - invalid */
7596
7597
7598/** Opcode **** 0x0f 0xff - UD0 */
7599FNIEMOP_DEF(iemOp_ud0)
7600{
7601 IEMOP_MNEMONIC(ud0, "ud0");
7602 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7603 {
7604 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7605#ifndef TST_IEM_CHECK_MC
7606 RTGCPTR GCPtrEff;
7607 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7608 if (rcStrict != VINF_SUCCESS)
7609 return rcStrict;
7610#endif
7611 IEMOP_HLP_DONE_DECODING();
7612 }
7613 return IEMOP_RAISE_INVALID_OPCODE();
7614}
7615
7616
7617
7618/**
7619 * Two byte opcode map, first byte 0x0f.
7620 *
7621 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7622 * check if it needs updating as well when making changes.
7623 */
7624IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7625{
7626 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7627 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7628 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7629 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7630 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7631 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7632 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7633 /* 0x06 */ IEMOP_X4(iemOp_clts),
7634 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7635 /* 0x08 */ IEMOP_X4(iemOp_invd),
7636 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7637 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7638 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7639 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7640 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7641 /* 0x0e */ IEMOP_X4(iemOp_femms),
7642 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7643
7644 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7645 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7646 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7647 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7648 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7649 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7650 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7651 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7652 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7653 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7654 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7655 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7656 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7657 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7658 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7659 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7660
7661 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7662 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7663 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7664 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7665 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7666 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7667 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7668 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7669 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7670 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7671 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7672 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7673 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7674 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7675 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7676 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7677
7678 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7679 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7680 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7681 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7682 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7683 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7684 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7685 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7686 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7687 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7688 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7689 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7690 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7691 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7692 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7693 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7694
7695 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7696 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7697 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7698 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7699 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7700 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7701 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7702 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7703 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7704 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7705 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7706 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7707 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7708 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7709 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7710 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7711
7712 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7713 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7714 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7715 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7716 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7717 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7718 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7719 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7720 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7721 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7722 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7723 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7724 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7725 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7726 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7727 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7728
7729 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7730 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7731 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7732 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7733 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7734 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7735 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7736 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7737 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7738 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7739 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7740 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7741 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7742 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7743 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7744 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7745
7746 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7747 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7748 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7749 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7750 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7751 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7752 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7753 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7754
7755 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7756 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7757 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7758 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7759 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7760 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7761 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7762 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7763
7764 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7765 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7766 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7767 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7768 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7769 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7770 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7771 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7772 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7773 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7774 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7775 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7776 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7777 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7778 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7779 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7780
7781 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7782 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7783 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7784 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7785 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7786 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7787 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7788 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7789 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7790 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7791 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7792 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7793 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7794 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7795 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7796 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7797
7798 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7799 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7800 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7801 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7802 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7803 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7804 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7805 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7806 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7807 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7808 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7809 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7810 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7811 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7812 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7813 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7814
7815 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7816 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7817 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7818 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7819 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7820 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7821 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7822 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7823 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7824 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7825 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7826 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7827 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7828 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7829 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7830 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7831
7832 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7833 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7834 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7835 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7836 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7837 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7838 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7839 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7840 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7841 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7842 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7843 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7844 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7845 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7846 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7847 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7848
7849 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7850 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7851 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7852 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7853 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7854 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7855 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7856 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7857 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7858 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7859 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7860 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7861 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7862 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7863 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7864 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7865
7866 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7867 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7868 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7869 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7870 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7871 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7872 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7873 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7874 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7875 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7876 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7877 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7878 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7879 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7880 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7881 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7882
7883 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7884 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7885 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7886 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7887 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7888 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7889 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7890 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7891 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7892 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7893 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7894 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7895 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7896 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7897 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7898 /* 0xff */ IEMOP_X4(iemOp_ud0),
7899};
7900AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
7901
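/*
 * Each opcode row in the map above carries four entries (no prefix, 0x66,
 * 0xf3 and 0xf2), which is what the 1024-entry assertion checks.  An
 * illustrative sketch of how a dispatcher could index such a map (the actual
 * lookup lives in the decoder proper; this helper is hypothetical):
 */
#if 0 /* illustrative only */
static PFNIEMOP iemSketch_LookupTwoByte(uint8_t bOpcode, unsigned idxPrefix /* 0=none, 1=66h, 2=f3h, 3=f2h */)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif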
7902
7903/**
7904 * VEX opcode map \#1.
7905 *
7906 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
7907 *          it needs updating too when making changes.
7908 */
7909IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
7910{
7911 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7912 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7913 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7914 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7915 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7916 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7917 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7918 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7919 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7920 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7921 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7922 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7923 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
7924 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7925 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7926 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7927 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7928
7929 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7930 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7931 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7932 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7933 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7934 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7935 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7936 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7937 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7938 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7939 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7940 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7941 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7942 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7943 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7944 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7945
7946 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7947 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7948 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7949 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7950 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7951 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7952 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7953 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7954 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7955 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7956 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7957 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7958 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7959 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7960 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7961 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7962
7963 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7964 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7965 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7966 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7967 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7968 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7969 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7970 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7971 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7972 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7973 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7974 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7975 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7976 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7977 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7978 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7979
7980 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7981 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7982 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7983 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7984 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
7985 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
7986 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
7987 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
7988 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
7989 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
7990 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
7991 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
7992 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
7993 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
7994 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
7995 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
7996
7997 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7998 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7999 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8000 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8001 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8002 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8003 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8004 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8005 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8006 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8007 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8008 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8009 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8010 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8011 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8012 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8013
8014 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8015 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8016 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8017 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8018 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8019 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8020 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8021 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8022 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8023 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8024 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8025 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8026 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8027 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8028 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8029 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8030
8031 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8032 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8033 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8034 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8035 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8036 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8037 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8038 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8039 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
8040 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
8041 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
8042 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
8043 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8044 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8045 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8046 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
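/**
 * @note  Illustrative sketch only, not taken from the surrounding source: the
 *        1024-entry layout gives each of the 256 two-byte opcodes four slots,
 *        one per mandatory-prefix column.  Assuming the columns are ordered
 *        none, 0x66, 0xF3, 0xF2 (as the rows above suggest, e.g. 0x6f maps to
 *        vmovdqa under 0x66 and vmovdqu under 0xF3), a dispatcher could pick
 *        the handler along these lines:
 * @code
 *      // bOpcode:   the byte following the 0x0f escape (assumed name).
 *      // idxPrefix: 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2 (assumed encoding).
 *      PFNIEMOP const pfnOp = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
 * @endcode
 */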
/** @} */