source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@65770 (last change: r65770, "IEM: 0x0f 0x2c split up.")

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 65770 2017-02-13 15:09:30Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */


/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
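
/*
 * Illustrative sketch, not part of the build: how a ModR/M byte such as the
 * bRm above decomposes into the fields behind the X86_MODRM_* masks and
 * shifts. The reg field (bits 5:3) is the /digit that indexes g_apfnGroup6;
 * mod (bits 7:6) selects register vs. memory forms and rm (bits 2:0) names
 * the operand. The helper name is made up for illustration.
 *
 * @code
 * #include <stdint.h>
 *
 * static void ExampleDecodeModRM(uint8_t bRm, uint8_t *puMod, uint8_t *puReg, uint8_t *puRm)
 * {
 *     *puMod = bRm >> 6;       // 3 (11b) means register operand, else memory.
 *     *puReg = (bRm >> 3) & 7; // opcode extension /0../7, the jump table index.
 *     *puRm  = bRm & 7;        // register number or memory addressing mode.
 * }
 * @endcode
 */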


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits (PE, MP, EM and TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);

/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
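
/*
 * Illustrative sketch, not part of the build: how the control-register index
 * above is assembled. The ModR/M reg field supplies bits 0-2, REX.R (folded
 * into uRexReg) supplies bit 3 in 64-bit mode, and on CPUs with the
 * fMovCr8In32Bit feature checked above a LOCK prefix selects CR8 from 32-bit
 * code. The helper name and boolean parameters are made up for illustration.
 *
 * @code
 * #include <stdbool.h>
 * #include <stdint.h>
 *
 * static unsigned ExampleCrIndex(uint8_t bRm, bool fRexR, bool fLock)
 * {
 *     unsigned iCrReg = (bRm >> 3) & 7; // ModR/M reg field.
 *     if (fRexR)
 *         iCrReg |= 8;                  // REX.R extends the encoding to CR8+.
 *     if (fLock)
 *         iCrReg |= 8;                  // LOCK-prefixed MOV CRx alias for CR8.
 *     return iCrReg;                    // only 0, 2, 3, 4 and 8 pass the check above.
 * }
 * @endcode
 */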


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT


/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
{
    IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
1687FNIEMOP_STUB(iemOp_rdpmc);
1688/** Opcode 0x0f 0x34. */
1689FNIEMOP_STUB(iemOp_sysenter);
1690/** Opcode 0x0f 0x35. */
1691FNIEMOP_STUB(iemOp_sysexit);
1692/** Opcode 0x0f 0x37. */
1693FNIEMOP_STUB(iemOp_getsec);
1694/** Opcode 0x0f 0x38. */
1695FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1696/** Opcode 0x0f 0x3a. */
1697FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1698
1699
1700/**
1701 * Implements a conditional move.
1702 *
1703 * Wish there was an obvious way to do this where we could share and reduce
1704 * code bloat.
1705 *
1706 * @param a_Cnd The conditional "microcode" operation.
1707 */
1708#define CMOV_X(a_Cnd) \
1709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1710 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1711 { \
1712 switch (pVCpu->iem.s.enmEffOpSize) \
1713 { \
1714 case IEMMODE_16BIT: \
1715 IEM_MC_BEGIN(0, 1); \
1716 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1717 a_Cnd { \
1718 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1719 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1720 } IEM_MC_ENDIF(); \
1721 IEM_MC_ADVANCE_RIP(); \
1722 IEM_MC_END(); \
1723 return VINF_SUCCESS; \
1724 \
1725 case IEMMODE_32BIT: \
1726 IEM_MC_BEGIN(0, 1); \
1727 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1728 a_Cnd { \
1729 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1730 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1731 } IEM_MC_ELSE() { \
1732 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1733 } IEM_MC_ENDIF(); \
1734 IEM_MC_ADVANCE_RIP(); \
1735 IEM_MC_END(); \
1736 return VINF_SUCCESS; \
1737 \
1738 case IEMMODE_64BIT: \
1739 IEM_MC_BEGIN(0, 1); \
1740 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1741 a_Cnd { \
1742 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1743 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1744 } IEM_MC_ENDIF(); \
1745 IEM_MC_ADVANCE_RIP(); \
1746 IEM_MC_END(); \
1747 return VINF_SUCCESS; \
1748 \
1749 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1750 } \
1751 } \
1752 else \
1753 { \
1754 switch (pVCpu->iem.s.enmEffOpSize) \
1755 { \
1756 case IEMMODE_16BIT: \
1757 IEM_MC_BEGIN(0, 2); \
1758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1759 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1761 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1762 a_Cnd { \
1763 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1764 } IEM_MC_ENDIF(); \
1765 IEM_MC_ADVANCE_RIP(); \
1766 IEM_MC_END(); \
1767 return VINF_SUCCESS; \
1768 \
1769 case IEMMODE_32BIT: \
1770 IEM_MC_BEGIN(0, 2); \
1771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1772 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1774 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1775 a_Cnd { \
1776 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1777 } IEM_MC_ELSE() { \
1778 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1779 } IEM_MC_ENDIF(); \
1780 IEM_MC_ADVANCE_RIP(); \
1781 IEM_MC_END(); \
1782 return VINF_SUCCESS; \
1783 \
1784 case IEMMODE_64BIT: \
1785 IEM_MC_BEGIN(0, 2); \
1786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1787 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1789 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1790 a_Cnd { \
1791 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1792 } IEM_MC_ENDIF(); \
1793 IEM_MC_ADVANCE_RIP(); \
1794 IEM_MC_END(); \
1795 return VINF_SUCCESS; \
1796 \
1797 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1798 } \
1799 } do {} while (0)
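
/*
 * Illustrative expansion: with IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) and a
 * 32-bit register operand, CMOV_X fetches Ev into u32Tmp and stores it to
 * Gv only when ZF is set, while the IEM_MC_ELSE branch still clears bits
 * 63:32 of the destination - matching the architectural CMOVcc rule that
 * the upper half of the 64-bit register is zeroed even when the condition
 * is false.
 */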
1800
1801
1802
1803/** Opcode 0x0f 0x40. */
1804FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1805{
1806 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
1807 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1808}
1809
1810
1811/** Opcode 0x0f 0x41. */
1812FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1813{
1814 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
1815 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1816}
1817
1818
1819/** Opcode 0x0f 0x42. */
1820FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
1821{
1822 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
1823 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
1824}
1825
1826
1827/** Opcode 0x0f 0x43. */
1828FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
1829{
1830 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
1831 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
1832}
1833
1834
1835/** Opcode 0x0f 0x44. */
1836FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
1837{
1838 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
1839 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
1840}
1841
1842
1843/** Opcode 0x0f 0x45. */
1844FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
1845{
1846 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
1847 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
1848}
1849
1850
1851/** Opcode 0x0f 0x46. */
1852FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
1853{
1854 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
1855 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1856}
1857
1858
1859/** Opcode 0x0f 0x47. */
1860FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
1861{
1862 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
1863 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1864}
1865
1866
1867/** Opcode 0x0f 0x48. */
1868FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
1869{
1870 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
1871 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
1872}
1873
1874
1875/** Opcode 0x0f 0x49. */
1876FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
1877{
1878 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
1879 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
1880}
1881
1882
1883/** Opcode 0x0f 0x4a. */
1884FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
1885{
1886 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
1887 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
1888}
1889
1890
1891/** Opcode 0x0f 0x4b. */
1892FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
1893{
1894 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
1895 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
1896}
1897
1898
1899/** Opcode 0x0f 0x4c. */
1900FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
1901{
1902 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
1903 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
1904}
1905
1906
1907/** Opcode 0x0f 0x4d. */
1908FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
1909{
1910 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
1911 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
1912}
1913
1914
1915/** Opcode 0x0f 0x4e. */
1916FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
1917{
1918 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
1919 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1920}
1921
1922
1923/** Opcode 0x0f 0x4f. */
1924FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
1925{
1926 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
1927 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1928}
1929
1930#undef CMOV_X
1931
1932/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
1933FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1934/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
1935FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1936/* Opcode 0xf3 0x0f 0x50 - invalid */
1937/* Opcode 0xf2 0x0f 0x50 - invalid */
1938
1939/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
1940FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1941/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
1942FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1943/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
1944FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1945/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1946FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1947
1948/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
1949FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1950/* Opcode 0x66 0x0f 0x52 - invalid */
1951/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
1952FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1953/* Opcode 0xf2 0x0f 0x52 - invalid */
1954
1955/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
1956FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1957/* Opcode 0x66 0x0f 0x53 - invalid */
1958/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
1959FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1960/* Opcode 0xf2 0x0f 0x53 - invalid */
1961
1962/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
1963FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1964/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
1965FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1966/* Opcode 0xf3 0x0f 0x54 - invalid */
1967/* Opcode 0xf2 0x0f 0x54 - invalid */
1968
1969/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
1970FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1971/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
1972FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1973/* Opcode 0xf3 0x0f 0x55 - invalid */
1974/* Opcode 0xf2 0x0f 0x55 - invalid */
1975
1976/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
1977FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1978/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
1979FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1980/* Opcode 0xf3 0x0f 0x56 - invalid */
1981/* Opcode 0xf2 0x0f 0x56 - invalid */
1982
1983/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
1984FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
1985/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
1986FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
1987/* Opcode 0xf3 0x0f 0x57 - invalid */
1988/* Opcode 0xf2 0x0f 0x57 - invalid */
1989
1990/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
1991FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
1992/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
1993FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
1994/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
1995FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
1996/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
1997FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
1998
1999/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2000FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2001/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2002FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2003/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2004FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2005/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2006FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2007
2008/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2009FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2010/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2011FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2012/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2013FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2014/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2015FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2016
2017/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2018FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2019/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2020FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2021/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2022FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2023/* Opcode 0xf2 0x0f 0x5b - invalid */
2024
2025/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2026FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2027/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2028FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2029/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2030FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2031/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2032FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2033
2034/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2035FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2036/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2037FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2038/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2039FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2040/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2041FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2042
2043/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2044FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2045/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2046FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2047/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2048FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2049/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2050FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2051
2052/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2053FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2054/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2055FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2056/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2057FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2058/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2059FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2060
2061/**
2062 * Common worker for MMX instructions on the forms:
2063 *      pxxxx mm1, mm2/mem32
2064 *
2065 * The 2nd operand is the first half of a register, which in the memory
2066 * case means a 32-bit memory access (the SSE2 counterpart below uses a
2067 * 128-bit aligned 64-bit access instead).
2068 *
2069 * Exceptions type 4.
2070 */
2071FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2072{
2073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2074 if (!pImpl->pfnU64)
2075 return IEMOP_RAISE_INVALID_OPCODE();
2076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2077 {
2078 /*
2079 * Register, register.
2080 */
2081 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2082 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2084 IEM_MC_BEGIN(2, 0);
2085 IEM_MC_ARG(uint64_t *, pDst, 0);
2086 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2087 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2088 IEM_MC_PREPARE_FPU_USAGE();
2089 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2090 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2091 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2092 IEM_MC_ADVANCE_RIP();
2093 IEM_MC_END();
2094 }
2095 else
2096 {
2097 /*
2098 * Register, memory.
2099 */
2100 IEM_MC_BEGIN(2, 2);
2101 IEM_MC_ARG(uint64_t *, pDst, 0);
2102 IEM_MC_LOCAL(uint32_t, uSrc);
2103 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2105
2106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2108 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2109 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2110
2111 IEM_MC_PREPARE_FPU_USAGE();
2112 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2113 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2114
2115 IEM_MC_ADVANCE_RIP();
2116 IEM_MC_END();
2117 }
2118 return VINF_SUCCESS;
2119}
2120
2121
2122/**
2123 * Common worker for SSE2 instructions on the forms:
2124 *      pxxxx xmm1, xmm2/mem128
2125 *
2126 * The 2nd operand is the first half of a register, which in the memory
2127 * case means a 64-bit memory access that must be aligned on a 128-bit
2128 * boundary; only the low 64 bits of the source are used.
2129 *
2130 * Exceptions type 4.
2131 */
2132FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2133{
2134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2135 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2136 {
2137 /*
2138 * Register, register.
2139 */
2140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2141 IEM_MC_BEGIN(2, 0);
2142 IEM_MC_ARG(uint128_t *, pDst, 0);
2143 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2144 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2145 IEM_MC_PREPARE_SSE_USAGE();
2146 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2147 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2148 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2149 IEM_MC_ADVANCE_RIP();
2150 IEM_MC_END();
2151 }
2152 else
2153 {
2154 /*
2155 * Register, memory.
2156 */
2157 IEM_MC_BEGIN(2, 2);
2158 IEM_MC_ARG(uint128_t *, pDst, 0);
2159 IEM_MC_LOCAL(uint64_t, uSrc);
2160 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2162
2163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2166 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2167
2168 IEM_MC_PREPARE_SSE_USAGE();
2169 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2170 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2171
2172 IEM_MC_ADVANCE_RIP();
2173 IEM_MC_END();
2174 }
2175 return VINF_SUCCESS;
2176}
2177
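/*
 * Illustrative note: through the two workers above, punpcklbw mm0, [mem]
 * performs a 32-bit read, whereas 66-prefixed punpcklbw xmm0, [mem]
 * performs a 64-bit read that must be 16-byte aligned.
 */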
2178
2179/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2180FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2181{
2182 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2183 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2184}
2185
2186/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2187FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2188{
2189 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2190 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2191}
2192
2193/* Opcode 0xf3 0x0f 0x60 - invalid */
2194
2195
2196/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2197FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2198{
2199 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
2200 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2201}
2202
2203/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2204FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2205{
2206 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2207 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2208}
2209
2210/* Opcode 0xf3 0x0f 0x61 - invalid */
2211
2212
2213/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2214FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2215{
2216 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2217 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2218}
2219
2220/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2221FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2222{
2223 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2224 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2225}
2226
2227/* Opcode 0xf3 0x0f 0x62 - invalid */
2228
2229
2230
2231/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2232FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2233/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2234FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2235/* Opcode 0xf3 0x0f 0x63 - invalid */
2236
2237/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2238FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2239/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2240FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2241/* Opcode 0xf3 0x0f 0x64 - invalid */
2242
2243/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2244FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2245/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2246FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2247/* Opcode 0xf3 0x0f 0x65 - invalid */
2248
2249/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2250FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2251/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2252FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2253/* Opcode 0xf3 0x0f 0x66 - invalid */
2254
2255/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2256FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2257/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, Wx */
2258FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2259/* Opcode 0xf3 0x0f 0x67 - invalid */
2260
2261
2262/**
2263 * Common worker for MMX instructions on the form:
2264 * pxxxx mm1, mm2/mem64
2265 *
2266 * The 2nd operand is the second half of a register, which in the memory
2267 * case means a plain 64-bit memory access (the SSE2 worker below uses a
2268 * 128-bit aligned access instead).
2269 *
2270 * Exceptions type 4.
2271 */
2272FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2273{
2274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2275 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2276 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2277 {
2278 /*
2279 * Register, register.
2280 */
2281 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2282 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2284 IEM_MC_BEGIN(2, 0);
2285 IEM_MC_ARG(uint64_t *, pDst, 0);
2286 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2287 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2288 IEM_MC_PREPARE_FPU_USAGE();
2289 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2290 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2291 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2292 IEM_MC_ADVANCE_RIP();
2293 IEM_MC_END();
2294 }
2295 else
2296 {
2297 /*
2298 * Register, memory.
2299 */
2300 IEM_MC_BEGIN(2, 2);
2301 IEM_MC_ARG(uint64_t *, pDst, 0);
2302 IEM_MC_LOCAL(uint64_t, uSrc);
2303 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2305
2306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2308 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2309 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2310
2311 IEM_MC_PREPARE_FPU_USAGE();
2312 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2313 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2314
2315 IEM_MC_ADVANCE_RIP();
2316 IEM_MC_END();
2317 }
2318 return VINF_SUCCESS;
2319}
2320
2321
2322/**
2323 * Common worker for SSE2 instructions on the form:
2324 * pxxxx xmm1, xmm2/mem128
2325 *
2326 * The 2nd operand is the second half of a register, which in the memory
2327 * case means a 128-bit aligned access, of which the implementation may
2328 * read the full 128 bits or only the upper 64 bits.
2329 *
2330 * Exceptions type 4.
2331 */
2332FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2333{
2334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2335 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2336 {
2337 /*
2338 * Register, register.
2339 */
2340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2341 IEM_MC_BEGIN(2, 0);
2342 IEM_MC_ARG(uint128_t *, pDst, 0);
2343 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2344 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2345 IEM_MC_PREPARE_SSE_USAGE();
2346 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2347 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2348 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2349 IEM_MC_ADVANCE_RIP();
2350 IEM_MC_END();
2351 }
2352 else
2353 {
2354 /*
2355 * Register, memory.
2356 */
2357 IEM_MC_BEGIN(2, 2);
2358 IEM_MC_ARG(uint128_t *, pDst, 0);
2359 IEM_MC_LOCAL(uint128_t, uSrc);
2360 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2362
2363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2365 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2366 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2367
2368 IEM_MC_PREPARE_SSE_USAGE();
2369 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2370 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2371
2372 IEM_MC_ADVANCE_RIP();
2373 IEM_MC_END();
2374 }
2375 return VINF_SUCCESS;
2376}
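
/*
 * Illustrative note: punpckhbw mm0, mm1 interleaves the upper four bytes of
 * the two operands into the full destination; the SSE2 form does the same
 * with the upper eight bytes of each 128-bit operand.
 */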
2377
2378
2379/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2380FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2381{
2382 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2383 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2384}
2385
2386/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2387FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2388{
2389 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2390 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2391}
2392/* Opcode 0xf3 0x0f 0x68 - invalid */
2393
2394
2395/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2396FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2397{
2398 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2399 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2400}
2401
2402/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2403FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2404{
2405 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2406 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2407}
2408
2409/* Opcode 0xf3 0x0f 0x69 - invalid */
2410
2411
2412/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2413FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2414{
2415 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2416 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2417}
2418
2419/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, Wx */
2420FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2421{
2422 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, Wx");
2423 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2424}
2425/* Opcode 0xf3 0x0f 0x6a - invalid */
2426
2427
2428/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2429FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2430/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2431FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2432/* Opcode 0xf3 0x0f 0x6b - invalid */
2433
2434
2435/* Opcode 0x0f 0x6c - invalid */
2436
2437/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2438FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2439{
2440 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2441 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2442}
2443
2444/* Opcode 0xf3 0x0f 0x6c - invalid */
2445/* Opcode 0xf2 0x0f 0x6c - invalid */
2446
2447
2448/* Opcode 0x0f 0x6d - invalid */
2449
2450/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, Wx */
2451FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2452{
2453 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, Wx");
2454 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2455}
2456
2457/* Opcode 0xf3 0x0f 0x6d - invalid */
2458
2459
2460/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2461FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2462{
2463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2464 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2465 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2466 else
2467 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2469 {
2470 /* MMX, greg */
2471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2472 IEM_MC_BEGIN(0, 1);
2473 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2474 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2475 IEM_MC_LOCAL(uint64_t, u64Tmp);
2476 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2477 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2478 else
2479 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2480 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2481 IEM_MC_ADVANCE_RIP();
2482 IEM_MC_END();
2483 }
2484 else
2485 {
2486 /* MMX, [mem] */
2487 IEM_MC_BEGIN(0, 2);
2488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2489 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2492 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2493 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2494 {
2495 IEM_MC_LOCAL(uint64_t, u64Tmp);
2496 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2497 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2498 }
2499 else
2500 {
2501 IEM_MC_LOCAL(uint32_t, u32Tmp);
2502 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2503 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2504 }
2505 IEM_MC_ADVANCE_RIP();
2506 IEM_MC_END();
2507 }
2508 return VINF_SUCCESS;
2509}
2510
2511/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2512FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2513{
2514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2515 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2516 IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
2517 else
2518 IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2519 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2520 {
2521 /* XMM, greg*/
2522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2523 IEM_MC_BEGIN(0, 1);
2524 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2525 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2526 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2527 {
2528 IEM_MC_LOCAL(uint64_t, u64Tmp);
2529 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2530 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2531 }
2532 else
2533 {
2534 IEM_MC_LOCAL(uint32_t, u32Tmp);
2535 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2536 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2537 }
2538 IEM_MC_ADVANCE_RIP();
2539 IEM_MC_END();
2540 }
2541 else
2542 {
2543 /* XMM, [mem] */
2544 IEM_MC_BEGIN(0, 2);
2545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2546 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2549 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2550 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2551 {
2552 IEM_MC_LOCAL(uint64_t, u64Tmp);
2553 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2554 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2555 }
2556 else
2557 {
2558 IEM_MC_LOCAL(uint32_t, u32Tmp);
2559 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2560 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2561 }
2562 IEM_MC_ADVANCE_RIP();
2563 IEM_MC_END();
2564 }
2565 return VINF_SUCCESS;
2566}
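
/*
 * Illustrative note: with REX.W, movq xmm1, rax loads all 64 bits and
 * zeroes bits 127:64 of the destination; without it, movd xmm1, eax loads
 * 32 bits and zeroes bits 127:32 - hence the *_ZX_U128 stores above.
 */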
2567
2568/* Opcode 0xf3 0x0f 0x6e - invalid */
2569
2570
2571/** Opcode 0x0f 0x6f - movq Pq, Qq */
2572FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2573{
2574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2575 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2576 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2577 {
2578 /*
2579 * Register, register.
2580 */
2581 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2582 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2584 IEM_MC_BEGIN(0, 1);
2585 IEM_MC_LOCAL(uint64_t, u64Tmp);
2586 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2587 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2588 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2589 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2590 IEM_MC_ADVANCE_RIP();
2591 IEM_MC_END();
2592 }
2593 else
2594 {
2595 /*
2596 * Register, memory.
2597 */
2598 IEM_MC_BEGIN(0, 2);
2599 IEM_MC_LOCAL(uint64_t, u64Tmp);
2600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2601
2602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2604 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2605 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2606 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2607 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2608
2609 IEM_MC_ADVANCE_RIP();
2610 IEM_MC_END();
2611 }
2612 return VINF_SUCCESS;
2613}
2614
2615/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2616FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2617{
2618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2619 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2620 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2621 {
2622 /*
2623 * Register, register.
2624 */
2625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2626 IEM_MC_BEGIN(0, 0);
2627 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2628 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2629 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2630 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2631 IEM_MC_ADVANCE_RIP();
2632 IEM_MC_END();
2633 }
2634 else
2635 {
2636 /*
2637 * Register, memory.
2638 */
2639 IEM_MC_BEGIN(0, 2);
2640 IEM_MC_LOCAL(uint128_t, u128Tmp);
2641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2642
2643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2645 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2646 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2647 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2648 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2649
2650 IEM_MC_ADVANCE_RIP();
2651 IEM_MC_END();
2652 }
2653 return VINF_SUCCESS;
2654}
2655
2656/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2657FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2658{
2659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2660 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2661 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2662 {
2663 /*
2664 * Register, register.
2665 */
2666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2667 IEM_MC_BEGIN(0, 0);
2668 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2669 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2670 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2671 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2672 IEM_MC_ADVANCE_RIP();
2673 IEM_MC_END();
2674 }
2675 else
2676 {
2677 /*
2678 * Register, memory.
2679 */
2680 IEM_MC_BEGIN(0, 2);
2681 IEM_MC_LOCAL(uint128_t, u128Tmp);
2682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2683
2684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2686 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2687 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2688 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2689 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2690
2691 IEM_MC_ADVANCE_RIP();
2692 IEM_MC_END();
2693 }
2694 return VINF_SUCCESS;
2695}
2696
2697
2698/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2699FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2700{
2701 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2703 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2704 {
2705 /*
2706 * Register, register.
2707 */
2708 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2710
2711 IEM_MC_BEGIN(3, 0);
2712 IEM_MC_ARG(uint64_t *, pDst, 0);
2713 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2714 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2715 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2716 IEM_MC_PREPARE_FPU_USAGE();
2717 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2718 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2719 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2720 IEM_MC_ADVANCE_RIP();
2721 IEM_MC_END();
2722 }
2723 else
2724 {
2725 /*
2726 * Register, memory.
2727 */
2728 IEM_MC_BEGIN(3, 2);
2729 IEM_MC_ARG(uint64_t *, pDst, 0);
2730 IEM_MC_LOCAL(uint64_t, uSrc);
2731 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2733
2734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
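 /* The shuffle immediate trails the ModR/M displacement, so the effective
    address calc above is told one immediate byte follows (needed for
    RIP-relative addressing) and bEvil is only fetched afterwards. */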
2735 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2736 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2738 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2739
2740 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2741 IEM_MC_PREPARE_FPU_USAGE();
2742 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2743 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2744
2745 IEM_MC_ADVANCE_RIP();
2746 IEM_MC_END();
2747 }
2748 return VINF_SUCCESS;
2749}
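
/*
 * Illustrative example: pshufw mm0, mm1, 0x1b reverses the four source
 * words, since each 2-bit immediate field picks the source word for the
 * corresponding destination word (0x1b = 0b00011011 -> 3,2,1,0).
 */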
2750
2751/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2752FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2753{
2754 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2757 {
2758 /*
2759 * Register, register.
2760 */
2761 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2763
2764 IEM_MC_BEGIN(3, 0);
2765 IEM_MC_ARG(uint128_t *, pDst, 0);
2766 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2767 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2768 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2769 IEM_MC_PREPARE_SSE_USAGE();
2770 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2771 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2772 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2773 IEM_MC_ADVANCE_RIP();
2774 IEM_MC_END();
2775 }
2776 else
2777 {
2778 /*
2779 * Register, memory.
2780 */
2781 IEM_MC_BEGIN(3, 2);
2782 IEM_MC_ARG(uint128_t *, pDst, 0);
2783 IEM_MC_LOCAL(uint128_t, uSrc);
2784 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2786
2787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2788 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2789 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2791 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2792
2793 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2794 IEM_MC_PREPARE_SSE_USAGE();
2795 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2796 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2797
2798 IEM_MC_ADVANCE_RIP();
2799 IEM_MC_END();
2800 }
2801 return VINF_SUCCESS;
2802}
2803
2804/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
2805FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2806{
2807 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2809 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2810 {
2811 /*
2812 * Register, register.
2813 */
2814 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2816
2817 IEM_MC_BEGIN(3, 0);
2818 IEM_MC_ARG(uint128_t *, pDst, 0);
2819 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2820 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2821 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2822 IEM_MC_PREPARE_SSE_USAGE();
2823 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2824 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2825 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2826 IEM_MC_ADVANCE_RIP();
2827 IEM_MC_END();
2828 }
2829 else
2830 {
2831 /*
2832 * Register, memory.
2833 */
2834 IEM_MC_BEGIN(3, 2);
2835 IEM_MC_ARG(uint128_t *, pDst, 0);
2836 IEM_MC_LOCAL(uint128_t, uSrc);
2837 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2839
2840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2841 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2842 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2844 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2845
2846 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2847 IEM_MC_PREPARE_SSE_USAGE();
2848 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2849 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2850
2851 IEM_MC_ADVANCE_RIP();
2852 IEM_MC_END();
2853 }
2854 return VINF_SUCCESS;
2855}
2856
2857/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
2858FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2859{
2860 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2862 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2863 {
2864 /*
2865 * Register, register.
2866 */
2867 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2869
2870 IEM_MC_BEGIN(3, 0);
2871 IEM_MC_ARG(uint128_t *, pDst, 0);
2872 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2873 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2874 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2875 IEM_MC_PREPARE_SSE_USAGE();
2876 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2877 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2878 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2879 IEM_MC_ADVANCE_RIP();
2880 IEM_MC_END();
2881 }
2882 else
2883 {
2884 /*
2885 * Register, memory.
2886 */
2887 IEM_MC_BEGIN(3, 2);
2888 IEM_MC_ARG(uint128_t *, pDst, 0);
2889 IEM_MC_LOCAL(uint128_t, uSrc);
2890 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2892
2893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2894 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2895 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2897 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2898
2899 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2900 IEM_MC_PREPARE_SSE_USAGE();
2901 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2902 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2903
2904 IEM_MC_ADVANCE_RIP();
2905 IEM_MC_END();
2906 }
2907 return VINF_SUCCESS;
2908}
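
/*
 * Illustrative note: pshufhw shuffles only the four high words and copies
 * the low quadword unchanged, pshuflw does the converse, and pshufd
 * (above) shuffles all four dwords of the 128-bit source.
 */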
2909
2910
2911/** Opcode 0x0f 0x71 11/2. */
2912FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2913
2914/** Opcode 0x66 0x0f 0x71 11/2. */
2915FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2916
2917/** Opcode 0x0f 0x71 11/4. */
2918FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2919
2920/** Opcode 0x66 0x0f 0x71 11/4. */
2921FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2922
2923/** Opcode 0x0f 0x71 11/6. */
2924FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2925
2926/** Opcode 0x66 0x0f 0x71 11/6. */
2927FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2928
2929
2930/** Opcode 0x0f 0x71. */
2931FNIEMOP_DEF(iemOp_Grp12)
2932{
2933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2934 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2935 return IEMOP_RAISE_INVALID_OPCODE();
2936 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2937 {
2938 case 0: case 1: case 3: case 5: case 7:
2939 return IEMOP_RAISE_INVALID_OPCODE();
2940 case 2:
2941 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2942 {
2943 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2944 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2945 default: return IEMOP_RAISE_INVALID_OPCODE();
2946 }
2947 case 4:
2948 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2949 {
2950 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2951 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2952 default: return IEMOP_RAISE_INVALID_OPCODE();
2953 }
2954 case 6:
2955 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2956 {
2957 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2958 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2959 default: return IEMOP_RAISE_INVALID_OPCODE();
2960 }
2961 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2962 }
2963}
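
/*
 * Illustrative decode example for group 12: 0f 71 /2 ib is psrlw Nq,Ib on
 * an MMX register, 66 0f 71 /2 ib is psrlw Udq,Ib on an XMM register, and
 * the repz/repnz forms fall through to the invalid-opcode default above.
 */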
2964
2965
2966/** Opcode 0x0f 0x72 11/2. */
2967FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
2968
2969/** Opcode 0x66 0x0f 0x72 11/2. */
2970FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
2971
2972/** Opcode 0x0f 0x72 11/4. */
2973FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
2974
2975/** Opcode 0x66 0x0f 0x72 11/4. */
2976FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
2977
2978/** Opcode 0x0f 0x72 11/6. */
2979FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
2980
2981/** Opcode 0x66 0x0f 0x72 11/6. */
2982FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2983
2984
2985/** Opcode 0x0f 0x72. */
2986FNIEMOP_DEF(iemOp_Grp13)
2987{
2988 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2989 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2990 return IEMOP_RAISE_INVALID_OPCODE();
2991 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2992 {
2993 case 0: case 1: case 3: case 5: case 7:
2994 return IEMOP_RAISE_INVALID_OPCODE();
2995 case 2:
2996 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2997 {
2998 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2999 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3000 default: return IEMOP_RAISE_INVALID_OPCODE();
3001 }
3002 case 4:
3003 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3004 {
3005 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3006 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3007 default: return IEMOP_RAISE_INVALID_OPCODE();
3008 }
3009 case 6:
3010 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3011 {
3012 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3013 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3014 default: return IEMOP_RAISE_INVALID_OPCODE();
3015 }
3016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3017 }
3018}
3019
3020
3021/** Opcode 0x0f 0x73 11/2. */
3022FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3023
3024/** Opcode 0x66 0x0f 0x73 11/2. */
3025FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3026
3027/** Opcode 0x66 0x0f 0x73 11/3. */
3028FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3029
3030/** Opcode 0x0f 0x73 11/6. */
3031FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3032
3033/** Opcode 0x66 0x0f 0x73 11/6. */
3034FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3035
3036/** Opcode 0x66 0x0f 0x73 11/7. */
3037FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3038
3039
3040/** Opcode 0x0f 0x73. */
3041FNIEMOP_DEF(iemOp_Grp14)
3042{
3043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3044 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3045 return IEMOP_RAISE_INVALID_OPCODE();
3046 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3047 {
3048 case 0: case 1: case 4: case 5:
3049 return IEMOP_RAISE_INVALID_OPCODE();
3050 case 2:
3051 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3052 {
3053 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3054 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3055 default: return IEMOP_RAISE_INVALID_OPCODE();
3056 }
3057 case 3:
3058 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3059 {
3060 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3061 default: return IEMOP_RAISE_INVALID_OPCODE();
3062 }
3063 case 6:
3064 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3065 {
3066 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3067 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3068 default: return IEMOP_RAISE_INVALID_OPCODE();
3069 }
3070 case 7:
3071 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3072 {
3073 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3074 default: return IEMOP_RAISE_INVALID_OPCODE();
3075 }
3076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3077 }
3078}
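
/*
 * Illustrative note: the /3 (psrldq) and /7 (pslldq) byte shifts of group
 * 14 exist only with the 0x66 prefix, i.e. only on XMM registers, which is
 * why those cases have no unprefixed row above.
 */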
3079
3080
3081/**
3082 * Common worker for MMX instructions on the form:
3083 * pxxx mm1, mm2/mem64
3084 */
3085FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3086{
3087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3088 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3089 {
3090 /*
3091 * Register, register.
3092 */
3093 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3094 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3096 IEM_MC_BEGIN(2, 0);
3097 IEM_MC_ARG(uint64_t *, pDst, 0);
3098 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3099 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3100 IEM_MC_PREPARE_FPU_USAGE();
3101 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3102 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3103 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3104 IEM_MC_ADVANCE_RIP();
3105 IEM_MC_END();
3106 }
3107 else
3108 {
3109 /*
3110 * Register, memory.
3111 */
3112 IEM_MC_BEGIN(2, 2);
3113 IEM_MC_ARG(uint64_t *, pDst, 0);
3114 IEM_MC_LOCAL(uint64_t, uSrc);
3115 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3117
3118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3120 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3121 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3122
3123 IEM_MC_PREPARE_FPU_USAGE();
3124 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3125 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3126
3127 IEM_MC_ADVANCE_RIP();
3128 IEM_MC_END();
3129 }
3130 return VINF_SUCCESS;
3131}
3132
3133
3134/**
3135 * Common worker for SSE2 instructions on the forms:
3136 * pxxx xmm1, xmm2/mem128
3137 *
3138 * Proper alignment of the 128-bit operand is enforced.
3139 * Exceptions type 4. SSE2 cpuid checks.
3140 */
3141FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3142{
3143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3144 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3145 {
3146 /*
3147 * Register, register.
3148 */
3149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3150 IEM_MC_BEGIN(2, 0);
3151 IEM_MC_ARG(uint128_t *, pDst, 0);
3152 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3153 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3154 IEM_MC_PREPARE_SSE_USAGE();
3155 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3156 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3157 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3158 IEM_MC_ADVANCE_RIP();
3159 IEM_MC_END();
3160 }
3161 else
3162 {
3163 /*
3164 * Register, memory.
3165 */
3166 IEM_MC_BEGIN(2, 2);
3167 IEM_MC_ARG(uint128_t *, pDst, 0);
3168 IEM_MC_LOCAL(uint128_t, uSrc);
3169 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3171
3172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3174 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3175 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3176
3177 IEM_MC_PREPARE_SSE_USAGE();
3178 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3179 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3180
3181 IEM_MC_ADVANCE_RIP();
3182 IEM_MC_END();
3183 }
3184 return VINF_SUCCESS;
3185}
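
/*
 * Illustrative note: the pcmpeq* wrappers below hand the same
 * implementation table to these two workers, so pcmpeqb mm0, [mem] does a
 * 64-bit read while 66-prefixed pcmpeqb xmm0, [mem] does an aligned
 * 128-bit read.
 */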
3186
3187
3188/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3189FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3190{
3191 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3192 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3193}
3194
3195/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3196FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3197{
3198 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3199 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3200}
3201
3202/* Opcode 0xf3 0x0f 0x74 - invalid */
3203/* Opcode 0xf2 0x0f 0x74 - invalid */
3204
3205
3206/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3207FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3208{
3209 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3210 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3211}
3212
3213/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3214FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3215{
3216 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3217 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3218}
3219
3220/* Opcode 0xf3 0x0f 0x75 - invalid */
3221/* Opcode 0xf2 0x0f 0x75 - invalid */
3222
3223
3224/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3225FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3226{
3227 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3228 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3229}
3230
3231/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3232FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3233{
3234 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3235 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3236}
3237
3238/* Opcode 0xf3 0x0f 0x76 - invalid */
3239/* Opcode 0xf2 0x0f 0x76 - invalid */
3240
3241
3242/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3243FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3244/* Opcode 0x66 0x0f 0x77 - invalid */
3245/* Opcode 0xf3 0x0f 0x77 - invalid */
3246/* Opcode 0xf2 0x0f 0x77 - invalid */
3247
3248/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3249FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3250/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3251FNIEMOP_STUB(iemOp_AmdGrp17);
3252/* Opcode 0xf3 0x0f 0x78 - invalid */
3253/* Opcode 0xf2 0x0f 0x78 - invalid */
3254
3255/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3256FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3257/* Opcode 0x66 0x0f 0x79 - invalid */
3258/* Opcode 0xf3 0x0f 0x79 - invalid */
3259/* Opcode 0xf2 0x0f 0x79 - invalid */
3260
3261/* Opcode 0x0f 0x7a - invalid */
3262/* Opcode 0x66 0x0f 0x7a - invalid */
3263/* Opcode 0xf3 0x0f 0x7a - invalid */
3264/* Opcode 0xf2 0x0f 0x7a - invalid */
3265
3266/* Opcode 0x0f 0x7b - invalid */
3267/* Opcode 0x66 0x0f 0x7b - invalid */
3268/* Opcode 0xf3 0x0f 0x7b - invalid */
3269/* Opcode 0xf2 0x0f 0x7b - invalid */
3270
3271/* Opcode 0x0f 0x7c - invalid */
3272/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3273FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3274/* Opcode 0xf3 0x0f 0x7c - invalid */
3275/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3276FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3277
3278/* Opcode 0x0f 0x7d - invalid */
3279/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3280FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3281/* Opcode 0xf3 0x0f 0x7d - invalid */
3282/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3283FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3284
3285
3286/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3287FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3288{
3289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3290 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3291 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3292 else
3293 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3294 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3295 {
3296 /* greg, MMX */
3297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3298 IEM_MC_BEGIN(0, 1);
3299 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3300 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3302 {
3303 IEM_MC_LOCAL(uint64_t, u64Tmp);
3304 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3305 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3306 }
3307 else
3308 {
3309 IEM_MC_LOCAL(uint32_t, u32Tmp);
3310 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3311 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3312 }
3313 IEM_MC_ADVANCE_RIP();
3314 IEM_MC_END();
3315 }
3316 else
3317 {
3318 /* [mem], MMX */
3319 IEM_MC_BEGIN(0, 2);
3320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3321 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3324 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3325 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3326 {
3327 IEM_MC_LOCAL(uint64_t, u64Tmp);
3328 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3329 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3330 }
3331 else
3332 {
3333 IEM_MC_LOCAL(uint32_t, u32Tmp);
3334 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3335 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3336 }
3337 IEM_MC_ADVANCE_RIP();
3338 IEM_MC_END();
3339 }
3340 return VINF_SUCCESS;
3341}
3342
3343/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3344FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3345{
3346 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3347 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3348 IEMOP_MNEMONIC(vmovq_Eq_Vq, "vmovq Eq,Vq");
3349 else
3350 IEMOP_MNEMONIC(vmovd_Ed_Vd, "vmovd Ed,Vd");
3351 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3352 {
3353 /* greg, XMM */
3354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3355 IEM_MC_BEGIN(0, 1);
3356 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3357 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3358 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3359 {
3360 IEM_MC_LOCAL(uint64_t, u64Tmp);
3361 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3362 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3363 }
3364 else
3365 {
3366 IEM_MC_LOCAL(uint32_t, u32Tmp);
3367 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3368 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3369 }
3370 IEM_MC_ADVANCE_RIP();
3371 IEM_MC_END();
3372 }
3373 else
3374 {
3375 /* [mem], XMM */
3376 IEM_MC_BEGIN(0, 2);
3377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3378 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3381 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3382 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3383 {
3384 IEM_MC_LOCAL(uint64_t, u64Tmp);
3385 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3386 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3387 }
3388 else
3389 {
3390 IEM_MC_LOCAL(uint32_t, u32Tmp);
3391 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3392 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3393 }
3394 IEM_MC_ADVANCE_RIP();
3395 IEM_MC_END();
3396 }
3397 return VINF_SUCCESS;
3398}
3399
3400/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3401FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3402/* Opcode 0xf2 0x0f 0x7e - invalid */
3403
3404
3405/** Opcode 0x0f 0x7f - movq Qq, Pq */
3406FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3407{
3408 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3410 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3411 {
3412 /*
3413 * Register, register.
3414 */
3415 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3416 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3418 IEM_MC_BEGIN(0, 1);
3419 IEM_MC_LOCAL(uint64_t, u64Tmp);
3420 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3421 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3422 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3423 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3424 IEM_MC_ADVANCE_RIP();
3425 IEM_MC_END();
3426 }
3427 else
3428 {
3429 /*
3430 * Register, memory.
3431 */
3432 IEM_MC_BEGIN(0, 2);
3433 IEM_MC_LOCAL(uint64_t, u64Tmp);
3434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3435
3436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3438 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3439 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3440
3441 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3442 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3443
3444 IEM_MC_ADVANCE_RIP();
3445 IEM_MC_END();
3446 }
3447 return VINF_SUCCESS;
3448}
3449
3450/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3451FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3452{
3453 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3456 {
3457 /*
3458 * Register, register.
3459 */
3460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3461 IEM_MC_BEGIN(0, 0);
3462 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3463 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3464 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3465 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3466 IEM_MC_ADVANCE_RIP();
3467 IEM_MC_END();
3468 }
3469 else
3470 {
3471 /*
3472 * Register, memory.
3473 */
3474 IEM_MC_BEGIN(0, 2);
3475 IEM_MC_LOCAL(uint128_t, u128Tmp);
3476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3477
3478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3480 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3481 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3482
3483 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3484 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3485
3486 IEM_MC_ADVANCE_RIP();
3487 IEM_MC_END();
3488 }
3489 return VINF_SUCCESS;
3490}
3491
3492/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3493FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3494{
3495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3496 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3498 {
3499 /*
3500 * Register, register.
3501 */
3502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3503 IEM_MC_BEGIN(0, 0);
3504 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3505 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3506 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3507 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3508 IEM_MC_ADVANCE_RIP();
3509 IEM_MC_END();
3510 }
3511 else
3512 {
3513 /*
3514 * Register, memory.
3515 */
3516 IEM_MC_BEGIN(0, 2);
3517 IEM_MC_LOCAL(uint128_t, u128Tmp);
3518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3519
3520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3522 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3523 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3524
3525 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3526 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3527
3528 IEM_MC_ADVANCE_RIP();
3529 IEM_MC_END();
3530 }
3531 return VINF_SUCCESS;
3532}
3533
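/*
 * Illustrative sketch (not IEM code): the only functional difference between
 * the movdqa and movdqu stores above is the 16-byte alignment requirement
 * enforced by IEM_MC_STORE_MEM_U128_ALIGN_SSE; movdqa faults on a misaligned
 * effective address while movdqu accepts any address.  Hypothetical helper:
 */
#if 0
# include <stdint.h>

/** Returns nonzero if a movdqa-style 128-bit access at this address is allowed. */
static int exampleIsSse128AccessAligned(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* 16-byte aligned? */
}
#endif
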
3534/* Opcode 0xf2 0x0f 0x7f - invalid */
3535
3536
3537
3538/** Opcode 0x0f 0x80. */
3539FNIEMOP_DEF(iemOp_jo_Jv)
3540{
3541 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3542 IEMOP_HLP_MIN_386();
3543 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3544 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3545 {
3546 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3548
3549 IEM_MC_BEGIN(0, 0);
3550 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3551 IEM_MC_REL_JMP_S16(i16Imm);
3552 } IEM_MC_ELSE() {
3553 IEM_MC_ADVANCE_RIP();
3554 } IEM_MC_ENDIF();
3555 IEM_MC_END();
3556 }
3557 else
3558 {
3559 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3561
3562 IEM_MC_BEGIN(0, 0);
3563 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3564 IEM_MC_REL_JMP_S32(i32Imm);
3565 } IEM_MC_ELSE() {
3566 IEM_MC_ADVANCE_RIP();
3567 } IEM_MC_ENDIF();
3568 IEM_MC_END();
3569 }
3570 return VINF_SUCCESS;
3571}
3572
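/*
 * Illustrative sketch (not IEM code): what a taken Jcc does to the
 * instruction pointer.  The displacement is relative to the first byte of
 * the next instruction, and a 16-bit operand size truncates the result to
 * IP.  Standalone model under those assumptions; names are made up.
 */
#if 0
# include <stdint.h>

static uint64_t exampleRipAfterTakenJcc(uint64_t uRipNextInstr, int32_t iDisp, unsigned cOpSizeBits)
{
    uint64_t uNewRip = uRipNextInstr + (int64_t)iDisp; /* signed displacement */
    if (cOpSizeBits == 16)
        uNewRip &= UINT16_MAX; /* 16-bit operand size wraps to IP */
    return uNewRip;
}
#endif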
3573
3574/** Opcode 0x0f 0x81. */
3575FNIEMOP_DEF(iemOp_jno_Jv)
3576{
3577 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3578 IEMOP_HLP_MIN_386();
3579 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3580 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3581 {
3582 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3584
3585 IEM_MC_BEGIN(0, 0);
3586 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3587 IEM_MC_ADVANCE_RIP();
3588 } IEM_MC_ELSE() {
3589 IEM_MC_REL_JMP_S16(i16Imm);
3590 } IEM_MC_ENDIF();
3591 IEM_MC_END();
3592 }
3593 else
3594 {
3595 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3597
3598 IEM_MC_BEGIN(0, 0);
3599 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3600 IEM_MC_ADVANCE_RIP();
3601 } IEM_MC_ELSE() {
3602 IEM_MC_REL_JMP_S32(i32Imm);
3603 } IEM_MC_ENDIF();
3604 IEM_MC_END();
3605 }
3606 return VINF_SUCCESS;
3607}
3608
3609
3610/** Opcode 0x0f 0x82. */
3611FNIEMOP_DEF(iemOp_jc_Jv)
3612{
3613 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3614 IEMOP_HLP_MIN_386();
3615 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3616 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3617 {
3618 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3620
3621 IEM_MC_BEGIN(0, 0);
3622 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3623 IEM_MC_REL_JMP_S16(i16Imm);
3624 } IEM_MC_ELSE() {
3625 IEM_MC_ADVANCE_RIP();
3626 } IEM_MC_ENDIF();
3627 IEM_MC_END();
3628 }
3629 else
3630 {
3631 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3633
3634 IEM_MC_BEGIN(0, 0);
3635 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3636 IEM_MC_REL_JMP_S32(i32Imm);
3637 } IEM_MC_ELSE() {
3638 IEM_MC_ADVANCE_RIP();
3639 } IEM_MC_ENDIF();
3640 IEM_MC_END();
3641 }
3642 return VINF_SUCCESS;
3643}
3644
3645
3646/** Opcode 0x0f 0x83. */
3647FNIEMOP_DEF(iemOp_jnc_Jv)
3648{
3649 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3650 IEMOP_HLP_MIN_386();
3651 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3652 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3653 {
3654 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3656
3657 IEM_MC_BEGIN(0, 0);
3658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3659 IEM_MC_ADVANCE_RIP();
3660 } IEM_MC_ELSE() {
3661 IEM_MC_REL_JMP_S16(i16Imm);
3662 } IEM_MC_ENDIF();
3663 IEM_MC_END();
3664 }
3665 else
3666 {
3667 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3669
3670 IEM_MC_BEGIN(0, 0);
3671 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3672 IEM_MC_ADVANCE_RIP();
3673 } IEM_MC_ELSE() {
3674 IEM_MC_REL_JMP_S32(i32Imm);
3675 } IEM_MC_ENDIF();
3676 IEM_MC_END();
3677 }
3678 return VINF_SUCCESS;
3679}
3680
3681
3682/** Opcode 0x0f 0x84. */
3683FNIEMOP_DEF(iemOp_je_Jv)
3684{
3685 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3686 IEMOP_HLP_MIN_386();
3687 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3688 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3689 {
3690 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3692
3693 IEM_MC_BEGIN(0, 0);
3694 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3695 IEM_MC_REL_JMP_S16(i16Imm);
3696 } IEM_MC_ELSE() {
3697 IEM_MC_ADVANCE_RIP();
3698 } IEM_MC_ENDIF();
3699 IEM_MC_END();
3700 }
3701 else
3702 {
3703 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3705
3706 IEM_MC_BEGIN(0, 0);
3707 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3708 IEM_MC_REL_JMP_S32(i32Imm);
3709 } IEM_MC_ELSE() {
3710 IEM_MC_ADVANCE_RIP();
3711 } IEM_MC_ENDIF();
3712 IEM_MC_END();
3713 }
3714 return VINF_SUCCESS;
3715}
3716
3717
3718/** Opcode 0x0f 0x85. */
3719FNIEMOP_DEF(iemOp_jne_Jv)
3720{
3721 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3722 IEMOP_HLP_MIN_386();
3723 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3724 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3725 {
3726 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3728
3729 IEM_MC_BEGIN(0, 0);
3730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3731 IEM_MC_ADVANCE_RIP();
3732 } IEM_MC_ELSE() {
3733 IEM_MC_REL_JMP_S16(i16Imm);
3734 } IEM_MC_ENDIF();
3735 IEM_MC_END();
3736 }
3737 else
3738 {
3739 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3741
3742 IEM_MC_BEGIN(0, 0);
3743 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3744 IEM_MC_ADVANCE_RIP();
3745 } IEM_MC_ELSE() {
3746 IEM_MC_REL_JMP_S32(i32Imm);
3747 } IEM_MC_ENDIF();
3748 IEM_MC_END();
3749 }
3750 return VINF_SUCCESS;
3751}
3752
3753
3754/** Opcode 0x0f 0x86. */
3755FNIEMOP_DEF(iemOp_jbe_Jv)
3756{
3757 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3758 IEMOP_HLP_MIN_386();
3759 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3760 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3761 {
3762 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3764
3765 IEM_MC_BEGIN(0, 0);
3766 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3767 IEM_MC_REL_JMP_S16(i16Imm);
3768 } IEM_MC_ELSE() {
3769 IEM_MC_ADVANCE_RIP();
3770 } IEM_MC_ENDIF();
3771 IEM_MC_END();
3772 }
3773 else
3774 {
3775 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3777
3778 IEM_MC_BEGIN(0, 0);
3779 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3780 IEM_MC_REL_JMP_S32(i32Imm);
3781 } IEM_MC_ELSE() {
3782 IEM_MC_ADVANCE_RIP();
3783 } IEM_MC_ENDIF();
3784 IEM_MC_END();
3785 }
3786 return VINF_SUCCESS;
3787}
3788
3789
3790/** Opcode 0x0f 0x87. */
3791FNIEMOP_DEF(iemOp_jnbe_Jv)
3792{
3793 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3794 IEMOP_HLP_MIN_386();
3795 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3796 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3797 {
3798 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3800
3801 IEM_MC_BEGIN(0, 0);
3802 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3803 IEM_MC_ADVANCE_RIP();
3804 } IEM_MC_ELSE() {
3805 IEM_MC_REL_JMP_S16(i16Imm);
3806 } IEM_MC_ENDIF();
3807 IEM_MC_END();
3808 }
3809 else
3810 {
3811 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3813
3814 IEM_MC_BEGIN(0, 0);
3815 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3816 IEM_MC_ADVANCE_RIP();
3817 } IEM_MC_ELSE() {
3818 IEM_MC_REL_JMP_S32(i32Imm);
3819 } IEM_MC_ENDIF();
3820 IEM_MC_END();
3821 }
3822 return VINF_SUCCESS;
3823}
3824
3825
3826/** Opcode 0x0f 0x88. */
3827FNIEMOP_DEF(iemOp_js_Jv)
3828{
3829 IEMOP_MNEMONIC(js_Jv, "js Jv");
3830 IEMOP_HLP_MIN_386();
3831 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3832 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3833 {
3834 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3836
3837 IEM_MC_BEGIN(0, 0);
3838 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3839 IEM_MC_REL_JMP_S16(i16Imm);
3840 } IEM_MC_ELSE() {
3841 IEM_MC_ADVANCE_RIP();
3842 } IEM_MC_ENDIF();
3843 IEM_MC_END();
3844 }
3845 else
3846 {
3847 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3849
3850 IEM_MC_BEGIN(0, 0);
3851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3852 IEM_MC_REL_JMP_S32(i32Imm);
3853 } IEM_MC_ELSE() {
3854 IEM_MC_ADVANCE_RIP();
3855 } IEM_MC_ENDIF();
3856 IEM_MC_END();
3857 }
3858 return VINF_SUCCESS;
3859}
3860
3861
3862/** Opcode 0x0f 0x89. */
3863FNIEMOP_DEF(iemOp_jns_Jv)
3864{
3865 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3866 IEMOP_HLP_MIN_386();
3867 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3868 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3869 {
3870 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3872
3873 IEM_MC_BEGIN(0, 0);
3874 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3875 IEM_MC_ADVANCE_RIP();
3876 } IEM_MC_ELSE() {
3877 IEM_MC_REL_JMP_S16(i16Imm);
3878 } IEM_MC_ENDIF();
3879 IEM_MC_END();
3880 }
3881 else
3882 {
3883 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3885
3886 IEM_MC_BEGIN(0, 0);
3887 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3888 IEM_MC_ADVANCE_RIP();
3889 } IEM_MC_ELSE() {
3890 IEM_MC_REL_JMP_S32(i32Imm);
3891 } IEM_MC_ENDIF();
3892 IEM_MC_END();
3893 }
3894 return VINF_SUCCESS;
3895}
3896
3897
3898/** Opcode 0x0f 0x8a. */
3899FNIEMOP_DEF(iemOp_jp_Jv)
3900{
3901 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3902 IEMOP_HLP_MIN_386();
3903 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3904 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3905 {
3906 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3908
3909 IEM_MC_BEGIN(0, 0);
3910 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3911 IEM_MC_REL_JMP_S16(i16Imm);
3912 } IEM_MC_ELSE() {
3913 IEM_MC_ADVANCE_RIP();
3914 } IEM_MC_ENDIF();
3915 IEM_MC_END();
3916 }
3917 else
3918 {
3919 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3921
3922 IEM_MC_BEGIN(0, 0);
3923 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3924 IEM_MC_REL_JMP_S32(i32Imm);
3925 } IEM_MC_ELSE() {
3926 IEM_MC_ADVANCE_RIP();
3927 } IEM_MC_ENDIF();
3928 IEM_MC_END();
3929 }
3930 return VINF_SUCCESS;
3931}
3932
3933
3934/** Opcode 0x0f 0x8b. */
3935FNIEMOP_DEF(iemOp_jnp_Jv)
3936{
3937 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3938 IEMOP_HLP_MIN_386();
3939 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3940 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3941 {
3942 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3944
3945 IEM_MC_BEGIN(0, 0);
3946 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3947 IEM_MC_ADVANCE_RIP();
3948 } IEM_MC_ELSE() {
3949 IEM_MC_REL_JMP_S16(i16Imm);
3950 } IEM_MC_ENDIF();
3951 IEM_MC_END();
3952 }
3953 else
3954 {
3955 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3957
3958 IEM_MC_BEGIN(0, 0);
3959 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3960 IEM_MC_ADVANCE_RIP();
3961 } IEM_MC_ELSE() {
3962 IEM_MC_REL_JMP_S32(i32Imm);
3963 } IEM_MC_ENDIF();
3964 IEM_MC_END();
3965 }
3966 return VINF_SUCCESS;
3967}
3968
3969
3970/** Opcode 0x0f 0x8c. */
3971FNIEMOP_DEF(iemOp_jl_Jv)
3972{
3973 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3974 IEMOP_HLP_MIN_386();
3975 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3976 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3977 {
3978 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3980
3981 IEM_MC_BEGIN(0, 0);
3982 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3983 IEM_MC_REL_JMP_S16(i16Imm);
3984 } IEM_MC_ELSE() {
3985 IEM_MC_ADVANCE_RIP();
3986 } IEM_MC_ENDIF();
3987 IEM_MC_END();
3988 }
3989 else
3990 {
3991 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3993
3994 IEM_MC_BEGIN(0, 0);
3995 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3996 IEM_MC_REL_JMP_S32(i32Imm);
3997 } IEM_MC_ELSE() {
3998 IEM_MC_ADVANCE_RIP();
3999 } IEM_MC_ENDIF();
4000 IEM_MC_END();
4001 }
4002 return VINF_SUCCESS;
4003}
4004
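/*
 * Illustrative sketch (not IEM code): why "less" tests SF != OF above.  For
 * cmp -1, 1 the result -2 gives SF=1/OF=0, so the branch is taken; for
 * INT32_MIN - 1 the subtraction overflows, flipping both flags so the test
 * still reports "less".  Standalone model; names are made up.
 */
#if 0
# include <stdint.h>

static int exampleSignedLessViaFlags(int32_t i32Left, int32_t i32Right)
{
    int32_t i32Res = (int32_t)((uint32_t)i32Left - (uint32_t)i32Right);
    int     fSF    = i32Res < 0;
    int     fOF    = (i32Left < 0) != (i32Right < 0)   /* operands differ in sign */
                  && (i32Res  < 0) != (i32Left  < 0);  /* and the result sign flipped */
    return fSF != fOF; /* agrees with (i32Left < i32Right) for all inputs */
}
#endif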
4005
4006/** Opcode 0x0f 0x8d. */
4007FNIEMOP_DEF(iemOp_jnl_Jv)
4008{
4009 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4010 IEMOP_HLP_MIN_386();
4011 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4012 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4013 {
4014 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4016
4017 IEM_MC_BEGIN(0, 0);
4018 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4019 IEM_MC_ADVANCE_RIP();
4020 } IEM_MC_ELSE() {
4021 IEM_MC_REL_JMP_S16(i16Imm);
4022 } IEM_MC_ENDIF();
4023 IEM_MC_END();
4024 }
4025 else
4026 {
4027 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4029
4030 IEM_MC_BEGIN(0, 0);
4031 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4032 IEM_MC_ADVANCE_RIP();
4033 } IEM_MC_ELSE() {
4034 IEM_MC_REL_JMP_S32(i32Imm);
4035 } IEM_MC_ENDIF();
4036 IEM_MC_END();
4037 }
4038 return VINF_SUCCESS;
4039}
4040
4041
4042/** Opcode 0x0f 0x8e. */
4043FNIEMOP_DEF(iemOp_jle_Jv)
4044{
4045 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4046 IEMOP_HLP_MIN_386();
4047 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4048 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4049 {
4050 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4052
4053 IEM_MC_BEGIN(0, 0);
4054 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4055 IEM_MC_REL_JMP_S16(i16Imm);
4056 } IEM_MC_ELSE() {
4057 IEM_MC_ADVANCE_RIP();
4058 } IEM_MC_ENDIF();
4059 IEM_MC_END();
4060 }
4061 else
4062 {
4063 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4065
4066 IEM_MC_BEGIN(0, 0);
4067 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4068 IEM_MC_REL_JMP_S32(i32Imm);
4069 } IEM_MC_ELSE() {
4070 IEM_MC_ADVANCE_RIP();
4071 } IEM_MC_ENDIF();
4072 IEM_MC_END();
4073 }
4074 return VINF_SUCCESS;
4075}
4076
4077
4078/** Opcode 0x0f 0x8f. */
4079FNIEMOP_DEF(iemOp_jnle_Jv)
4080{
4081 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4082 IEMOP_HLP_MIN_386();
4083 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4084 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4085 {
4086 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4088
4089 IEM_MC_BEGIN(0, 0);
4090 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4091 IEM_MC_ADVANCE_RIP();
4092 } IEM_MC_ELSE() {
4093 IEM_MC_REL_JMP_S16(i16Imm);
4094 } IEM_MC_ENDIF();
4095 IEM_MC_END();
4096 }
4097 else
4098 {
4099 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4101
4102 IEM_MC_BEGIN(0, 0);
4103 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4104 IEM_MC_ADVANCE_RIP();
4105 } IEM_MC_ELSE() {
4106 IEM_MC_REL_JMP_S32(i32Imm);
4107 } IEM_MC_ENDIF();
4108 IEM_MC_END();
4109 }
4110 return VINF_SUCCESS;
4111}
4112
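/*
 * Illustrative sketch (not IEM code): the sixteen Jcc handlers above (and
 * the SETcc handlers below) differ only in which EFLAGS predicate they
 * evaluate; even low opcode nibbles test the condition, odd ones its
 * negation.  Standalone model of the predicate table, assuming the
 * architectural EFLAGS bit positions; names are made up.
 */
#if 0
# include <stdint.h>

# define EXAMPLE_EFL_CF UINT32_C(0x0001)
# define EXAMPLE_EFL_PF UINT32_C(0x0004)
# define EXAMPLE_EFL_ZF UINT32_C(0x0040)
# define EXAMPLE_EFL_SF UINT32_C(0x0080)
# define EXAMPLE_EFL_OF UINT32_C(0x0800)

/** Evaluates the condition for opcodes 0x0f 0x80..0x8f / 0x90..0x9f,
 *  given the low opcode nibble (0..15). */
static int exampleCcHolds(uint8_t bCcNibble, uint32_t fEFlags)
{
    int fSF    = !!(fEFlags & EXAMPLE_EFL_SF);
    int fOF    = !!(fEFlags & EXAMPLE_EFL_OF);
    int fTaken = 0;
    switch (bCcNibble >> 1)
    {
        case 0: fTaken = !!(fEFlags & EXAMPLE_EFL_OF); break;                    /* o  / no  */
        case 1: fTaken = !!(fEFlags & EXAMPLE_EFL_CF); break;                    /* c  / nc  */
        case 2: fTaken = !!(fEFlags & EXAMPLE_EFL_ZF); break;                    /* e  / ne  */
        case 3: fTaken = !!(fEFlags & (EXAMPLE_EFL_CF | EXAMPLE_EFL_ZF)); break; /* be / nbe */
        case 4: fTaken = fSF; break;                                             /* s  / ns  */
        case 5: fTaken = !!(fEFlags & EXAMPLE_EFL_PF); break;                    /* p  / np  */
        case 6: fTaken = fSF != fOF; break;                                      /* l  / nl  */
        case 7: fTaken = !!(fEFlags & EXAMPLE_EFL_ZF) || fSF != fOF; break;      /* le / nle */
    }
    return (bCcNibble & 1) ? !fTaken : fTaken;
}
#endif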
4113
4114/** Opcode 0x0f 0x90. */
4115FNIEMOP_DEF(iemOp_seto_Eb)
4116{
4117 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4118 IEMOP_HLP_MIN_386();
4119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4120
4121 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4122 * any way. AMD says it's "unused", whatever that means. We're
4123 * ignoring for now. */
4124 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4125 {
4126 /* register target */
4127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4128 IEM_MC_BEGIN(0, 0);
4129 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4130 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4131 } IEM_MC_ELSE() {
4132 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4133 } IEM_MC_ENDIF();
4134 IEM_MC_ADVANCE_RIP();
4135 IEM_MC_END();
4136 }
4137 else
4138 {
4139 /* memory target */
4140 IEM_MC_BEGIN(0, 1);
4141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4144 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4145 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4146 } IEM_MC_ELSE() {
4147 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4148 } IEM_MC_ENDIF();
4149 IEM_MC_ADVANCE_RIP();
4150 IEM_MC_END();
4151 }
4152 return VINF_SUCCESS;
4153}
4154
4155
4156/** Opcode 0x0f 0x91. */
4157FNIEMOP_DEF(iemOp_setno_Eb)
4158{
4159 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4160 IEMOP_HLP_MIN_386();
4161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4162
4163 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4164 * any way. AMD says it's "unused", whatever that means. We're
4165 * ignoring for now. */
4166 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4167 {
4168 /* register target */
4169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4170 IEM_MC_BEGIN(0, 0);
4171 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4172 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4173 } IEM_MC_ELSE() {
4174 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4175 } IEM_MC_ENDIF();
4176 IEM_MC_ADVANCE_RIP();
4177 IEM_MC_END();
4178 }
4179 else
4180 {
4181 /* memory target */
4182 IEM_MC_BEGIN(0, 1);
4183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4186 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4187 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4188 } IEM_MC_ELSE() {
4189 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4190 } IEM_MC_ENDIF();
4191 IEM_MC_ADVANCE_RIP();
4192 IEM_MC_END();
4193 }
4194 return VINF_SUCCESS;
4195}
4196
4197
4198/** Opcode 0x0f 0x92. */
4199FNIEMOP_DEF(iemOp_setc_Eb)
4200{
4201 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4202 IEMOP_HLP_MIN_386();
4203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4204
4205 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4206 * any way. AMD says it's "unused", whatever that means. We're
4207 * ignoring for now. */
4208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4209 {
4210 /* register target */
4211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4212 IEM_MC_BEGIN(0, 0);
4213 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4214 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4215 } IEM_MC_ELSE() {
4216 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4217 } IEM_MC_ENDIF();
4218 IEM_MC_ADVANCE_RIP();
4219 IEM_MC_END();
4220 }
4221 else
4222 {
4223 /* memory target */
4224 IEM_MC_BEGIN(0, 1);
4225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4228 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4229 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4230 } IEM_MC_ELSE() {
4231 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4232 } IEM_MC_ENDIF();
4233 IEM_MC_ADVANCE_RIP();
4234 IEM_MC_END();
4235 }
4236 return VINF_SUCCESS;
4237}
4238
4239
4240/** Opcode 0x0f 0x93. */
4241FNIEMOP_DEF(iemOp_setnc_Eb)
4242{
4243 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4244 IEMOP_HLP_MIN_386();
4245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4246
4247 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4248 * any way. AMD says it's "unused", whatever that means. We're
4249 * ignoring for now. */
4250 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4251 {
4252 /* register target */
4253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4254 IEM_MC_BEGIN(0, 0);
4255 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4256 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4257 } IEM_MC_ELSE() {
4258 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4259 } IEM_MC_ENDIF();
4260 IEM_MC_ADVANCE_RIP();
4261 IEM_MC_END();
4262 }
4263 else
4264 {
4265 /* memory target */
4266 IEM_MC_BEGIN(0, 1);
4267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4270 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4271 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4272 } IEM_MC_ELSE() {
4273 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4274 } IEM_MC_ENDIF();
4275 IEM_MC_ADVANCE_RIP();
4276 IEM_MC_END();
4277 }
4278 return VINF_SUCCESS;
4279}
4280
4281
4282/** Opcode 0x0f 0x94. */
4283FNIEMOP_DEF(iemOp_sete_Eb)
4284{
4285 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4286 IEMOP_HLP_MIN_386();
4287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4288
4289 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4290 * any way. AMD says it's "unused", whatever that means. We're
4291 * ignoring for now. */
4292 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4293 {
4294 /* register target */
4295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4296 IEM_MC_BEGIN(0, 0);
4297 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4298 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4299 } IEM_MC_ELSE() {
4300 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4301 } IEM_MC_ENDIF();
4302 IEM_MC_ADVANCE_RIP();
4303 IEM_MC_END();
4304 }
4305 else
4306 {
4307 /* memory target */
4308 IEM_MC_BEGIN(0, 1);
4309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4312 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4313 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4314 } IEM_MC_ELSE() {
4315 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4316 } IEM_MC_ENDIF();
4317 IEM_MC_ADVANCE_RIP();
4318 IEM_MC_END();
4319 }
4320 return VINF_SUCCESS;
4321}
4322
4323
4324/** Opcode 0x0f 0x95. */
4325FNIEMOP_DEF(iemOp_setne_Eb)
4326{
4327 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4328 IEMOP_HLP_MIN_386();
4329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4330
4331 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4332 * any way. AMD says it's "unused", whatever that means. We're
4333 * ignoring for now. */
4334 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4335 {
4336 /* register target */
4337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4338 IEM_MC_BEGIN(0, 0);
4339 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4340 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4341 } IEM_MC_ELSE() {
4342 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4343 } IEM_MC_ENDIF();
4344 IEM_MC_ADVANCE_RIP();
4345 IEM_MC_END();
4346 }
4347 else
4348 {
4349 /* memory target */
4350 IEM_MC_BEGIN(0, 1);
4351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4354 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4355 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4356 } IEM_MC_ELSE() {
4357 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4358 } IEM_MC_ENDIF();
4359 IEM_MC_ADVANCE_RIP();
4360 IEM_MC_END();
4361 }
4362 return VINF_SUCCESS;
4363}
4364
4365
4366/** Opcode 0x0f 0x96. */
4367FNIEMOP_DEF(iemOp_setbe_Eb)
4368{
4369 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4370 IEMOP_HLP_MIN_386();
4371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4372
4373 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4374 * any way. AMD says it's "unused", whatever that means. We're
4375 * ignoring for now. */
4376 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4377 {
4378 /* register target */
4379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4380 IEM_MC_BEGIN(0, 0);
4381 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4382 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4383 } IEM_MC_ELSE() {
4384 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4385 } IEM_MC_ENDIF();
4386 IEM_MC_ADVANCE_RIP();
4387 IEM_MC_END();
4388 }
4389 else
4390 {
4391 /* memory target */
4392 IEM_MC_BEGIN(0, 1);
4393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4396 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4397 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4398 } IEM_MC_ELSE() {
4399 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4400 } IEM_MC_ENDIF();
4401 IEM_MC_ADVANCE_RIP();
4402 IEM_MC_END();
4403 }
4404 return VINF_SUCCESS;
4405}
4406
4407
4408/** Opcode 0x0f 0x97. */
4409FNIEMOP_DEF(iemOp_setnbe_Eb)
4410{
4411 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4412 IEMOP_HLP_MIN_386();
4413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4414
4415 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4416 * any way. AMD says it's "unused", whatever that means. We're
4417 * ignoring for now. */
4418 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4419 {
4420 /* register target */
4421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4422 IEM_MC_BEGIN(0, 0);
4423 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4424 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4425 } IEM_MC_ELSE() {
4426 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4427 } IEM_MC_ENDIF();
4428 IEM_MC_ADVANCE_RIP();
4429 IEM_MC_END();
4430 }
4431 else
4432 {
4433 /* memory target */
4434 IEM_MC_BEGIN(0, 1);
4435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4438 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4439 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4440 } IEM_MC_ELSE() {
4441 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4442 } IEM_MC_ENDIF();
4443 IEM_MC_ADVANCE_RIP();
4444 IEM_MC_END();
4445 }
4446 return VINF_SUCCESS;
4447}
4448
4449
4450/** Opcode 0x0f 0x98. */
4451FNIEMOP_DEF(iemOp_sets_Eb)
4452{
4453 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4454 IEMOP_HLP_MIN_386();
4455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4456
4457 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4458 * any way. AMD says it's "unused", whatever that means. We're
4459 * ignoring for now. */
4460 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4461 {
4462 /* register target */
4463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4464 IEM_MC_BEGIN(0, 0);
4465 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4466 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4467 } IEM_MC_ELSE() {
4468 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4469 } IEM_MC_ENDIF();
4470 IEM_MC_ADVANCE_RIP();
4471 IEM_MC_END();
4472 }
4473 else
4474 {
4475 /* memory target */
4476 IEM_MC_BEGIN(0, 1);
4477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4480 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4481 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4482 } IEM_MC_ELSE() {
4483 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4484 } IEM_MC_ENDIF();
4485 IEM_MC_ADVANCE_RIP();
4486 IEM_MC_END();
4487 }
4488 return VINF_SUCCESS;
4489}
4490
4491
4492/** Opcode 0x0f 0x99. */
4493FNIEMOP_DEF(iemOp_setns_Eb)
4494{
4495 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4496 IEMOP_HLP_MIN_386();
4497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4498
4499 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4500 * any way. AMD says it's "unused", whatever that means. We're
4501 * ignoring for now. */
4502 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4503 {
4504 /* register target */
4505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4506 IEM_MC_BEGIN(0, 0);
4507 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4508 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4509 } IEM_MC_ELSE() {
4510 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4511 } IEM_MC_ENDIF();
4512 IEM_MC_ADVANCE_RIP();
4513 IEM_MC_END();
4514 }
4515 else
4516 {
4517 /* memory target */
4518 IEM_MC_BEGIN(0, 1);
4519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4522 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4523 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4524 } IEM_MC_ELSE() {
4525 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4526 } IEM_MC_ENDIF();
4527 IEM_MC_ADVANCE_RIP();
4528 IEM_MC_END();
4529 }
4530 return VINF_SUCCESS;
4531}
4532
4533
4534/** Opcode 0x0f 0x9a. */
4535FNIEMOP_DEF(iemOp_setp_Eb)
4536{
4537 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4538 IEMOP_HLP_MIN_386();
4539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4540
4541 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4542 * any way. AMD says it's "unused", whatever that means. We're
4543 * ignoring for now. */
4544 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4545 {
4546 /* register target */
4547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4548 IEM_MC_BEGIN(0, 0);
4549 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4550 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4551 } IEM_MC_ELSE() {
4552 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4553 } IEM_MC_ENDIF();
4554 IEM_MC_ADVANCE_RIP();
4555 IEM_MC_END();
4556 }
4557 else
4558 {
4559 /* memory target */
4560 IEM_MC_BEGIN(0, 1);
4561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4564 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4565 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4566 } IEM_MC_ELSE() {
4567 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4568 } IEM_MC_ENDIF();
4569 IEM_MC_ADVANCE_RIP();
4570 IEM_MC_END();
4571 }
4572 return VINF_SUCCESS;
4573}
4574
4575
4576/** Opcode 0x0f 0x9b. */
4577FNIEMOP_DEF(iemOp_setnp_Eb)
4578{
4579 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4580 IEMOP_HLP_MIN_386();
4581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4582
4583 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4584 * any way. AMD says it's "unused", whatever that means. We're
4585 * ignoring for now. */
4586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4587 {
4588 /* register target */
4589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4590 IEM_MC_BEGIN(0, 0);
4591 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4592 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4593 } IEM_MC_ELSE() {
4594 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4595 } IEM_MC_ENDIF();
4596 IEM_MC_ADVANCE_RIP();
4597 IEM_MC_END();
4598 }
4599 else
4600 {
4601 /* memory target */
4602 IEM_MC_BEGIN(0, 1);
4603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4606 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4607 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4608 } IEM_MC_ELSE() {
4609 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4610 } IEM_MC_ENDIF();
4611 IEM_MC_ADVANCE_RIP();
4612 IEM_MC_END();
4613 }
4614 return VINF_SUCCESS;
4615}
4616
4617
4618/** Opcode 0x0f 0x9c. */
4619FNIEMOP_DEF(iemOp_setl_Eb)
4620{
4621 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4622 IEMOP_HLP_MIN_386();
4623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4624
4625 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4626 * any way. AMD says it's "unused", whatever that means. We're
4627 * ignoring for now. */
4628 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4629 {
4630 /* register target */
4631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4632 IEM_MC_BEGIN(0, 0);
4633 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4634 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4635 } IEM_MC_ELSE() {
4636 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4637 } IEM_MC_ENDIF();
4638 IEM_MC_ADVANCE_RIP();
4639 IEM_MC_END();
4640 }
4641 else
4642 {
4643 /* memory target */
4644 IEM_MC_BEGIN(0, 1);
4645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4648 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4649 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4650 } IEM_MC_ELSE() {
4651 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4652 } IEM_MC_ENDIF();
4653 IEM_MC_ADVANCE_RIP();
4654 IEM_MC_END();
4655 }
4656 return VINF_SUCCESS;
4657}
4658
4659
4660/** Opcode 0x0f 0x9d. */
4661FNIEMOP_DEF(iemOp_setnl_Eb)
4662{
4663 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4664 IEMOP_HLP_MIN_386();
4665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4666
4667 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4668 * any way. AMD says it's "unused", whatever that means. We're
4669 * ignoring for now. */
4670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4671 {
4672 /* register target */
4673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4674 IEM_MC_BEGIN(0, 0);
4675 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4676 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4677 } IEM_MC_ELSE() {
4678 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4679 } IEM_MC_ENDIF();
4680 IEM_MC_ADVANCE_RIP();
4681 IEM_MC_END();
4682 }
4683 else
4684 {
4685 /* memory target */
4686 IEM_MC_BEGIN(0, 1);
4687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4690 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4691 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4692 } IEM_MC_ELSE() {
4693 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4694 } IEM_MC_ENDIF();
4695 IEM_MC_ADVANCE_RIP();
4696 IEM_MC_END();
4697 }
4698 return VINF_SUCCESS;
4699}
4700
4701
4702/** Opcode 0x0f 0x9e. */
4703FNIEMOP_DEF(iemOp_setle_Eb)
4704{
4705 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4706 IEMOP_HLP_MIN_386();
4707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4708
4709 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4710 * any way. AMD says it's "unused", whatever that means. We're
4711 * ignoring for now. */
4712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4713 {
4714 /* register target */
4715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4716 IEM_MC_BEGIN(0, 0);
4717 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4718 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4719 } IEM_MC_ELSE() {
4720 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4721 } IEM_MC_ENDIF();
4722 IEM_MC_ADVANCE_RIP();
4723 IEM_MC_END();
4724 }
4725 else
4726 {
4727 /* memory target */
4728 IEM_MC_BEGIN(0, 1);
4729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4732 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4733 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4734 } IEM_MC_ELSE() {
4735 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4736 } IEM_MC_ENDIF();
4737 IEM_MC_ADVANCE_RIP();
4738 IEM_MC_END();
4739 }
4740 return VINF_SUCCESS;
4741}
4742
4743
4744/** Opcode 0x0f 0x9f. */
4745FNIEMOP_DEF(iemOp_setnle_Eb)
4746{
4747 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4748 IEMOP_HLP_MIN_386();
4749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4750
4751 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4752 * any way. AMD says it's "unused", whatever that means. We're
4753 * ignoring for now. */
4754 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4755 {
4756 /* register target */
4757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4758 IEM_MC_BEGIN(0, 0);
4759 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4760 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4761 } IEM_MC_ELSE() {
4762 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4763 } IEM_MC_ENDIF();
4764 IEM_MC_ADVANCE_RIP();
4765 IEM_MC_END();
4766 }
4767 else
4768 {
4769 /* memory target */
4770 IEM_MC_BEGIN(0, 1);
4771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4774 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4775 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4776 } IEM_MC_ELSE() {
4777 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4778 } IEM_MC_ENDIF();
4779 IEM_MC_ADVANCE_RIP();
4780 IEM_MC_END();
4781 }
4782 return VINF_SUCCESS;
4783}
4784
4785
4786/**
4787 * Common 'push segment-register' helper.
4788 */
4789FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4790{
4791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4792 if (iReg < X86_SREG_FS)
4793 IEMOP_HLP_NO_64BIT();
4794 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4795
4796 switch (pVCpu->iem.s.enmEffOpSize)
4797 {
4798 case IEMMODE_16BIT:
4799 IEM_MC_BEGIN(0, 1);
4800 IEM_MC_LOCAL(uint16_t, u16Value);
4801 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4802 IEM_MC_PUSH_U16(u16Value);
4803 IEM_MC_ADVANCE_RIP();
4804 IEM_MC_END();
4805 break;
4806
4807 case IEMMODE_32BIT:
4808 IEM_MC_BEGIN(0, 1);
4809 IEM_MC_LOCAL(uint32_t, u32Value);
4810 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4811 IEM_MC_PUSH_U32_SREG(u32Value);
4812 IEM_MC_ADVANCE_RIP();
4813 IEM_MC_END();
4814 break;
4815
4816 case IEMMODE_64BIT:
4817 IEM_MC_BEGIN(0, 1);
4818 IEM_MC_LOCAL(uint64_t, u64Value);
4819 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4820 IEM_MC_PUSH_U64(u64Value);
4821 IEM_MC_ADVANCE_RIP();
4822 IEM_MC_END();
4823 break;
4824 }
4825
4826 return VINF_SUCCESS;
4827}
4828
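/*
 * Illustrative sketch (not IEM code): IEM_MC_PUSH_U32_SREG above is distinct
 * from a plain 32-bit push because, per the Intel SDM note on PUSH with
 * segment registers, recent processors may write only the low word of the
 * 32-bit stack slot, leaving bits 31:16 untouched.  Standalone model of the
 * store under that assumption; names are made up.
 */
#if 0
# include <stdint.h>

static void examplePushSRegU32(uint8_t *pbStackSlot /* 4-byte slot */, uint16_t uSel)
{
    pbStackSlot[0] = (uint8_t)uSel;        /* only the low word is written... */
    pbStackSlot[1] = (uint8_t)(uSel >> 8);
    /* ...bytes 2 and 3 keep whatever was on the stack before. */
}
#endif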
4829
4830/** Opcode 0x0f 0xa0. */
4831FNIEMOP_DEF(iemOp_push_fs)
4832{
4833 IEMOP_MNEMONIC(push_fs, "push fs");
4834 IEMOP_HLP_MIN_386();
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4836 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4837}
4838
4839
4840/** Opcode 0x0f 0xa1. */
4841FNIEMOP_DEF(iemOp_pop_fs)
4842{
4843 IEMOP_MNEMONIC(pop_fs, "pop fs");
4844 IEMOP_HLP_MIN_386();
4845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4846 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4847}
4848
4849
4850/** Opcode 0x0f 0xa2. */
4851FNIEMOP_DEF(iemOp_cpuid)
4852{
4853 IEMOP_MNEMONIC(cpuid, "cpuid");
4854 IEMOP_HLP_MIN_486(); /* not all 486es. */
4855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4856 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4857}
4858
4859
4860/**
4861 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4862 * iemOp_bts_Ev_Gv.
4863 */
4864FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4865{
4866 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4867 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4868
4869 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4870 {
4871 /* register destination. */
4872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4873 switch (pVCpu->iem.s.enmEffOpSize)
4874 {
4875 case IEMMODE_16BIT:
4876 IEM_MC_BEGIN(3, 0);
4877 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4878 IEM_MC_ARG(uint16_t, u16Src, 1);
4879 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4880
4881 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4882 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4883 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4884 IEM_MC_REF_EFLAGS(pEFlags);
4885 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4886
4887 IEM_MC_ADVANCE_RIP();
4888 IEM_MC_END();
4889 return VINF_SUCCESS;
4890
4891 case IEMMODE_32BIT:
4892 IEM_MC_BEGIN(3, 0);
4893 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4894 IEM_MC_ARG(uint32_t, u32Src, 1);
4895 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4896
4897 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4898 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4899 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4900 IEM_MC_REF_EFLAGS(pEFlags);
4901 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4902
4903 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4904 IEM_MC_ADVANCE_RIP();
4905 IEM_MC_END();
4906 return VINF_SUCCESS;
4907
4908 case IEMMODE_64BIT:
4909 IEM_MC_BEGIN(3, 0);
4910 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4911 IEM_MC_ARG(uint64_t, u64Src, 1);
4912 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4913
4914 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4915 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4916 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4917 IEM_MC_REF_EFLAGS(pEFlags);
4918 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4919
4920 IEM_MC_ADVANCE_RIP();
4921 IEM_MC_END();
4922 return VINF_SUCCESS;
4923
4924 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4925 }
4926 }
4927 else
4928 {
4929 /* memory destination. */
4930
4931 uint32_t fAccess;
4932 if (pImpl->pfnLockedU16)
4933 fAccess = IEM_ACCESS_DATA_RW;
4934 else /* BT */
4935 fAccess = IEM_ACCESS_DATA_R;
4936
4937 /** @todo test negative bit offsets! */
4938 switch (pVCpu->iem.s.enmEffOpSize)
4939 {
4940 case IEMMODE_16BIT:
4941 IEM_MC_BEGIN(3, 2);
4942 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4943 IEM_MC_ARG(uint16_t, u16Src, 1);
4944 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4946 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4947
4948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4949 if (pImpl->pfnLockedU16)
4950 IEMOP_HLP_DONE_DECODING();
4951 else
4952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4953 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4954 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4955 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4956 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4957 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4958 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4959 IEM_MC_FETCH_EFLAGS(EFlags);
4960
4961 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4962 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4963 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4964 else
4965 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4966 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4967
4968 IEM_MC_COMMIT_EFLAGS(EFlags);
4969 IEM_MC_ADVANCE_RIP();
4970 IEM_MC_END();
4971 return VINF_SUCCESS;
4972
4973 case IEMMODE_32BIT:
4974 IEM_MC_BEGIN(3, 2);
4975 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4976 IEM_MC_ARG(uint32_t, u32Src, 1);
4977 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4979 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4980
4981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4982 if (pImpl->pfnLockedU16)
4983 IEMOP_HLP_DONE_DECODING();
4984 else
4985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4986 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4987 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4988 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4989 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4990 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4991 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4992 IEM_MC_FETCH_EFLAGS(EFlags);
4993
4994 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4995 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4996 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4997 else
4998 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4999 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5000
5001 IEM_MC_COMMIT_EFLAGS(EFlags);
5002 IEM_MC_ADVANCE_RIP();
5003 IEM_MC_END();
5004 return VINF_SUCCESS;
5005
5006 case IEMMODE_64BIT:
5007 IEM_MC_BEGIN(3, 2);
5008 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5009 IEM_MC_ARG(uint64_t, u64Src, 1);
5010 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5012 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5013
5014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5015 if (pImpl->pfnLockedU16)
5016 IEMOP_HLP_DONE_DECODING();
5017 else
5018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5019 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5020 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5021 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5022 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5023 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5024 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5025 IEM_MC_FETCH_EFLAGS(EFlags);
5026
5027 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5028 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5029 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5030 else
5031 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5032 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5033
5034 IEM_MC_COMMIT_EFLAGS(EFlags);
5035 IEM_MC_ADVANCE_RIP();
5036 IEM_MC_END();
5037 return VINF_SUCCESS;
5038
5039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5040 }
5041 }
5042}
5043
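/*
 * Illustrative sketch (not IEM code): the ASSIGN/AND/SAR/SHL sequence in the
 * memory paths above turns a signed bit offset into a byte adjustment of the
 * effective address plus a bit number within the operand.  16-bit worked
 * model (an arithmetic right shift is assumed, keeping negative offsets
 * correct: offset -1 hits bit 15 of the word two bytes below the base);
 * names are made up.
 */
#if 0
# include <stdint.h>

static void exampleBtSplitOffset16(int16_t iBitOffset, int32_t *poffByteAdj, unsigned *piBitNo)
{
    *piBitNo     = (uint16_t)iBitOffset & 0x0f;    /* bit within the selected word */
    *poffByteAdj = (int32_t)(iBitOffset >> 4) * 2; /* word index, scaled to bytes  */
}
#endif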
5044
5045/** Opcode 0x0f 0xa3. */
5046FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5047{
5048 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5049 IEMOP_HLP_MIN_386();
5050 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5051}
5052
5053
5054/**
5055 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5056 */
5057FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5058{
5059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5060 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5061
5062 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5063 {
5064 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5066
5067 switch (pVCpu->iem.s.enmEffOpSize)
5068 {
5069 case IEMMODE_16BIT:
5070 IEM_MC_BEGIN(4, 0);
5071 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5072 IEM_MC_ARG(uint16_t, u16Src, 1);
5073 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5074 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5075
5076 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5077 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5078 IEM_MC_REF_EFLAGS(pEFlags);
5079 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5080
5081 IEM_MC_ADVANCE_RIP();
5082 IEM_MC_END();
5083 return VINF_SUCCESS;
5084
5085 case IEMMODE_32BIT:
5086 IEM_MC_BEGIN(4, 0);
5087 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5088 IEM_MC_ARG(uint32_t, u32Src, 1);
5089 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5090 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5091
5092 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5093 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5094 IEM_MC_REF_EFLAGS(pEFlags);
5095 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5096
5097 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5098 IEM_MC_ADVANCE_RIP();
5099 IEM_MC_END();
5100 return VINF_SUCCESS;
5101
5102 case IEMMODE_64BIT:
5103 IEM_MC_BEGIN(4, 0);
5104 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5105 IEM_MC_ARG(uint64_t, u64Src, 1);
5106 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5107 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5108
5109 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5110 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5111 IEM_MC_REF_EFLAGS(pEFlags);
5112 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5113
5114 IEM_MC_ADVANCE_RIP();
5115 IEM_MC_END();
5116 return VINF_SUCCESS;
5117
5118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5119 }
5120 }
5121 else
5122 {
5123 switch (pVCpu->iem.s.enmEffOpSize)
5124 {
5125 case IEMMODE_16BIT:
5126 IEM_MC_BEGIN(4, 2);
5127 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5128 IEM_MC_ARG(uint16_t, u16Src, 1);
5129 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5130 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5132
5133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5134 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5135 IEM_MC_ASSIGN(cShiftArg, cShift);
5136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5137 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5138 IEM_MC_FETCH_EFLAGS(EFlags);
5139 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5140 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5141
5142 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5143 IEM_MC_COMMIT_EFLAGS(EFlags);
5144 IEM_MC_ADVANCE_RIP();
5145 IEM_MC_END();
5146 return VINF_SUCCESS;
5147
5148 case IEMMODE_32BIT:
5149 IEM_MC_BEGIN(4, 2);
5150 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5151 IEM_MC_ARG(uint32_t, u32Src, 1);
5152 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5153 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5155
5156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5157 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5158 IEM_MC_ASSIGN(cShiftArg, cShift);
5159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5160 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5161 IEM_MC_FETCH_EFLAGS(EFlags);
5162 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5163 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5164
5165 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5166 IEM_MC_COMMIT_EFLAGS(EFlags);
5167 IEM_MC_ADVANCE_RIP();
5168 IEM_MC_END();
5169 return VINF_SUCCESS;
5170
5171 case IEMMODE_64BIT:
5172 IEM_MC_BEGIN(4, 2);
5173 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5174 IEM_MC_ARG(uint64_t, u64Src, 1);
5175 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5176 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5178
5179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5180 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5181 IEM_MC_ASSIGN(cShiftArg, cShift);
5182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5183 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5184 IEM_MC_FETCH_EFLAGS(EFlags);
5185 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5186 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5187
5188 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5189 IEM_MC_COMMIT_EFLAGS(EFlags);
5190 IEM_MC_ADVANCE_RIP();
5191 IEM_MC_END();
5192 return VINF_SUCCESS;
5193
5194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5195 }
5196 }
5197}
5198
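/*
 * Illustrative sketch (not IEM code): the 32-bit semantics the shld worker
 * above dispatches to via pImpl.  The count is masked to 0..31 and a zero
 * count leaves the destination (and flags) unchanged; names are made up.
 */
#if 0
# include <stdint.h>

static uint32_t exampleShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;
    if (!cShift)
        return uDst;
    return (uDst << cShift) | (uSrc >> (32 - cShift)); /* source bits shift in from the right */
}
#endif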
5199
5200/**
5201 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5202 */
5203FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5204{
5205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5206 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5207
5208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5209 {
5210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5211
5212 switch (pVCpu->iem.s.enmEffOpSize)
5213 {
5214 case IEMMODE_16BIT:
5215 IEM_MC_BEGIN(4, 0);
5216 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5217 IEM_MC_ARG(uint16_t, u16Src, 1);
5218 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5219 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5220
5221 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5222 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5223 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5224 IEM_MC_REF_EFLAGS(pEFlags);
5225 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5226
5227 IEM_MC_ADVANCE_RIP();
5228 IEM_MC_END();
5229 return VINF_SUCCESS;
5230
5231 case IEMMODE_32BIT:
5232 IEM_MC_BEGIN(4, 0);
5233 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5234 IEM_MC_ARG(uint32_t, u32Src, 1);
5235 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5236 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5237
5238 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5239 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5240 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5241 IEM_MC_REF_EFLAGS(pEFlags);
5242 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5243
5244 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5245 IEM_MC_ADVANCE_RIP();
5246 IEM_MC_END();
5247 return VINF_SUCCESS;
5248
5249 case IEMMODE_64BIT:
5250 IEM_MC_BEGIN(4, 0);
5251 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5252 IEM_MC_ARG(uint64_t, u64Src, 1);
5253 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5254 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5255
5256 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5257 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5258 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5259 IEM_MC_REF_EFLAGS(pEFlags);
5260 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5261
5262 IEM_MC_ADVANCE_RIP();
5263 IEM_MC_END();
5264 return VINF_SUCCESS;
5265
5266 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5267 }
5268 }
5269 else
5270 {
5271 switch (pVCpu->iem.s.enmEffOpSize)
5272 {
5273 case IEMMODE_16BIT:
5274 IEM_MC_BEGIN(4, 2);
5275 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5276 IEM_MC_ARG(uint16_t, u16Src, 1);
5277 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5278 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5280
5281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5283 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5284 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5285 IEM_MC_FETCH_EFLAGS(EFlags);
5286 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5287 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5288
5289 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5290 IEM_MC_COMMIT_EFLAGS(EFlags);
5291 IEM_MC_ADVANCE_RIP();
5292 IEM_MC_END();
5293 return VINF_SUCCESS;
5294
5295 case IEMMODE_32BIT:
5296 IEM_MC_BEGIN(4, 2);
5297 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5298 IEM_MC_ARG(uint32_t, u32Src, 1);
5299 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5300 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5302
5303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5305 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5306 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5307 IEM_MC_FETCH_EFLAGS(EFlags);
5308 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5309 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5310
5311 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5312 IEM_MC_COMMIT_EFLAGS(EFlags);
5313 IEM_MC_ADVANCE_RIP();
5314 IEM_MC_END();
5315 return VINF_SUCCESS;
5316
5317 case IEMMODE_64BIT:
5318 IEM_MC_BEGIN(4, 2);
5319 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5320 IEM_MC_ARG(uint64_t, u64Src, 1);
5321 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5322 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5324
5325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5327 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5328 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5329 IEM_MC_FETCH_EFLAGS(EFlags);
5330 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5331 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5332
5333 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5334 IEM_MC_COMMIT_EFLAGS(EFlags);
5335 IEM_MC_ADVANCE_RIP();
5336 IEM_MC_END();
5337 return VINF_SUCCESS;
5338
5339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5340 }
5341 }
5342}
5343
5344
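/* Note: A rough sketch of the double shift the pfnNormal workers above are
   expected to perform for SHLD (SHRD mirrors it to the right), flags aside
   and assuming a 32-bit operand with a nonzero count already masked to 1..31:
       uDst = (uDst << cShift) | (uSrc >> (32 - cShift));
   e.g. uDst=0x12345678, uSrc=0x9ABCDEF0, cShift=8 gives 0x3456789A, with the
   last bit shifted out of uDst ending up in CF. */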
5345
5346/** Opcode 0x0f 0xa4. */
5347FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5348{
5349 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5350 IEMOP_HLP_MIN_386();
5351 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5352}
5353
5354
5355/** Opcode 0x0f 0xa5. */
5356FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5357{
5358 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5359 IEMOP_HLP_MIN_386();
5360 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5361}
5362
5363
5364/** Opcode 0x0f 0xa8. */
5365FNIEMOP_DEF(iemOp_push_gs)
5366{
5367 IEMOP_MNEMONIC(push_gs, "push gs");
5368 IEMOP_HLP_MIN_386();
5369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5370 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5371}
5372
5373
5374/** Opcode 0x0f 0xa9. */
5375FNIEMOP_DEF(iemOp_pop_gs)
5376{
5377 IEMOP_MNEMONIC(pop_gs, "pop gs");
5378 IEMOP_HLP_MIN_386();
5379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5380 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5381}
5382
5383
5384/** Opcode 0x0f 0xaa. */
5385FNIEMOP_STUB(iemOp_rsm);
5386//IEMOP_HLP_MIN_386();
5387
5388
5389/** Opcode 0x0f 0xab. */
5390FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5391{
5392 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5393 IEMOP_HLP_MIN_386();
5394 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5395}
5396
5397
5398/** Opcode 0x0f 0xac. */
5399FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5400{
5401 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5402 IEMOP_HLP_MIN_386();
5403 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5404}
5405
5406
5407/** Opcode 0x0f 0xad. */
5408FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5409{
5410 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5411 IEMOP_HLP_MIN_386();
5412 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5413}
5414
5415
5416/** Opcode 0x0f 0xae mem/0. */
5417FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5418{
5419 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5420 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5421 return IEMOP_RAISE_INVALID_OPCODE();
5422
5423 IEM_MC_BEGIN(3, 1);
5424 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5425 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5426 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5429 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5430 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5431 IEM_MC_END();
5432 return VINF_SUCCESS;
5433}
5434
5435
5436/** Opcode 0x0f 0xae mem/1. */
5437FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5438{
5439 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5440 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5441 return IEMOP_RAISE_INVALID_OPCODE();
5442
5443 IEM_MC_BEGIN(3, 1);
5444 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5445 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5446 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5449 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5450 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5451 IEM_MC_END();
5452 return VINF_SUCCESS;
5453}
5454
5455
5456/** Opcode 0x0f 0xae mem/2. */
5457FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5458
5459/** Opcode 0x0f 0xae mem/3. */
5460FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5461
5462/** Opcode 0x0f 0xae mem/4. */
5463FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5464
5465/** Opcode 0x0f 0xae mem/5. */
5466FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5467
5468/** Opcode 0x0f 0xae mem/6. */
5469FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5470
5471/** Opcode 0x0f 0xae mem/7. */
5472FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5473
5474
5475/** Opcode 0x0f 0xae 11b/5. */
5476FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5477{
5478 RT_NOREF_PV(bRm);
5479 IEMOP_MNEMONIC(lfence, "lfence");
5480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5481 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5482 return IEMOP_RAISE_INVALID_OPCODE();
5483
5484 IEM_MC_BEGIN(0, 0);
5485 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5486 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5487 else
5488 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5489 IEM_MC_ADVANCE_RIP();
5490 IEM_MC_END();
5491 return VINF_SUCCESS;
5492}
5493
5494
5495/** Opcode 0x0f 0xae 11b/6. */
5496FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5497{
5498 RT_NOREF_PV(bRm);
5499 IEMOP_MNEMONIC(mfence, "mfence");
5500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5501 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5502 return IEMOP_RAISE_INVALID_OPCODE();
5503
5504 IEM_MC_BEGIN(0, 0);
5505 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5506 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5507 else
5508 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5509 IEM_MC_ADVANCE_RIP();
5510 IEM_MC_END();
5511 return VINF_SUCCESS;
5512}
5513
5514
5515/** Opcode 0x0f 0xae 11b/7. */
5516FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5517{
5518 RT_NOREF_PV(bRm);
5519 IEMOP_MNEMONIC(sfence, "sfence");
5520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5521 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5522 return IEMOP_RAISE_INVALID_OPCODE();
5523
5524 IEM_MC_BEGIN(0, 0);
5525 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5526 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5527 else
5528 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5529 IEM_MC_ADVANCE_RIP();
5530 IEM_MC_END();
5531 return VINF_SUCCESS;
5532}
5533
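/* Note: All three fence workers above dispatch on the *host* SSE2 feature:
   when the host can execute the fence instruction natively it is used
   directly, otherwise iemAImpl_alt_mem_fence presumably substitutes some
   other serializing memory operation with at least as strong ordering.
   The guest-side fSse2 check raises #UD first, so guests never observe
   the substitution. */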
5534
5535/** Opcode 0xf3 0x0f 0xae 11b/0. */
5536FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5537
5538/** Opcode 0xf3 0x0f 0xae 11b/1. */
5539FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5540
5541/** Opcode 0xf3 0x0f 0xae 11b/2. */
5542FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5543
5544/** Opcode 0xf3 0x0f 0xae 11b/3. */
5545FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5546
5547
5548/** Opcode 0x0f 0xae. */
5549FNIEMOP_DEF(iemOp_Grp15)
5550{
5551 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5552 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5553 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5554 {
5555 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5556 {
5557 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5558 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5559 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5560 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5561 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5562 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5563 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5564 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5566 }
5567 }
5568 else
5569 {
5570 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5571 {
5572 case 0:
5573 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5574 {
5575 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5576 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5577 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5578 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5579 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5580 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5581 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5582 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5584 }
5585 break;
5586
5587 case IEM_OP_PRF_REPZ:
5588 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5589 {
5590 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5591 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5592 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5593 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5594 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5595 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5596 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5597 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5599 }
5600 break;
5601
5602 default:
5603 return IEMOP_RAISE_INVALID_OPCODE();
5604 }
5605 }
5606}
5607
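/* For reference, the group 15 decoding implemented above:
     mod != 3:        /0 fxsave   /1 fxrstor  /2 ldmxcsr  /3 stmxcsr
                      /4 xsave    /5 xrstor   /6 xsaveopt /7 clflush
     mod == 3, none:  /5 lfence   /6 mfence   /7 sfence
     mod == 3, F3:    /0 rdfsbase /1 rdgsbase /2 wrfsbase /3 wrgsbase
   Everything else raises #UD. */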
5608
5609/** Opcode 0x0f 0xaf. */
5610FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5611{
5612 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5613 IEMOP_HLP_MIN_386();
5614 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5615 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5616}
5617
5618
5619/** Opcode 0x0f 0xb0. */
5620FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5621{
5622 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5623 IEMOP_HLP_MIN_486();
5624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5625
5626 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5627 {
5628 IEMOP_HLP_DONE_DECODING();
5629 IEM_MC_BEGIN(4, 0);
5630 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5631 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5632 IEM_MC_ARG(uint8_t, u8Src, 2);
5633 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5634
5635 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5636 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5637 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5638 IEM_MC_REF_EFLAGS(pEFlags);
5639 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5640 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5641 else
5642 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5643
5644 IEM_MC_ADVANCE_RIP();
5645 IEM_MC_END();
5646 }
5647 else
5648 {
5649 IEM_MC_BEGIN(4, 3);
5650 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5651 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5652 IEM_MC_ARG(uint8_t, u8Src, 2);
5653 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5655 IEM_MC_LOCAL(uint8_t, u8Al);
5656
5657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5658 IEMOP_HLP_DONE_DECODING();
5659 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5660 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5661 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5662 IEM_MC_FETCH_EFLAGS(EFlags);
5663 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5664 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5665 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5666 else
5667 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5668
5669 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5670 IEM_MC_COMMIT_EFLAGS(EFlags);
5671 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5672 IEM_MC_ADVANCE_RIP();
5673 IEM_MC_END();
5674 }
5675 return VINF_SUCCESS;
5676}
5677
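/* Note: Flags aside, the compare-and-exchange the worker performs is roughly:
       if (*pu8Dst == *pu8Al) { *pu8Dst = u8Src;   ZF = 1; }
       else                   { *pu8Al  = *pu8Dst; ZF = 0; }
   which is why the memory form above writes u8Al back to AL unconditionally;
   the helper only changes it on mismatch. */
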
5678/** Opcode 0x0f 0xb1. */
5679FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5680{
5681 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5682 IEMOP_HLP_MIN_486();
5683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5684
5685 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5686 {
5687 IEMOP_HLP_DONE_DECODING();
5688 switch (pVCpu->iem.s.enmEffOpSize)
5689 {
5690 case IEMMODE_16BIT:
5691 IEM_MC_BEGIN(4, 0);
5692 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5693 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5694 IEM_MC_ARG(uint16_t, u16Src, 2);
5695 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5696
5697 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5698 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5699 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5700 IEM_MC_REF_EFLAGS(pEFlags);
5701 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5702 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5703 else
5704 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5705
5706 IEM_MC_ADVANCE_RIP();
5707 IEM_MC_END();
5708 return VINF_SUCCESS;
5709
5710 case IEMMODE_32BIT:
5711 IEM_MC_BEGIN(4, 0);
5712 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5713 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5714 IEM_MC_ARG(uint32_t, u32Src, 2);
5715 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5716
5717 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5718 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5719 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5720 IEM_MC_REF_EFLAGS(pEFlags);
5721 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5722 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5723 else
5724 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5725
5726 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5727 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5728 IEM_MC_ADVANCE_RIP();
5729 IEM_MC_END();
5730 return VINF_SUCCESS;
5731
5732 case IEMMODE_64BIT:
5733 IEM_MC_BEGIN(4, 0);
5734 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5735 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5736#ifdef RT_ARCH_X86
5737 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5738#else
5739 IEM_MC_ARG(uint64_t, u64Src, 2);
5740#endif
5741 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5742
5743 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5744 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5745 IEM_MC_REF_EFLAGS(pEFlags);
5746#ifdef RT_ARCH_X86
5747 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5748 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5749 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5750 else
5751 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5752#else
5753 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5754 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5755 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5756 else
5757 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5758#endif
5759
5760 IEM_MC_ADVANCE_RIP();
5761 IEM_MC_END();
5762 return VINF_SUCCESS;
5763
5764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5765 }
5766 }
5767 else
5768 {
5769 switch (pVCpu->iem.s.enmEffOpSize)
5770 {
5771 case IEMMODE_16BIT:
5772 IEM_MC_BEGIN(4, 3);
5773 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5774 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5775 IEM_MC_ARG(uint16_t, u16Src, 2);
5776 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5778 IEM_MC_LOCAL(uint16_t, u16Ax);
5779
5780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5781 IEMOP_HLP_DONE_DECODING();
5782 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5783 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5784 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5785 IEM_MC_FETCH_EFLAGS(EFlags);
5786 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5787 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5788 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5789 else
5790 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5791
5792 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5793 IEM_MC_COMMIT_EFLAGS(EFlags);
5794 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5795 IEM_MC_ADVANCE_RIP();
5796 IEM_MC_END();
5797 return VINF_SUCCESS;
5798
5799 case IEMMODE_32BIT:
5800 IEM_MC_BEGIN(4, 3);
5801 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5802 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5803 IEM_MC_ARG(uint32_t, u32Src, 2);
5804 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5806 IEM_MC_LOCAL(uint32_t, u32Eax);
5807
5808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5809 IEMOP_HLP_DONE_DECODING();
5810 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5811 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5812 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5813 IEM_MC_FETCH_EFLAGS(EFlags);
5814 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5815 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5816 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5817 else
5818 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5819
5820 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5821 IEM_MC_COMMIT_EFLAGS(EFlags);
5822 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5823 IEM_MC_ADVANCE_RIP();
5824 IEM_MC_END();
5825 return VINF_SUCCESS;
5826
5827 case IEMMODE_64BIT:
5828 IEM_MC_BEGIN(4, 3);
5829 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5830 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5831#ifdef RT_ARCH_X86
5832 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5833#else
5834 IEM_MC_ARG(uint64_t, u64Src, 2);
5835#endif
5836 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5838 IEM_MC_LOCAL(uint64_t, u64Rax);
5839
5840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5841 IEMOP_HLP_DONE_DECODING();
5842 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5843 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5844 IEM_MC_FETCH_EFLAGS(EFlags);
5845 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5846#ifdef RT_ARCH_X86
5847 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5848 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5849 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5850 else
5851 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5852#else
5853 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5854 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5855 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5856 else
5857 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5858#endif
5859
5860 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5861 IEM_MC_COMMIT_EFLAGS(EFlags);
5862 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5863 IEM_MC_ADVANCE_RIP();
5864 IEM_MC_END();
5865 return VINF_SUCCESS;
5866
5867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5868 }
5869 }
5870}
5871
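/* Note: The RT_ARCH_X86 special casing above presumably exists because a
   32-bit host worker cannot take the 64-bit source operand by value in a
   natural register argument, so it gets a pointer to the guest register
   instead; the locked and unlocked variants are otherwise identical in
   shape. */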
5872
5873FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5874{
5875 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5876 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5877
5878 switch (pVCpu->iem.s.enmEffOpSize)
5879 {
5880 case IEMMODE_16BIT:
5881 IEM_MC_BEGIN(5, 1);
5882 IEM_MC_ARG(uint16_t, uSel, 0);
5883 IEM_MC_ARG(uint16_t, offSeg, 1);
5884 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5885 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5886 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5887 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5890 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5891 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5892 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5893 IEM_MC_END();
5894 return VINF_SUCCESS;
5895
5896 case IEMMODE_32BIT:
5897 IEM_MC_BEGIN(5, 1);
5898 IEM_MC_ARG(uint16_t, uSel, 0);
5899 IEM_MC_ARG(uint32_t, offSeg, 1);
5900 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5901 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5902 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5903 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5906 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5907 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5908 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5909 IEM_MC_END();
5910 return VINF_SUCCESS;
5911
5912 case IEMMODE_64BIT:
5913 IEM_MC_BEGIN(5, 1);
5914 IEM_MC_ARG(uint16_t, uSel, 0);
5915 IEM_MC_ARG(uint64_t, offSeg, 1);
5916 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5917 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5918 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5919 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5922 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
5923 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5924 else
5925 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5926 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5927 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5928 IEM_MC_END();
5929 return VINF_SUCCESS;
5930
5931 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5932 }
5933}
5934
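/* Note: The far pointer fetched above is little endian with the offset
   first and the selector after it, e.g. for the 32-bit operand size:
       [GCPtrEff+0..3] = offset, [GCPtrEff+4..5] = selector
   matching the IEM_MC_FETCH_MEM_U16_DISP(uSel, ..., 4) above. The 64-bit
   case reads an 8 byte offset, or a sign extended 4 byte one on AMD as
   per the @todo. */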
5935
5936/** Opcode 0x0f 0xb2. */
5937FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5938{
5939 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5940 IEMOP_HLP_MIN_386();
5941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5942 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5943 return IEMOP_RAISE_INVALID_OPCODE();
5944 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5945}
5946
5947
5948/** Opcode 0x0f 0xb3. */
5949FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5950{
5951 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5952 IEMOP_HLP_MIN_386();
5953 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5954}
5955
5956
5957/** Opcode 0x0f 0xb4. */
5958FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5959{
5960 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5961 IEMOP_HLP_MIN_386();
5962 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5963 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5964 return IEMOP_RAISE_INVALID_OPCODE();
5965 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5966}
5967
5968
5969/** Opcode 0x0f 0xb5. */
5970FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5971{
5972 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5973 IEMOP_HLP_MIN_386();
5974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5976 return IEMOP_RAISE_INVALID_OPCODE();
5977 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5978}
5979
5980
5981/** Opcode 0x0f 0xb6. */
5982FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5983{
5984 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5985 IEMOP_HLP_MIN_386();
5986
5987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5988
5989 /*
5990     * If rm denotes a register, there are no more instruction bytes.
5991 */
5992 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5993 {
5994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5995 switch (pVCpu->iem.s.enmEffOpSize)
5996 {
5997 case IEMMODE_16BIT:
5998 IEM_MC_BEGIN(0, 1);
5999 IEM_MC_LOCAL(uint16_t, u16Value);
6000 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6001 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6002 IEM_MC_ADVANCE_RIP();
6003 IEM_MC_END();
6004 return VINF_SUCCESS;
6005
6006 case IEMMODE_32BIT:
6007 IEM_MC_BEGIN(0, 1);
6008 IEM_MC_LOCAL(uint32_t, u32Value);
6009 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6010 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6011 IEM_MC_ADVANCE_RIP();
6012 IEM_MC_END();
6013 return VINF_SUCCESS;
6014
6015 case IEMMODE_64BIT:
6016 IEM_MC_BEGIN(0, 1);
6017 IEM_MC_LOCAL(uint64_t, u64Value);
6018 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6019 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6020 IEM_MC_ADVANCE_RIP();
6021 IEM_MC_END();
6022 return VINF_SUCCESS;
6023
6024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6025 }
6026 }
6027 else
6028 {
6029 /*
6030 * We're loading a register from memory.
6031 */
6032 switch (pVCpu->iem.s.enmEffOpSize)
6033 {
6034 case IEMMODE_16BIT:
6035 IEM_MC_BEGIN(0, 2);
6036 IEM_MC_LOCAL(uint16_t, u16Value);
6037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6040 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6041 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6042 IEM_MC_ADVANCE_RIP();
6043 IEM_MC_END();
6044 return VINF_SUCCESS;
6045
6046 case IEMMODE_32BIT:
6047 IEM_MC_BEGIN(0, 2);
6048 IEM_MC_LOCAL(uint32_t, u32Value);
6049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6052 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6053 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6054 IEM_MC_ADVANCE_RIP();
6055 IEM_MC_END();
6056 return VINF_SUCCESS;
6057
6058 case IEMMODE_64BIT:
6059 IEM_MC_BEGIN(0, 2);
6060 IEM_MC_LOCAL(uint64_t, u64Value);
6061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6064 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6065 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6066 IEM_MC_ADVANCE_RIP();
6067 IEM_MC_END();
6068 return VINF_SUCCESS;
6069
6070 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6071 }
6072 }
6073}
6074
6075
6076/** Opcode 0x0f 0xb7. */
6077FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6078{
6079 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6080 IEMOP_HLP_MIN_386();
6081
6082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6083
6084 /** @todo Not entirely sure how the operand size prefix is handled here,
6085 * assuming that it will be ignored. Would be nice to have a few
6086     * tests for this. */
6087 /*
6088     * If rm denotes a register, there are no more instruction bytes.
6089 */
6090 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6091 {
6092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6093 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6094 {
6095 IEM_MC_BEGIN(0, 1);
6096 IEM_MC_LOCAL(uint32_t, u32Value);
6097 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6098 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6099 IEM_MC_ADVANCE_RIP();
6100 IEM_MC_END();
6101 }
6102 else
6103 {
6104 IEM_MC_BEGIN(0, 1);
6105 IEM_MC_LOCAL(uint64_t, u64Value);
6106 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6107 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6108 IEM_MC_ADVANCE_RIP();
6109 IEM_MC_END();
6110 }
6111 }
6112 else
6113 {
6114 /*
6115 * We're loading a register from memory.
6116 */
6117 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6118 {
6119 IEM_MC_BEGIN(0, 2);
6120 IEM_MC_LOCAL(uint32_t, u32Value);
6121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6124 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6125 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6126 IEM_MC_ADVANCE_RIP();
6127 IEM_MC_END();
6128 }
6129 else
6130 {
6131 IEM_MC_BEGIN(0, 2);
6132 IEM_MC_LOCAL(uint64_t, u64Value);
6133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6136 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6137 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6138 IEM_MC_ADVANCE_RIP();
6139 IEM_MC_END();
6140 }
6141 }
6142 return VINF_SUCCESS;
6143}
6144
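/* For illustration, assuming the operand size prefix really is ignored as
   the @todo above suspects, these would behave identically in 32-bit code:
       0F B7 /r        movzx eax, cx
       66 0F B7 /r     the same with a redundant 0x66 prefix
   A true 16-bit movzx Gv,Ew would be a plain 16-bit move anyway, which is
   why only 32-bit and 64-bit destination paths are implemented. */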
6145
6146/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6147FNIEMOP_UD_STUB(iemOp_jmpe);
6148/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6149FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6150
6151
6152/** Opcode 0x0f 0xb9. */
6153FNIEMOP_DEF(iemOp_Grp10)
6154{
6155 Log(("iemOp_Grp10 -> #UD\n"));
6156 return IEMOP_RAISE_INVALID_OPCODE();
6157}
6158
6159
6160/** Opcode 0x0f 0xba. */
6161FNIEMOP_DEF(iemOp_Grp8)
6162{
6163 IEMOP_HLP_MIN_386();
6164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6165 PCIEMOPBINSIZES pImpl;
6166 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6167 {
6168 case 0: case 1: case 2: case 3:
6169 return IEMOP_RAISE_INVALID_OPCODE();
6170 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6171 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6172 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6173 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6175 }
6176 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6177
6178 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6179 {
6180 /* register destination. */
6181 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6183
6184 switch (pVCpu->iem.s.enmEffOpSize)
6185 {
6186 case IEMMODE_16BIT:
6187 IEM_MC_BEGIN(3, 0);
6188 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6189 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6190 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6191
6192 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6193 IEM_MC_REF_EFLAGS(pEFlags);
6194 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6195
6196 IEM_MC_ADVANCE_RIP();
6197 IEM_MC_END();
6198 return VINF_SUCCESS;
6199
6200 case IEMMODE_32BIT:
6201 IEM_MC_BEGIN(3, 0);
6202 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6203 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6204 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6205
6206 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6207 IEM_MC_REF_EFLAGS(pEFlags);
6208 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6209
6210 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6211 IEM_MC_ADVANCE_RIP();
6212 IEM_MC_END();
6213 return VINF_SUCCESS;
6214
6215 case IEMMODE_64BIT:
6216 IEM_MC_BEGIN(3, 0);
6217 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6218 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6219 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6220
6221 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6222 IEM_MC_REF_EFLAGS(pEFlags);
6223 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6224
6225 IEM_MC_ADVANCE_RIP();
6226 IEM_MC_END();
6227 return VINF_SUCCESS;
6228
6229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6230 }
6231 }
6232 else
6233 {
6234 /* memory destination. */
6235
6236 uint32_t fAccess;
6237 if (pImpl->pfnLockedU16)
6238 fAccess = IEM_ACCESS_DATA_RW;
6239 else /* BT */
6240 fAccess = IEM_ACCESS_DATA_R;
6241
6242 /** @todo test negative bit offsets! */
6243 switch (pVCpu->iem.s.enmEffOpSize)
6244 {
6245 case IEMMODE_16BIT:
6246 IEM_MC_BEGIN(3, 1);
6247 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6248 IEM_MC_ARG(uint16_t, u16Src, 1);
6249 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6251
6252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6253 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6254 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6255 if (pImpl->pfnLockedU16)
6256 IEMOP_HLP_DONE_DECODING();
6257 else
6258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6259 IEM_MC_FETCH_EFLAGS(EFlags);
6260 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6261 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6262 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6263 else
6264 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6266
6267 IEM_MC_COMMIT_EFLAGS(EFlags);
6268 IEM_MC_ADVANCE_RIP();
6269 IEM_MC_END();
6270 return VINF_SUCCESS;
6271
6272 case IEMMODE_32BIT:
6273 IEM_MC_BEGIN(3, 1);
6274 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6275 IEM_MC_ARG(uint32_t, u32Src, 1);
6276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6278
6279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6280 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6281 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6282 if (pImpl->pfnLockedU16)
6283 IEMOP_HLP_DONE_DECODING();
6284 else
6285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6286 IEM_MC_FETCH_EFLAGS(EFlags);
6287 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6288 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6289 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6290 else
6291 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6292 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6293
6294 IEM_MC_COMMIT_EFLAGS(EFlags);
6295 IEM_MC_ADVANCE_RIP();
6296 IEM_MC_END();
6297 return VINF_SUCCESS;
6298
6299 case IEMMODE_64BIT:
6300 IEM_MC_BEGIN(3, 1);
6301 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6302 IEM_MC_ARG(uint64_t, u64Src, 1);
6303 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6305
6306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6307 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6308 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6309 if (pImpl->pfnLockedU16)
6310 IEMOP_HLP_DONE_DECODING();
6311 else
6312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6313 IEM_MC_FETCH_EFLAGS(EFlags);
6314 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6315 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6316 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6317 else
6318 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6319 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6320
6321 IEM_MC_COMMIT_EFLAGS(EFlags);
6322 IEM_MC_ADVANCE_RIP();
6323 IEM_MC_END();
6324 return VINF_SUCCESS;
6325
6326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6327 }
6328 }
6329
6330}
6331
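/* Note: With an immediate bit offset the operand above is masked to the
   operand width (u8Bit & 0x0f/0x1f/0x3f), so the access never leaves the
   addressed word/dword/qword. For example:
       bt word [mem], 0x13     ; tests bit 3 of the word at [mem]
   The Ev,Gv forms, by contrast, take a signed bit offset that may address
   memory outside the immediate operand location. */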
6332
6333/** Opcode 0x0f 0xbb. */
6334FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6335{
6336 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6337 IEMOP_HLP_MIN_386();
6338 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6339}
6340
6341
6342/** Opcode 0x0f 0xbc. */
6343FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6344{
6345 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6346 IEMOP_HLP_MIN_386();
6347 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6348 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6349}
6350
6351
6352/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6353FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6354
6355
6356/** Opcode 0x0f 0xbd. */
6357FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6358{
6359 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6360 IEMOP_HLP_MIN_386();
6361 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6362 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6363}
6364
6365
6366/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6367FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6368
6369
6370/** Opcode 0x0f 0xbe. */
6371FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6372{
6373 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6374 IEMOP_HLP_MIN_386();
6375
6376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6377
6378 /*
6379     * If rm denotes a register, there are no more instruction bytes.
6380 */
6381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6382 {
6383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384 switch (pVCpu->iem.s.enmEffOpSize)
6385 {
6386 case IEMMODE_16BIT:
6387 IEM_MC_BEGIN(0, 1);
6388 IEM_MC_LOCAL(uint16_t, u16Value);
6389 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6390 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6391 IEM_MC_ADVANCE_RIP();
6392 IEM_MC_END();
6393 return VINF_SUCCESS;
6394
6395 case IEMMODE_32BIT:
6396 IEM_MC_BEGIN(0, 1);
6397 IEM_MC_LOCAL(uint32_t, u32Value);
6398 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6399 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6400 IEM_MC_ADVANCE_RIP();
6401 IEM_MC_END();
6402 return VINF_SUCCESS;
6403
6404 case IEMMODE_64BIT:
6405 IEM_MC_BEGIN(0, 1);
6406 IEM_MC_LOCAL(uint64_t, u64Value);
6407 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6408 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6409 IEM_MC_ADVANCE_RIP();
6410 IEM_MC_END();
6411 return VINF_SUCCESS;
6412
6413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6414 }
6415 }
6416 else
6417 {
6418 /*
6419 * We're loading a register from memory.
6420 */
6421 switch (pVCpu->iem.s.enmEffOpSize)
6422 {
6423 case IEMMODE_16BIT:
6424 IEM_MC_BEGIN(0, 2);
6425 IEM_MC_LOCAL(uint16_t, u16Value);
6426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6429 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6430 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6431 IEM_MC_ADVANCE_RIP();
6432 IEM_MC_END();
6433 return VINF_SUCCESS;
6434
6435 case IEMMODE_32BIT:
6436 IEM_MC_BEGIN(0, 2);
6437 IEM_MC_LOCAL(uint32_t, u32Value);
6438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6441 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6442 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6443 IEM_MC_ADVANCE_RIP();
6444 IEM_MC_END();
6445 return VINF_SUCCESS;
6446
6447 case IEMMODE_64BIT:
6448 IEM_MC_BEGIN(0, 2);
6449 IEM_MC_LOCAL(uint64_t, u64Value);
6450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6453 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6454 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6455 IEM_MC_ADVANCE_RIP();
6456 IEM_MC_END();
6457 return VINF_SUCCESS;
6458
6459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6460 }
6461 }
6462}
6463
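/* Note: movsx mirrors the movzx logic above but sign extends, e.g. for a
   source byte of 0x80:
       movsx eax, byte [mem]   ; eax = 0xFFFFFF80
       movzx eax, byte [mem]   ; eax = 0x00000080
   hence the separate _SX_ fetch variants. */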
6464
6465/** Opcode 0x0f 0xbf. */
6466FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6467{
6468 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6469 IEMOP_HLP_MIN_386();
6470
6471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6472
6473 /** @todo Not entirely sure how the operand size prefix is handled here,
6474 * assuming that it will be ignored. Would be nice to have a few
6475     * tests for this. */
6476 /*
6477     * If rm denotes a register, there are no more instruction bytes.
6478 */
6479 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6480 {
6481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6482 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6483 {
6484 IEM_MC_BEGIN(0, 1);
6485 IEM_MC_LOCAL(uint32_t, u32Value);
6486 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6487 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6488 IEM_MC_ADVANCE_RIP();
6489 IEM_MC_END();
6490 }
6491 else
6492 {
6493 IEM_MC_BEGIN(0, 1);
6494 IEM_MC_LOCAL(uint64_t, u64Value);
6495 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6496 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6497 IEM_MC_ADVANCE_RIP();
6498 IEM_MC_END();
6499 }
6500 }
6501 else
6502 {
6503 /*
6504 * We're loading a register from memory.
6505 */
6506 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6507 {
6508 IEM_MC_BEGIN(0, 2);
6509 IEM_MC_LOCAL(uint32_t, u32Value);
6510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6513 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6514 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6515 IEM_MC_ADVANCE_RIP();
6516 IEM_MC_END();
6517 }
6518 else
6519 {
6520 IEM_MC_BEGIN(0, 2);
6521 IEM_MC_LOCAL(uint64_t, u64Value);
6522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6525 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6526 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6527 IEM_MC_ADVANCE_RIP();
6528 IEM_MC_END();
6529 }
6530 }
6531 return VINF_SUCCESS;
6532}
6533
6534
6535/** Opcode 0x0f 0xc0. */
6536FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6537{
6538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6539 IEMOP_HLP_MIN_486();
6540 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6541
6542 /*
6543     * If rm denotes a register, there are no more instruction bytes.
6544 */
6545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6546 {
6547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6548
6549 IEM_MC_BEGIN(3, 0);
6550 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6551 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6552 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6553
6554 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6555 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6556 IEM_MC_REF_EFLAGS(pEFlags);
6557 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6558
6559 IEM_MC_ADVANCE_RIP();
6560 IEM_MC_END();
6561 }
6562 else
6563 {
6564 /*
6565 * We're accessing memory.
6566 */
6567 IEM_MC_BEGIN(3, 3);
6568 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6569 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6570 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6571 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6573
6574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6575 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6576 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6577 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6578 IEM_MC_FETCH_EFLAGS(EFlags);
6579 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6580 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6581 else
6582 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6583
6584 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6585 IEM_MC_COMMIT_EFLAGS(EFlags);
6586 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6587 IEM_MC_ADVANCE_RIP();
6588 IEM_MC_END();
6589 return VINF_SUCCESS;
6590 }
6591 return VINF_SUCCESS;
6592}
6593
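/* Note: XADD exchanges before adding; flags aside the worker does roughly:
       uint8_t const u8Tmp = *pu8Dst;
       *pu8Dst = *pu8Dst + *pu8Reg;
       *pu8Reg = u8Tmp;
   which is why the memory form above stores u8RegCopy back into the source
   register once the destination has been committed. */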
6594
6595/** Opcode 0x0f 0xc1. */
6596FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6597{
6598 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6599 IEMOP_HLP_MIN_486();
6600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6601
6602 /*
6603     * If rm denotes a register, there are no more instruction bytes.
6604 */
6605 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6606 {
6607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6608
6609 switch (pVCpu->iem.s.enmEffOpSize)
6610 {
6611 case IEMMODE_16BIT:
6612 IEM_MC_BEGIN(3, 0);
6613 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6614 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6615 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6616
6617 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6618 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6619 IEM_MC_REF_EFLAGS(pEFlags);
6620 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6621
6622 IEM_MC_ADVANCE_RIP();
6623 IEM_MC_END();
6624 return VINF_SUCCESS;
6625
6626 case IEMMODE_32BIT:
6627 IEM_MC_BEGIN(3, 0);
6628 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6629 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6630 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6631
6632 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6633 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6634 IEM_MC_REF_EFLAGS(pEFlags);
6635 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6636
6637 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6638 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6639 IEM_MC_ADVANCE_RIP();
6640 IEM_MC_END();
6641 return VINF_SUCCESS;
6642
6643 case IEMMODE_64BIT:
6644 IEM_MC_BEGIN(3, 0);
6645 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6646 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6647 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6648
6649 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6650 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6651 IEM_MC_REF_EFLAGS(pEFlags);
6652 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6653
6654 IEM_MC_ADVANCE_RIP();
6655 IEM_MC_END();
6656 return VINF_SUCCESS;
6657
6658 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6659 }
6660 }
6661 else
6662 {
6663 /*
6664 * We're accessing memory.
6665 */
6666 switch (pVCpu->iem.s.enmEffOpSize)
6667 {
6668 case IEMMODE_16BIT:
6669 IEM_MC_BEGIN(3, 3);
6670 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6671 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6672 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6673 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6675
6676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6677 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6678 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6679 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6680 IEM_MC_FETCH_EFLAGS(EFlags);
6681 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6682 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6683 else
6684 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6685
6686 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6687 IEM_MC_COMMIT_EFLAGS(EFlags);
6688 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6689 IEM_MC_ADVANCE_RIP();
6690 IEM_MC_END();
6691 return VINF_SUCCESS;
6692
6693 case IEMMODE_32BIT:
6694 IEM_MC_BEGIN(3, 3);
6695 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6696 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6697 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6698 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6700
6701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6702 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6703 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6704 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6705 IEM_MC_FETCH_EFLAGS(EFlags);
6706 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6707 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6708 else
6709 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6710
6711 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6712 IEM_MC_COMMIT_EFLAGS(EFlags);
6713 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6714 IEM_MC_ADVANCE_RIP();
6715 IEM_MC_END();
6716 return VINF_SUCCESS;
6717
6718 case IEMMODE_64BIT:
6719 IEM_MC_BEGIN(3, 3);
6720 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6721 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6722 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6723 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6725
6726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6727 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6728 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6729 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6730 IEM_MC_FETCH_EFLAGS(EFlags);
6731 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6732 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6733 else
6734 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6735
6736 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6737 IEM_MC_COMMIT_EFLAGS(EFlags);
6738 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6739 IEM_MC_ADVANCE_RIP();
6740 IEM_MC_END();
6741 return VINF_SUCCESS;
6742
6743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6744 }
6745 }
6746}
6747
6748
6749/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
6750FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
6751/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
6752FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
6753/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
6754FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
6755/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
6756FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
6757
6758
6759/** Opcode 0x0f 0xc3. */
6760FNIEMOP_DEF(iemOp_movnti_My_Gy)
6761{
6762 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6763
6764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6765
6766 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6767 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6768 {
6769 switch (pVCpu->iem.s.enmEffOpSize)
6770 {
6771 case IEMMODE_32BIT:
6772 IEM_MC_BEGIN(0, 2);
6773 IEM_MC_LOCAL(uint32_t, u32Value);
6774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6775
6776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6778 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6779 return IEMOP_RAISE_INVALID_OPCODE();
6780
6781 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6782 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6783 IEM_MC_ADVANCE_RIP();
6784 IEM_MC_END();
6785 break;
6786
6787 case IEMMODE_64BIT:
6788 IEM_MC_BEGIN(0, 2);
6789 IEM_MC_LOCAL(uint64_t, u64Value);
6790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6791
6792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6794 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6795 return IEMOP_RAISE_INVALID_OPCODE();
6796
6797 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6798 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6799 IEM_MC_ADVANCE_RIP();
6800 IEM_MC_END();
6801 break;
6802
6803 case IEMMODE_16BIT:
6804 /** @todo check this form. */
6805 return IEMOP_RAISE_INVALID_OPCODE();
6806 }
6807 }
6808 else
6809 return IEMOP_RAISE_INVALID_OPCODE();
6810 return VINF_SUCCESS;
6811}
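/* Note: movnti is a non-temporal store hint; emulating the caching hint has
   no functional effect, so IEM simply performs an ordinary store. Only the
   register -> memory direction exists, and the 16-bit operand size form is
   assumed to raise #UD (see the @todo above). */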
6812/* Opcode 0x66 0x0f 0xc3 - invalid */
6813/* Opcode 0xf3 0x0f 0xc3 - invalid */
6814/* Opcode 0xf2 0x0f 0xc3 - invalid */
6815
6816/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
6817FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
6818/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6819FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
6820/* Opcode 0xf3 0x0f 0xc4 - invalid */
6821/* Opcode 0xf2 0x0f 0xc4 - invalid */
6822
6823/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
6824FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
6825/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
6826FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
6827/* Opcode 0xf3 0x0f 0xc5 - invalid */
6828/* Opcode 0xf2 0x0f 0xc5 - invalid */
6829
6830/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
6831FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
6832/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6833FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
6834/* Opcode 0xf3 0x0f 0xc6 - invalid */
6835/* Opcode 0xf2 0x0f 0xc6 - invalid */
6836
6837
6838/** Opcode 0x0f 0xc7 !11/1. */
6839FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6840{
6841 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6842
6843 IEM_MC_BEGIN(4, 3);
6844 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6845 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6846 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6847 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6848 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6849 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6851
6852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6853 IEMOP_HLP_DONE_DECODING();
6854 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6855
6856 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6857 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6858 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6859
6860 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6861 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6862 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6863
6864 IEM_MC_FETCH_EFLAGS(EFlags);
6865 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6866 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6867 else
6868 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6869
6870 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6871 IEM_MC_COMMIT_EFLAGS(EFlags);
6872 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6873 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6874 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6875 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6876 IEM_MC_ENDIF();
6877 IEM_MC_ADVANCE_RIP();
6878
6879 IEM_MC_END();
6880 return VINF_SUCCESS;
6881}
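
/* For illustration only: a minimal C sketch (hypothetical name, not the real
   assembly worker, and without any locking) of the compare-and-exchange
   semantics that the iemAImpl_cmpxchg8b workers implement. */
#if 0
static void iemCmpXchg8bSketch(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx, uint32_t *pEFlags)
{
    if (*pu64Dst == pu64EaxEdx->u)
    {
        /* Equal: store ECX:EBX to the destination and set ZF. */
        *pu64Dst  = pu64EbxEcx->u;
        *pEFlags |= X86_EFL_ZF;
    }
    else
    {
        /* Not equal: load the old destination value into EDX:EAX and clear ZF. */
        pu64EaxEdx->u = *pu64Dst;
        *pEFlags     &= ~X86_EFL_ZF;
    }
}
#endif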
6882
6883
6884/** Opcode REX.W 0x0f 0xc7 !11/1. */
6885FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6886{
6887 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6888 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6889 {
6890#if 0
6891 RT_NOREF(bRm);
6892 IEMOP_BITCH_ABOUT_STUB();
6893 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6894#else
6895 IEM_MC_BEGIN(4, 3);
6896 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6897 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6898 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6899 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6900 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6901 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6903
6904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6905 IEMOP_HLP_DONE_DECODING();
6906 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6907 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6908
6909 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6910 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6911 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6912
6913 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6914 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6915 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6916
6917 IEM_MC_FETCH_EFLAGS(EFlags);
6918# ifdef RT_ARCH_AMD64
6919 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6920 {
6921 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6922 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6923 else
6924 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6925 }
6926 else
6927# endif
6928 {
 6929            /* Note! The fallback for 32-bit systems and systems without CX16 uses
 6930               multiple accesses that are not all atomic, which works fine in a
 6931               UNI CPU guest configuration (ignoring DMA). If guest SMP is active
 6932               we have no choice but to use a rendezvous callback here. Sigh. */
6933 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6934 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6935 else
6936 {
6937 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6938 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6939 }
6940 }
6941
6942 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6943 IEM_MC_COMMIT_EFLAGS(EFlags);
6944 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6945 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6946 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6947 IEM_MC_ENDIF();
6948 IEM_MC_ADVANCE_RIP();
6949
6950 IEM_MC_END();
6951 return VINF_SUCCESS;
6952#endif
6953 }
6954 Log(("cmpxchg16b -> #UD\n"));
6955 return IEMOP_RAISE_INVALID_OPCODE();
6956}
6957
6958
6959/** Opcode 0x0f 0xc7 11/6. */
6960FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6961
6962/** Opcode 0x0f 0xc7 !11/6. */
6963FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6964
6965/** Opcode 0x66 0x0f 0xc7 !11/6. */
6966FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6967
6968/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6969FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6970
6971/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6972FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6973
6974
6975/** Opcode 0x0f 0xc7. */
6976FNIEMOP_DEF(iemOp_Grp9)
6977{
6978 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6980 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6981 {
6982 case 0: case 2: case 3: case 4: case 5:
6983 return IEMOP_RAISE_INVALID_OPCODE();
6984 case 1:
6985 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6986 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6987 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6988 return IEMOP_RAISE_INVALID_OPCODE();
6989 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6990 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6991 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6992 case 6:
6993 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6994 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6995 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6996 {
6997 case 0:
6998 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6999 case IEM_OP_PRF_SIZE_OP:
7000 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7001 case IEM_OP_PRF_REPZ:
7002 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7003 default:
7004 return IEMOP_RAISE_INVALID_OPCODE();
7005 }
7006 case 7:
7007 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7008 {
7009 case 0:
7010 case IEM_OP_PRF_REPZ:
7011 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7012 default:
7013 return IEMOP_RAISE_INVALID_OPCODE();
7014 }
7015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7016 }
7017}
7018
7019
7020/**
7021 * Common 'bswap register' helper.
7022 */
7023FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7024{
7025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7026 switch (pVCpu->iem.s.enmEffOpSize)
7027 {
7028 case IEMMODE_16BIT:
7029 IEM_MC_BEGIN(1, 0);
7030 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7031 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7032 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7033 IEM_MC_ADVANCE_RIP();
7034 IEM_MC_END();
7035 return VINF_SUCCESS;
7036
7037 case IEMMODE_32BIT:
7038 IEM_MC_BEGIN(1, 0);
7039 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7040 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7041 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7042 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7043 IEM_MC_ADVANCE_RIP();
7044 IEM_MC_END();
7045 return VINF_SUCCESS;
7046
7047 case IEMMODE_64BIT:
7048 IEM_MC_BEGIN(1, 0);
7049 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7050 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7051 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7052 IEM_MC_ADVANCE_RIP();
7053 IEM_MC_END();
7054 return VINF_SUCCESS;
7055
7056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7057 }
7058}
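
/* For illustration only (hypothetical name, not the real assembly worker): the
   32-bit BSWAP operation simply reverses the four bytes of the operand. */
#if 0
static void iemBSwapU32Sketch(uint32_t *pu32Dst)
{
    uint32_t const uOld = *pu32Dst;
    *pu32Dst = (uOld << 24)
             | ((uOld & UINT32_C(0x0000ff00)) << 8)
             | ((uOld & UINT32_C(0x00ff0000)) >> 8)
             | (uOld >> 24);
}
#endif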
7059
7060
7061/** Opcode 0x0f 0xc8. */
7062FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7063{
7064 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
 7065    /* Note! The Intel manuals state that R8-R15 can be accessed by using a
 7066       REX.X prefix, but it appears REX.B is actually the correct prefix. For
 7067       a parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7068 IEMOP_HLP_MIN_486();
7069 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7070}
7071
7072
7073/** Opcode 0x0f 0xc9. */
7074FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7075{
7076 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7077 IEMOP_HLP_MIN_486();
7078 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7079}
7080
7081
7082/** Opcode 0x0f 0xca. */
7083FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7084{
 7085    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7086 IEMOP_HLP_MIN_486();
7087 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7088}
7089
7090
7091/** Opcode 0x0f 0xcb. */
7092FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7093{
 7094    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7095 IEMOP_HLP_MIN_486();
7096 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7097}
7098
7099
7100/** Opcode 0x0f 0xcc. */
7101FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7102{
7103 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7104 IEMOP_HLP_MIN_486();
7105 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7106}
7107
7108
7109/** Opcode 0x0f 0xcd. */
7110FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7111{
7112 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7113 IEMOP_HLP_MIN_486();
7114 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7115}
7116
7117
7118/** Opcode 0x0f 0xce. */
7119FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7120{
7121 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7122 IEMOP_HLP_MIN_486();
7123 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7124}
7125
7126
7127/** Opcode 0x0f 0xcf. */
7128FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7129{
7130 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7131 IEMOP_HLP_MIN_486();
7132 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7133}
7134
7135
7136/* Opcode 0x0f 0xd0 - invalid */
7137/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7138FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7139/* Opcode 0xf3 0x0f 0xd0 - invalid */
7140/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7141FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7142
7143/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7144FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7145/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7146FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7147/* Opcode 0xf3 0x0f 0xd1 - invalid */
7148/* Opcode 0xf2 0x0f 0xd1 - invalid */
7149
7150/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7151FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7152/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7153FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7154/* Opcode 0xf3 0x0f 0xd2 - invalid */
7155/* Opcode 0xf2 0x0f 0xd2 - invalid */
7156
7157/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7158FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7159/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7160FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7161/* Opcode 0xf3 0x0f 0xd3 - invalid */
7162/* Opcode 0xf2 0x0f 0xd3 - invalid */
7163
7164/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7165FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7166/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7167FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7168/* Opcode 0xf3 0x0f 0xd4 - invalid */
7169/* Opcode 0xf2 0x0f 0xd4 - invalid */
7170
7171/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7172FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7173/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7174FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7175/* Opcode 0xf3 0x0f 0xd5 - invalid */
7176/* Opcode 0xf2 0x0f 0xd5 - invalid */
7177
7178/* Opcode 0x0f 0xd6 - invalid */
7179/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7180FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7181/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7182FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7183/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7184FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7185#if 0
7186FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7187{
 7188    /* Docs say register only. */
7189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7190
7191 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7192 {
7193 case IEM_OP_PRF_SIZE_OP: /* SSE */
7194 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7195 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7196 IEM_MC_BEGIN(2, 0);
7197 IEM_MC_ARG(uint64_t *, pDst, 0);
7198 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7199 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7200 IEM_MC_PREPARE_SSE_USAGE();
7201 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7202 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7203 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7204 IEM_MC_ADVANCE_RIP();
7205 IEM_MC_END();
7206 return VINF_SUCCESS;
7207
7208 case 0: /* MMX */
 7209            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7210 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7211 IEM_MC_BEGIN(2, 0);
7212 IEM_MC_ARG(uint64_t *, pDst, 0);
7213 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7214 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7215 IEM_MC_PREPARE_FPU_USAGE();
7216 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7217 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7218 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7219 IEM_MC_ADVANCE_RIP();
7220 IEM_MC_END();
7221 return VINF_SUCCESS;
7222
7223 default:
7224 return IEMOP_RAISE_INVALID_OPCODE();
7225 }
7226}
7227#endif
7228
7229
7230/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7231FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7232{
 7233    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7234 /** @todo testcase: Check that the instruction implicitly clears the high
7235 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7236 * and opcode modifications are made to work with the whole width (not
7237 * just 128). */
 7238    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
 7239    /* Docs say register only. */
7240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7241 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7242 {
7243 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7244 IEM_MC_BEGIN(2, 0);
7245 IEM_MC_ARG(uint64_t *, pDst, 0);
7246 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7247 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7248 IEM_MC_PREPARE_FPU_USAGE();
7249 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7250 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7251 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7252 IEM_MC_ADVANCE_RIP();
7253 IEM_MC_END();
7254 return VINF_SUCCESS;
7255 }
7256 return IEMOP_RAISE_INVALID_OPCODE();
7257}
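
/* For illustration only (hypothetical name, not the real assembly worker):
   the 64-bit PMOVMSKB operation gathers the most significant bit of each of
   the eight source bytes into the low 8 bits of the destination. */
#if 0
static void iemPMovMskBU64Sketch(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t const uSrc  = *puSrc;
    uint64_t       fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    *puDst = fMask;
}
#endif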
7258
 7259/** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7260FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7261{
 7262    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7263 /** @todo testcase: Check that the instruction implicitly clears the high
7264 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7265 * and opcode modifications are made to work with the whole width (not
7266 * just 128). */
 7267    IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
 7268    /* Docs say register only. */
7269 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7270 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7271 {
7272 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7273 IEM_MC_BEGIN(2, 0);
7274 IEM_MC_ARG(uint64_t *, pDst, 0);
7275 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7276 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7277 IEM_MC_PREPARE_SSE_USAGE();
7278 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7279 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7280 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7281 IEM_MC_ADVANCE_RIP();
7282 IEM_MC_END();
7283 return VINF_SUCCESS;
7284 }
7285 return IEMOP_RAISE_INVALID_OPCODE();
7286}
7287
7288/* Opcode 0xf3 0x0f 0xd7 - invalid */
7289/* Opcode 0xf2 0x0f 0xd7 - invalid */
7290
7291
7292/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7293FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7294/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7295FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7296/* Opcode 0xf3 0x0f 0xd8 - invalid */
7297/* Opcode 0xf2 0x0f 0xd8 - invalid */
7298
7299/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7300FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7301/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7302FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7303/* Opcode 0xf3 0x0f 0xd9 - invalid */
7304/* Opcode 0xf2 0x0f 0xd9 - invalid */
7305
7306/** Opcode 0x0f 0xda - pminub Pq, Qq */
7307FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7308/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7309FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7310/* Opcode 0xf3 0x0f 0xda - invalid */
7311/* Opcode 0xf2 0x0f 0xda - invalid */
7312
7313/** Opcode 0x0f 0xdb - pand Pq, Qq */
7314FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7315/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7316FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7317/* Opcode 0xf3 0x0f 0xdb - invalid */
7318/* Opcode 0xf2 0x0f 0xdb - invalid */
7319
7320/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7321FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7322/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7323FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7324/* Opcode 0xf3 0x0f 0xdc - invalid */
7325/* Opcode 0xf2 0x0f 0xdc - invalid */
7326
7327/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7328FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7329/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7330FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7331/* Opcode 0xf3 0x0f 0xdd - invalid */
7332/* Opcode 0xf2 0x0f 0xdd - invalid */
7333
7334/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7335FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7336/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7337FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7338/* Opcode 0xf3 0x0f 0xde - invalid */
7339/* Opcode 0xf2 0x0f 0xde - invalid */
7340
7341/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7342FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7343/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7344FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7345/* Opcode 0xf3 0x0f 0xdf - invalid */
7346/* Opcode 0xf2 0x0f 0xdf - invalid */
7347
7348/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7349FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7350/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7351FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7352/* Opcode 0xf3 0x0f 0xe0 - invalid */
7353/* Opcode 0xf2 0x0f 0xe0 - invalid */
7354
7355/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7356FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7357/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7358FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7359/* Opcode 0xf3 0x0f 0xe1 - invalid */
7360/* Opcode 0xf2 0x0f 0xe1 - invalid */
7361
7362/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7363FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7364/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7365FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7366/* Opcode 0xf3 0x0f 0xe2 - invalid */
7367/* Opcode 0xf2 0x0f 0xe2 - invalid */
7368
7369/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7370FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7371/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7372FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7373/* Opcode 0xf3 0x0f 0xe3 - invalid */
7374/* Opcode 0xf2 0x0f 0xe3 - invalid */
7375
7376/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7377FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7378/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7379FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7380/* Opcode 0xf3 0x0f 0xe4 - invalid */
7381/* Opcode 0xf2 0x0f 0xe4 - invalid */
7382
7383/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7384FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7385/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7386FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7387/* Opcode 0xf3 0x0f 0xe5 - invalid */
7388/* Opcode 0xf2 0x0f 0xe5 - invalid */
7389
7390/* Opcode 0x0f 0xe6 - invalid */
7391/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7392FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7393/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7394FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7395/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7396FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7397
7398
7399/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7400FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7401{
7402 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7403 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7404 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7405 {
7406 /* Register, memory. */
7407 IEM_MC_BEGIN(0, 2);
7408 IEM_MC_LOCAL(uint64_t, uSrc);
7409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7410
7411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7413 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7414 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7415
7416 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7417 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7418
7419 IEM_MC_ADVANCE_RIP();
7420 IEM_MC_END();
7421 return VINF_SUCCESS;
7422 }
7423 /* The register, register encoding is invalid. */
7424 return IEMOP_RAISE_INVALID_OPCODE();
7425}
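
/* Note: the non-temporal hint of MOVNTQ only affects caching behaviour, so an
   ordinary 64-bit store, as done above, is sufficient for emulation purposes. */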
7426
7427/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7428FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7429{
7430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7431 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7432 {
7433 /* Register, memory. */
7434 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7435 IEM_MC_BEGIN(0, 2);
7436 IEM_MC_LOCAL(uint128_t, uSrc);
7437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7438
7439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7441 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7442 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7443
7444 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7445 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7446
7447 IEM_MC_ADVANCE_RIP();
7448 IEM_MC_END();
7449 return VINF_SUCCESS;
7450 }
7451
7452 /* The register, register encoding is invalid. */
7453 return IEMOP_RAISE_INVALID_OPCODE();
7454}
7455
7456/* Opcode 0xf3 0x0f 0xe7 - invalid */
7457/* Opcode 0xf2 0x0f 0xe7 - invalid */
7458
7459
7460/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7461FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7462/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7463FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7464/* Opcode 0xf3 0x0f 0xe8 - invalid */
7465/* Opcode 0xf2 0x0f 0xe8 - invalid */
7466
7467/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7468FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7469/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7470FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7471/* Opcode 0xf3 0x0f 0xe9 - invalid */
7472/* Opcode 0xf2 0x0f 0xe9 - invalid */
7473
7474/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7475FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7476/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7477FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7478/* Opcode 0xf3 0x0f 0xea - invalid */
7479/* Opcode 0xf2 0x0f 0xea - invalid */
7480
7481/** Opcode 0x0f 0xeb - por Pq, Qq */
7482FNIEMOP_STUB(iemOp_por_Pq_Qq);
7483/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7484FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7485/* Opcode 0xf3 0x0f 0xeb - invalid */
7486/* Opcode 0xf2 0x0f 0xeb - invalid */
7487
7488/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7489FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7490/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7491FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7492/* Opcode 0xf3 0x0f 0xec - invalid */
7493/* Opcode 0xf2 0x0f 0xec - invalid */
7494
7495/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7496FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7497/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7498FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7499/* Opcode 0xf3 0x0f 0xed - invalid */
7500/* Opcode 0xf2 0x0f 0xed - invalid */
7501
7502/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7503FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7504/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7505FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7506/* Opcode 0xf3 0x0f 0xee - invalid */
7507/* Opcode 0xf2 0x0f 0xee - invalid */
7508
7509
7510/** Opcode 0x0f 0xef - pxor Pq, Qq */
7511FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7512{
7513 IEMOP_MNEMONIC(pxor, "pxor");
7514 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7515}
7516
7517/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7518FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7519{
7520 IEMOP_MNEMONIC(vpxor, "vpxor");
7521 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7522}
7523
7524/* Opcode 0xf3 0x0f 0xef - invalid */
7525/* Opcode 0xf2 0x0f 0xef - invalid */
7526
7527/* Opcode 0x0f 0xf0 - invalid */
7528/* Opcode 0x66 0x0f 0xf0 - invalid */
7529/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7530FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7531
7532/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7533FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7534/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7535FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7536/* Opcode 0xf2 0x0f 0xf1 - invalid */
7537
7538/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7539FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7540/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7541FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7542/* Opcode 0xf2 0x0f 0xf2 - invalid */
7543
7544/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7545FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7546/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7547FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7548/* Opcode 0xf2 0x0f 0xf3 - invalid */
7549
7550/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7551FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7552/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7553FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7554/* Opcode 0xf2 0x0f 0xf4 - invalid */
7555
7556/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7557FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7558/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7559FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7560/* Opcode 0xf2 0x0f 0xf5 - invalid */
7561
7562/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7563FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7564/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7565FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7566/* Opcode 0xf2 0x0f 0xf6 - invalid */
7567
7568/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7569FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7570/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7571FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7572/* Opcode 0xf2 0x0f 0xf7 - invalid */
7573
7574/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7575FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7576/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7577FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7578/* Opcode 0xf2 0x0f 0xf8 - invalid */
7579
7580/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7581FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7582/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7583FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7584/* Opcode 0xf2 0x0f 0xf9 - invalid */
7585
7586/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7587FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7588/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7589FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7590/* Opcode 0xf2 0x0f 0xfa - invalid */
7591
7592/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7593FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7594/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7595FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7596/* Opcode 0xf2 0x0f 0xfb - invalid */
7597
7598/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7599FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7600/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7601FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7602/* Opcode 0xf2 0x0f 0xfc - invalid */
7603
7604/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7605FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7606/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7607FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7608/* Opcode 0xf2 0x0f 0xfd - invalid */
7609
7610/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7611FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7612/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7613FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7614/* Opcode 0xf2 0x0f 0xfe - invalid */
7615
7616
7617/** Opcode **** 0x0f 0xff - UD0 */
7618FNIEMOP_DEF(iemOp_ud0)
7619{
7620 IEMOP_MNEMONIC(ud0, "ud0");
7621 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7622 {
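        /* Intel CPUs appear to decode the ModR/M byte (and any memory operand
           bytes) of UD0 before raising the exception, so the effective address
           is calculated and discarded here to match the instruction length. */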
7623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7624#ifndef TST_IEM_CHECK_MC
7625 RTGCPTR GCPtrEff;
7626 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7627 if (rcStrict != VINF_SUCCESS)
7628 return rcStrict;
7629#endif
7630 IEMOP_HLP_DONE_DECODING();
7631 }
7632 return IEMOP_RAISE_INVALID_OPCODE();
7633}
7634
7635
7636
7637/** Repeats a_fn four times. For decoding tables. */
7638#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
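/* Example: IEMOP_X4(iemOp_ud0) expands to: iemOp_ud0, iemOp_ud0, iemOp_ud0, iemOp_ud0 */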
7639
7640/**
7641 * Two byte opcode map, first byte 0x0f.
7642 *
7643 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7644 * check if it needs updating as well when making changes.
7645 */
7646IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7647{
7648 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7649 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7650 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7651 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7652 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7653 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7654 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7655 /* 0x06 */ IEMOP_X4(iemOp_clts),
7656 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7657 /* 0x08 */ IEMOP_X4(iemOp_invd),
7658 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7659 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7660 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7661 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7662 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7663 /* 0x0e */ IEMOP_X4(iemOp_femms),
7664 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7665
7666 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7667 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7668 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7669 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7670 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7671 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7672 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7673 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7674 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7675 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7676 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7677 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7678 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7679 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7680 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7681 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7682
7683 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7684 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7685 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7686 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7687 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7688 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7689 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7690 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7691 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7692 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7693 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7694 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7695 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7696 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7697 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7698 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7699
7700 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7701 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7702 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7703 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7704 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7705 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7706 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7707 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7708 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7709 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7710 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7711 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7712 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7713 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7714 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7715 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7716
7717 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7718 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7719 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7720 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7721 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7722 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7723 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7724 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7725 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7726 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7727 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7728 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7729 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7730 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7731 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7732 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7733
7734 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7735 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7736 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7737 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7738 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7739 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7740 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7741 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7742 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7743 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7744 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7745 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7746 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7747 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7748 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7749 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7750
7751 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7752 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7753 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7754 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7755 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7756 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7757 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7758 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7759 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7760 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7761 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7762 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7763 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7764 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7765 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7766 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7767
7768 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7769 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7770 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7771 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7772 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7773 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7774 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7775 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7776
7777 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7778 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7779 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7780 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7781 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7782 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7783 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7784 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7785
7786 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7787 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7788 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7789 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7790 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7791 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7792 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7793 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7794 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7795 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7796 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7797 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7798 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7799 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7800 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7801 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7802
7803 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7804 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7805 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7806 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7807 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7808 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7809 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7810 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7811 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7812 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7813 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7814 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7815 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7816 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7817 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7818 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7819
7820 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7821 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7822 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7823 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7824 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7825 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7826 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7827 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7828 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7829 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7830 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7831 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7832 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7833 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7834 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7835 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7836
7837 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7838 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7839 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7840 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7841 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7842 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7843 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7844 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7845 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7846 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7847 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7848 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7849 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7850 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7851 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7852 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7853
7854 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7855 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7856 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7857 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7858 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7859 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
 7860    /* 0xc6 */  iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7861 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7862 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7863 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7864 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7865 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7866 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7867 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7868 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7869 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7870
7871 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7872 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7873 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7874 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7875 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7876 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7877 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7878 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7879 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7880 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7881 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7882 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7883 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7884 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7885 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7886 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7887
7888 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7889 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7890 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7891 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7892 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7893 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7894 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7895 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7896 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7897 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7898 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7899 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7900 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7901 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7902 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7903 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7904
7905 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7906 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7907 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7908 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7909 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7910 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7911 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7912 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7913 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7914 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7915 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7916 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7917 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7918 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7919 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7920 /* 0xff */ IEMOP_X4(iemOp_ud0),
7921};
7922AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
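
/* The map holds four entries per opcode byte, one per mandatory-prefix column
   (none, 0x66, 0xf3, 0xf2) as noted at the top of the table, so lookups
   presumably take the form g_apfnTwoByteMap[bOpcode * 4 + idxPrefix]. */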
7923
7924
7925/**
7926 * VEX opcode map \#1.
7927 *
7928 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
 7929 *          it needs updating too when making changes.
7930 */
7931IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
7932{
7933 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7934 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7935 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7936 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7937 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7938 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7939 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7940 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7941 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7942 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7943 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7944 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7945 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
7946 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7947 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7948 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7949 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7950
7951 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7952 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7953 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7954 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7955 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7956 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7957 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7958 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7959 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7960 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7961 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7962 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7963 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7964 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7965 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7966 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7967
7968 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7969 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7970 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7971 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7972 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7973 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7974 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7975 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7976 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7977 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7978 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7979 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7980 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7981 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7982 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7983 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7984
7985 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7986 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7987 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7988 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7989 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7990 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7991 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7992 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7993 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7994 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7995 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7996 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7997 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7998 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7999 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8000 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8001
8002 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
8003 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
8004 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
8005 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
8006 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
8007 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
8008 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
8009 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
8010 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
8011 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
8012 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
8013 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
8014 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
8015 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
8016 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
8017 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
8018
8019 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8020 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8021 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8022 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8023 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8024 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8025 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8026 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8027 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8028 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8029 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8030 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8031 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8032 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8033 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8034 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8035
8036 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8037 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8038 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8039 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8040 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8041 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8042 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8043 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8044 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8045 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8046 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8047 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8048 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8049 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8050 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8051 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8052
    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

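    /*
     * On the iemOp_InvalidNeedRMImm8 stubs in the 0xc4..0xc6 rows above: the
     * name suggests they still decode the ModR/M byte plus the trailing imm8
     * before raising the invalid-opcode exception, so the instruction length
     * is consumed consistently.  That reading of the stub's purpose is an
     * inference from the naming, not something stated in this excerpt.
     */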
    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
/** @} */