VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@65779

Last change on this file was 65779, checked in by vboxsync, 8 years ago

IEM: Corrected invalid opcode decoding in groups 12, 13, and 14.

1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 65779 2017-02-13 17:39:00Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/** @name ..... opcodes.
26 *
27 * @{
28 */
29
30/** @} */
31
32
33/** @name Two byte opcodes (first byte 0x0f).
34 *
35 * @{
36 */
37
38/** Opcode 0x0f 0x00 /0. */
39FNIEMOPRM_DEF(iemOp_Grp6_sldt)
40{
41 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
42 IEMOP_HLP_MIN_286();
43 IEMOP_HLP_NO_REAL_OR_V86_MODE();
44
45 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
46 {
47 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
48 switch (pVCpu->iem.s.enmEffOpSize)
49 {
50 case IEMMODE_16BIT:
51 IEM_MC_BEGIN(0, 1);
52 IEM_MC_LOCAL(uint16_t, u16Ldtr);
53 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
54 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 break;
58
59 case IEMMODE_32BIT:
60 IEM_MC_BEGIN(0, 1);
61 IEM_MC_LOCAL(uint32_t, u32Ldtr);
62 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
63 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
64 IEM_MC_ADVANCE_RIP();
65 IEM_MC_END();
66 break;
67
68 case IEMMODE_64BIT:
69 IEM_MC_BEGIN(0, 1);
70 IEM_MC_LOCAL(uint64_t, u64Ldtr);
71 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
72 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
73 IEM_MC_ADVANCE_RIP();
74 IEM_MC_END();
75 break;
76
77 IEM_NOT_REACHED_DEFAULT_CASE_RET();
78 }
79 }
80 else
81 {
82 IEM_MC_BEGIN(0, 2);
83 IEM_MC_LOCAL(uint16_t, u16Ldtr);
84 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
85 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
86 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
87 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
88 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
89 IEM_MC_ADVANCE_RIP();
90 IEM_MC_END();
91 }
92 return VINF_SUCCESS;
93}
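/*
 * Decoding sketch: every handler in this file splits the ModRM byte the same
 * way sldt just did. The two helpers below are invented for illustration only
 * (they do not exist in IEM); the X86_MODRM_* masks are the ones used above.
 */
#if 0 /* illustrative sketch, not part of the build */
static bool iemSketchModRmIsRegisterForm(uint8_t bRm)
{
    /* mod == 3 (both top bits set) selects the register operand form;
       mod == 0..2 are the memory forms with increasing displacement sizes. */
    return (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT);
}

static uint8_t iemSketchGregFromRm(PVMCPU pVCpu, uint8_t bRm)
{
    /* The 3-bit r/m field is widened with REX.B so r8..r15 are reachable. */
    return (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB;
}
#endif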
94
95
96/** Opcode 0x0f 0x00 /1. */
97FNIEMOPRM_DEF(iemOp_Grp6_str)
98{
99 IEMOP_MNEMONIC(str, "str Rv/Mw");
100 IEMOP_HLP_MIN_286();
101 IEMOP_HLP_NO_REAL_OR_V86_MODE();
102
103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
104 {
105 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
106 switch (pVCpu->iem.s.enmEffOpSize)
107 {
108 case IEMMODE_16BIT:
109 IEM_MC_BEGIN(0, 1);
110 IEM_MC_LOCAL(uint16_t, u16Tr);
111 IEM_MC_FETCH_TR_U16(u16Tr);
112 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 break;
116
117 case IEMMODE_32BIT:
118 IEM_MC_BEGIN(0, 1);
119 IEM_MC_LOCAL(uint32_t, u32Tr);
120 IEM_MC_FETCH_TR_U32(u32Tr);
121 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
122 IEM_MC_ADVANCE_RIP();
123 IEM_MC_END();
124 break;
125
126 case IEMMODE_64BIT:
127 IEM_MC_BEGIN(0, 1);
128 IEM_MC_LOCAL(uint64_t, u64Tr);
129 IEM_MC_FETCH_TR_U64(u64Tr);
130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 break;
134
135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
136 }
137 }
138 else
139 {
140 IEM_MC_BEGIN(0, 2);
141 IEM_MC_LOCAL(uint16_t, u16Tr);
142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
144 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
145 IEM_MC_FETCH_TR_U16(u16Tr);
146 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 }
150 return VINF_SUCCESS;
151}
152
153
154/** Opcode 0x0f 0x00 /2. */
155FNIEMOPRM_DEF(iemOp_Grp6_lldt)
156{
157 IEMOP_MNEMONIC(lldt, "lldt Ew");
158 IEMOP_HLP_MIN_286();
159 IEMOP_HLP_NO_REAL_OR_V86_MODE();
160
161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
162 {
163 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
164 IEM_MC_BEGIN(1, 0);
165 IEM_MC_ARG(uint16_t, u16Sel, 0);
166 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
167 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
168 IEM_MC_END();
169 }
170 else
171 {
172 IEM_MC_BEGIN(1, 1);
173 IEM_MC_ARG(uint16_t, u16Sel, 0);
174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
176 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
177 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
178 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
179 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
180 IEM_MC_END();
181 }
182 return VINF_SUCCESS;
183}
184
185
186/** Opcode 0x0f 0x00 /3. */
187FNIEMOPRM_DEF(iemOp_Grp6_ltr)
188{
189 IEMOP_MNEMONIC(ltr, "ltr Ew");
190 IEMOP_HLP_MIN_286();
191 IEMOP_HLP_NO_REAL_OR_V86_MODE();
192
193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
194 {
195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
196 IEM_MC_BEGIN(1, 0);
197 IEM_MC_ARG(uint16_t, u16Sel, 0);
198 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
199 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
200 IEM_MC_END();
201 }
202 else
203 {
204 IEM_MC_BEGIN(1, 1);
205 IEM_MC_ARG(uint16_t, u16Sel, 0);
206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
209 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
210 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
211 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
212 IEM_MC_END();
213 }
214 return VINF_SUCCESS;
215}
216
217
218/** Common worker for opcode 0x0f 0x00 /4 and /5 (verr, verw). */
219FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
220{
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_NO_REAL_OR_V86_MODE();
223
224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
225 {
226 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
227 IEM_MC_BEGIN(2, 0);
228 IEM_MC_ARG(uint16_t, u16Sel, 0);
229 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
230 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
231 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(2, 1);
237 IEM_MC_ARG(uint16_t, u16Sel, 0);
238 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
241 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
242 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
243 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
244 IEM_MC_END();
245 }
246 return VINF_SUCCESS;
247}
248
249
250/** Opcode 0x0f 0x00 /4. */
251FNIEMOPRM_DEF(iemOp_Grp6_verr)
252{
253 IEMOP_MNEMONIC(verr, "verr Ew");
254 IEMOP_HLP_MIN_286();
255 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
256}
257
258
259/** Opcode 0x0f 0x00 /5. */
260FNIEMOPRM_DEF(iemOp_Grp6_verw)
261{
262 IEMOP_MNEMONIC(verw, "verw Ew");
263 IEMOP_HLP_MIN_286();
264 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
265}
266
267
268/**
269 * Group 6 jump table.
270 */
271IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
272{
273 iemOp_Grp6_sldt,
274 iemOp_Grp6_str,
275 iemOp_Grp6_lldt,
276 iemOp_Grp6_ltr,
277 iemOp_Grp6_verr,
278 iemOp_Grp6_verw,
279 iemOp_InvalidWithRM,
280 iemOp_InvalidWithRM
281};
282
283/** Opcode 0x0f 0x00. */
284FNIEMOP_DEF(iemOp_Grp6)
285{
286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
287 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
288}
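/*
 * Dispatch sketch: group 6 uses the ModRM reg field (bits 5:3) as a /0../7
 * sub-opcode, which is exactly what indexes g_apfnGroup6 above. A worked
 * example (values only, no new code):
 */
#if 0 /* illustrative sketch, not part of the build */
/* bRm = 0xd8 = 11 011 000b -> mod=3, reg=/3, rm=0 -> iemOp_Grp6_ltr */
uint8_t const iReg = (0xd8 >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* = 3 */
#endif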
289
290
291/** Opcode 0x0f 0x01 /0. */
292FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
293{
294 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
295 IEMOP_HLP_MIN_286();
296 IEMOP_HLP_64BIT_OP_SIZE();
297 IEM_MC_BEGIN(2, 1);
298 IEM_MC_ARG(uint8_t, iEffSeg, 0);
299 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
303 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
304 IEM_MC_END();
305 return VINF_SUCCESS;
306}
307
308
309/** Opcode 0x0f 0x01 /0. */
310FNIEMOP_DEF(iemOp_Grp7_vmcall)
311{
312 IEMOP_BITCH_ABOUT_STUB();
313 return IEMOP_RAISE_INVALID_OPCODE();
314}
315
316
317/** Opcode 0x0f 0x01 /0. */
318FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
319{
320 IEMOP_BITCH_ABOUT_STUB();
321 return IEMOP_RAISE_INVALID_OPCODE();
322}
323
324
325/** Opcode 0x0f 0x01 /0. */
326FNIEMOP_DEF(iemOp_Grp7_vmresume)
327{
328 IEMOP_BITCH_ABOUT_STUB();
329 return IEMOP_RAISE_INVALID_OPCODE();
330}
331
332
333/** Opcode 0x0f 0x01 /0. */
334FNIEMOP_DEF(iemOp_Grp7_vmxoff)
335{
336 IEMOP_BITCH_ABOUT_STUB();
337 return IEMOP_RAISE_INVALID_OPCODE();
338}
339
340
341/** Opcode 0x0f 0x01 /1. */
342FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
343{
344 IEMOP_MNEMONIC(sidt, "sidt Ms");
345 IEMOP_HLP_MIN_286();
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(2, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
352 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
353 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
354 IEM_MC_END();
355 return VINF_SUCCESS;
356}
357
358
359/** Opcode 0x0f 0x01 /1. */
360FNIEMOP_DEF(iemOp_Grp7_monitor)
361{
362 IEMOP_MNEMONIC(monitor, "monitor");
363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
364 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
365}
366
367
368/** Opcode 0x0f 0x01 /1. */
369FNIEMOP_DEF(iemOp_Grp7_mwait)
370{
371 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
373 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
374}
375
376
377/** Opcode 0x0f 0x01 /2. */
378FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
379{
380 IEMOP_MNEMONIC(lgdt, "lgdt");
381 IEMOP_HLP_64BIT_OP_SIZE();
382 IEM_MC_BEGIN(3, 1);
383 IEM_MC_ARG(uint8_t, iEffSeg, 0);
384 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
389 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
390 IEM_MC_END();
391 return VINF_SUCCESS;
392}
393
394
395/** Opcode 0x0f 0x01 0xd0. */
396FNIEMOP_DEF(iemOp_Grp7_xgetbv)
397{
398 IEMOP_MNEMONIC(xgetbv, "xgetbv");
399 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
400 {
401 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
403 }
404 return IEMOP_RAISE_INVALID_OPCODE();
405}
406
407
408/** Opcode 0x0f 0x01 0xd1. */
409FNIEMOP_DEF(iemOp_Grp7_xsetbv)
410{
411 IEMOP_MNEMONIC(xsetbv, "xsetbv");
412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
413 {
414 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
416 }
417 return IEMOP_RAISE_INVALID_OPCODE();
418}
419
420
421/** Opcode 0x0f 0x01 /3. */
422FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
423{
424 IEMOP_MNEMONIC(lidt, "lidt");
425 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
426 ? IEMMODE_64BIT
427 : pVCpu->iem.s.enmEffOpSize;
428 IEM_MC_BEGIN(3, 1);
429 IEM_MC_ARG(uint8_t, iEffSeg, 0);
430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
431 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
434 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
435 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
436 IEM_MC_END();
437 return VINF_SUCCESS;
438}
439
440
441/** Opcode 0x0f 0x01 0xd8. */
442FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
443
444/** Opcode 0x0f 0x01 0xd9. */
445FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
446
447/** Opcode 0x0f 0x01 0xda. */
448FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
449
450/** Opcode 0x0f 0x01 0xdb. */
451FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
452
453/** Opcode 0x0f 0x01 0xdc. */
454FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
455
456/** Opcode 0x0f 0x01 0xdd. */
457FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
458
459/** Opcode 0x0f 0x01 0xde. */
460FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
461
462/** Opcode 0x0f 0x01 0xdf. */
463FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
464
465/** Opcode 0x0f 0x01 /4. */
466FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
467{
468 IEMOP_MNEMONIC(smsw, "smsw");
469 IEMOP_HLP_MIN_286();
470 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
471 {
472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
473 switch (pVCpu->iem.s.enmEffOpSize)
474 {
475 case IEMMODE_16BIT:
476 IEM_MC_BEGIN(0, 1);
477 IEM_MC_LOCAL(uint16_t, u16Tmp);
478 IEM_MC_FETCH_CR0_U16(u16Tmp);
479 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
480 { /* likely */ }
481 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
482 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
483 else
484 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
485 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
486 IEM_MC_ADVANCE_RIP();
487 IEM_MC_END();
488 return VINF_SUCCESS;
489
490 case IEMMODE_32BIT:
491 IEM_MC_BEGIN(0, 1);
492 IEM_MC_LOCAL(uint32_t, u32Tmp);
493 IEM_MC_FETCH_CR0_U32(u32Tmp);
494 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
495 IEM_MC_ADVANCE_RIP();
496 IEM_MC_END();
497 return VINF_SUCCESS;
498
499 case IEMMODE_64BIT:
500 IEM_MC_BEGIN(0, 1);
501 IEM_MC_LOCAL(uint64_t, u64Tmp);
502 IEM_MC_FETCH_CR0_U64(u64Tmp);
503 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
504 IEM_MC_ADVANCE_RIP();
505 IEM_MC_END();
506 return VINF_SUCCESS;
507
508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
509 }
510 }
511 else
512 {
513 /* Ignore operand size here, memory refs are always 16-bit. */
514 IEM_MC_BEGIN(0, 2);
515 IEM_MC_LOCAL(uint16_t, u16Tmp);
516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
519 IEM_MC_FETCH_CR0_U16(u16Tmp);
520 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
521 { /* likely */ }
522 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
523 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
524 else
525 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
526 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
527 IEM_MC_ADVANCE_RIP();
528 IEM_MC_END();
529 return VINF_SUCCESS;
530 }
531}
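/*
 * Masking sketch for the CPU-dependent OR above: the 286 stores the MSW with
 * bits 15:4 forced to one (| 0xfff0), the 386 forces only bits 15:5
 * (| 0xffe0, presumably because ET/bit 4 became a real bit there), and 486+
 * store the low word of CR0 unmodified. Worked example, assuming CR0's low
 * word is 0x0003 (PE|MP):
 *   286:  0x0003 | 0xfff0 = 0xfff3
 *   386:  0x0003 | 0xffe0 = 0xffe3
 *   486+: 0x0003 stored as-is
 */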
532
533
534/** Opcode 0x0f 0x01 /6. */
535FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
536{
537 /* The operand size is effectively ignored, all is 16-bit and only the
538 lower 4 bits are used. */
539 IEMOP_MNEMONIC(lmsw, "lmsw");
540 IEMOP_HLP_MIN_286();
541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
542 {
543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
544 IEM_MC_BEGIN(1, 0);
545 IEM_MC_ARG(uint16_t, u16Tmp, 0);
546 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
547 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
548 IEM_MC_END();
549 }
550 else
551 {
552 IEM_MC_BEGIN(1, 1);
553 IEM_MC_ARG(uint16_t, u16Tmp, 0);
554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
557 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
558 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
559 IEM_MC_END();
560 }
561 return VINF_SUCCESS;
562}
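/*
 * MSW layout sketch for reference (the standard CR0 low bits; the X86_CR0_*
 * names are the usual VBox x86.h ones, assumed here): PE = bit 0, MP = bit 1,
 * EM = bit 2, TS = bit 3. Note lmsw can set PE but never clear it.
 */
#if 0 /* illustrative sketch, not part of the build */
uint16_t const uMswExample = X86_CR0_PE | X86_CR0_TS; /* = 0x0009 */
#endif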
563
564
565/** Opcode 0x0f 0x01 /7. */
566FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
567{
568 IEMOP_MNEMONIC(invlpg, "invlpg");
569 IEMOP_HLP_MIN_486();
570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
571 IEM_MC_BEGIN(1, 1);
572 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
574 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
575 IEM_MC_END();
576 return VINF_SUCCESS;
577}
578
579
580/** Opcode 0x0f 0x01 /7. */
581FNIEMOP_DEF(iemOp_Grp7_swapgs)
582{
583 IEMOP_MNEMONIC(swapgs, "swapgs");
584 IEMOP_HLP_ONLY_64BIT();
585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
586 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
587}
588
589
590/** Opcode 0x0f 0x01 /7. */
591FNIEMOP_DEF(iemOp_Grp7_rdtscp)
592{
593 NOREF(pVCpu);
594 IEMOP_BITCH_ABOUT_STUB();
595 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
596}
597
598
599/**
600 * Group 7 jump table, memory variant.
601 */
602IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
603{
604 iemOp_Grp7_sgdt,
605 iemOp_Grp7_sidt,
606 iemOp_Grp7_lgdt,
607 iemOp_Grp7_lidt,
608 iemOp_Grp7_smsw,
609 iemOp_InvalidWithRM,
610 iemOp_Grp7_lmsw,
611 iemOp_Grp7_invlpg
612};
613
614
615/** Opcode 0x0f 0x01. */
616FNIEMOP_DEF(iemOp_Grp7)
617{
618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
619 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
620 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
621
622 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
623 {
624 case 0:
625 switch (bRm & X86_MODRM_RM_MASK)
626 {
627 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
628 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
629 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
630 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
631 }
632 return IEMOP_RAISE_INVALID_OPCODE();
633
634 case 1:
635 switch (bRm & X86_MODRM_RM_MASK)
636 {
637 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
638 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
639 }
640 return IEMOP_RAISE_INVALID_OPCODE();
641
642 case 2:
643 switch (bRm & X86_MODRM_RM_MASK)
644 {
645 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
646 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
647 }
648 return IEMOP_RAISE_INVALID_OPCODE();
649
650 case 3:
651 switch (bRm & X86_MODRM_RM_MASK)
652 {
653 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
654 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
655 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
656 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
657 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
658 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
659 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
660 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
662 }
663
664 case 4:
665 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
666
667 case 5:
668 return IEMOP_RAISE_INVALID_OPCODE();
669
670 case 6:
671 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
672
673 case 7:
674 switch (bRm & X86_MODRM_RM_MASK)
675 {
676 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
677 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
678 }
679 return IEMOP_RAISE_INVALID_OPCODE();
680
681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
682 }
683}
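/*
 * Dispatch sketch: with mod == 3, group 7 decodes the r/m field as a second
 * level of sub-opcode, as the switch above shows. Worked example:
 */
#if 0 /* illustrative sketch, not part of the build */
/* 0f 01 c1: bRm = 0xc1 = 11 000 001b -> mod=3, reg=/0, rm=1 -> iemOp_Grp7_vmcall */
#endif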
684
685/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
686FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
687{
688 IEMOP_HLP_NO_REAL_OR_V86_MODE();
689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
690
691 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
692 {
693 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
694 switch (pVCpu->iem.s.enmEffOpSize)
695 {
696 case IEMMODE_16BIT:
697 {
698 IEM_MC_BEGIN(3, 0);
699 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
700 IEM_MC_ARG(uint16_t, u16Sel, 1);
701 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
702
703 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
704 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
705 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
706
707 IEM_MC_END();
708 return VINF_SUCCESS;
709 }
710
711 case IEMMODE_32BIT:
712 case IEMMODE_64BIT:
713 {
714 IEM_MC_BEGIN(3, 0);
715 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
716 IEM_MC_ARG(uint16_t, u16Sel, 1);
717 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
718
719 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
720 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
721 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
722
723 IEM_MC_END();
724 return VINF_SUCCESS;
725 }
726
727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
728 }
729 }
730 else
731 {
732 switch (pVCpu->iem.s.enmEffOpSize)
733 {
734 case IEMMODE_16BIT:
735 {
736 IEM_MC_BEGIN(3, 1);
737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
738 IEM_MC_ARG(uint16_t, u16Sel, 1);
739 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741
742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
743 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
744
745 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
746 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
747 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
748
749 IEM_MC_END();
750 return VINF_SUCCESS;
751 }
752
753 case IEMMODE_32BIT:
754 case IEMMODE_64BIT:
755 {
756 IEM_MC_BEGIN(3, 1);
757 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
758 IEM_MC_ARG(uint16_t, u16Sel, 1);
759 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
761
762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
763 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
764/** @todo testcase: make sure it's a 16-bit read. */
765
766 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
767 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
768 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
769
770 IEM_MC_END();
771 return VINF_SUCCESS;
772 }
773
774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
775 }
776 }
777}
778
779
780
781/** Opcode 0x0f 0x02. */
782FNIEMOP_DEF(iemOp_lar_Gv_Ew)
783{
784 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
785 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
786}
787
788
789/** Opcode 0x0f 0x03. */
790FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
791{
792 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
793 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
794}
795
796
797/** Opcode 0x0f 0x05. */
798FNIEMOP_DEF(iemOp_syscall)
799{
800 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
802 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
803}
804
805
806/** Opcode 0x0f 0x06. */
807FNIEMOP_DEF(iemOp_clts)
808{
809 IEMOP_MNEMONIC(clts, "clts");
810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
812}
813
814
815/** Opcode 0x0f 0x07. */
816FNIEMOP_DEF(iemOp_sysret)
817{
818 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
820 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
821}
822
823
824/** Opcode 0x0f 0x08. */
825FNIEMOP_STUB(iemOp_invd);
826// IEMOP_HLP_MIN_486();
827
828
829/** Opcode 0x0f 0x09. */
830FNIEMOP_DEF(iemOp_wbinvd)
831{
832 IEMOP_MNEMONIC(wbinvd, "wbinvd");
833 IEMOP_HLP_MIN_486();
834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
835 IEM_MC_BEGIN(0, 0);
836 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
837 IEM_MC_ADVANCE_RIP();
838 IEM_MC_END();
839 return VINF_SUCCESS; /* ignore for now */
840}
841
842
843/** Opcode 0x0f 0x0b. */
844FNIEMOP_DEF(iemOp_ud2)
845{
846 IEMOP_MNEMONIC(ud2, "ud2");
847 return IEMOP_RAISE_INVALID_OPCODE();
848}
849
850/** Opcode 0x0f 0x0d. */
851FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
852{
853 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
854 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
855 {
856 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
857 return IEMOP_RAISE_INVALID_OPCODE();
858 }
859
860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
862 {
863 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
864 return IEMOP_RAISE_INVALID_OPCODE();
865 }
866
867 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
868 {
869 case 2: /* Aliased to /0 for the time being. */
870 case 4: /* Aliased to /0 for the time being. */
871 case 5: /* Aliased to /0 for the time being. */
872 case 6: /* Aliased to /0 for the time being. */
873 case 7: /* Aliased to /0 for the time being. */
874 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
875 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
876 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
878 }
879
880 IEM_MC_BEGIN(0, 1);
881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
884 /* Currently a NOP. */
885 NOREF(GCPtrEffSrc);
886 IEM_MC_ADVANCE_RIP();
887 IEM_MC_END();
888 return VINF_SUCCESS;
889}
890
891
892/** Opcode 0x0f 0x0e. */
893FNIEMOP_STUB(iemOp_femms);
894
895
896/** Opcode 0x0f 0x0f 0x0c. */
897FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
898
899/** Opcode 0x0f 0x0f 0x0d. */
900FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
901
902/** Opcode 0x0f 0x0f 0x1c. */
903FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
904
905/** Opcode 0x0f 0x0f 0x1d. */
906FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
907
908/** Opcode 0x0f 0x0f 0x8a. */
909FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
910
911/** Opcode 0x0f 0x0f 0x8e. */
912FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
913
914/** Opcode 0x0f 0x0f 0x90. */
915FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
916
917/** Opcode 0x0f 0x0f 0x94. */
918FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
919
920/** Opcode 0x0f 0x0f 0x96. */
921FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
922
923/** Opcode 0x0f 0x0f 0x97. */
924FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
925
926/** Opcode 0x0f 0x0f 0x9a. */
927FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
928
929/** Opcode 0x0f 0x0f 0x9e. */
930FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
931
932/** Opcode 0x0f 0x0f 0xa0. */
933FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
934
935/** Opcode 0x0f 0x0f 0xa4. */
936FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
937
938/** Opcode 0x0f 0x0f 0xa6. */
939FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
940
941/** Opcode 0x0f 0x0f 0xa7. */
942FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
943
944/** Opcode 0x0f 0x0f 0xaa. */
945FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
946
947/** Opcode 0x0f 0x0f 0xae. */
948FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
949
950/** Opcode 0x0f 0x0f 0xb0. */
951FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
952
953/** Opcode 0x0f 0x0f 0xb4. */
954FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
955
956/** Opcode 0x0f 0x0f 0xb6. */
957FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
958
959/** Opcode 0x0f 0x0f 0xb7. */
960FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
961
962/** Opcode 0x0f 0x0f 0xbb. */
963FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
964
965/** Opcode 0x0f 0x0f 0xbf. */
966FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
967
968
969/** Opcode 0x0f 0x0f. */
970FNIEMOP_DEF(iemOp_3Dnow)
971{
972 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
973 {
974 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
975 return IEMOP_RAISE_INVALID_OPCODE();
976 }
977
978 /* This is pretty sparse, use switch instead of table. */
979 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
980 switch (b)
981 {
982 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
983 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
984 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
985 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
986 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
987 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
988 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
989 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
990 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
991 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
992 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
993 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
994 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
995 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
996 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
997 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
998 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
999 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1000 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1001 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1002 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1003 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1004 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1005 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1006 default:
1007 return IEMOP_RAISE_INVALID_OPCODE();
1008 }
1009}
1010
1011
1012/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1013FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1014/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1015FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1016/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1017FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1018/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1019FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1020
1021
1022/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
1023FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1024{
1025 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1028 {
1029 /*
1030 * Register, register.
1031 */
1032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1033 IEM_MC_BEGIN(0, 0);
1034 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1035 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1036 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1037 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1038 IEM_MC_ADVANCE_RIP();
1039 IEM_MC_END();
1040 }
1041 else
1042 {
1043 /*
1044 * Memory, register.
1045 */
1046 IEM_MC_BEGIN(0, 2);
1047 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1049
1050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1052 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1053 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1054
1055 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1056 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1057
1058 IEM_MC_ADVANCE_RIP();
1059 IEM_MC_END();
1060 }
1061 return VINF_SUCCESS;
1062}
1063
1064
1065/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1066FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1067
1068/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1069FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1070
1071/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1072FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1073{
1074 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1077 {
1078 /*
1079 * Register, register.
1080 */
1081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1082 IEM_MC_BEGIN(0, 1);
1083 IEM_MC_LOCAL(uint64_t, uSrc);
1084
1085 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1086 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1087 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1088 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1089
1090 IEM_MC_ADVANCE_RIP();
1091 IEM_MC_END();
1092 }
1093 else
1094 {
1095 /*
1096 * Memory, register.
1097 */
1098 IEM_MC_BEGIN(0, 2);
1099 IEM_MC_LOCAL(uint64_t, uSrc);
1100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1101
1102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1104 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1105 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1106
1107 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1108 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1109
1110 IEM_MC_ADVANCE_RIP();
1111 IEM_MC_END();
1112 }
1113 return VINF_SUCCESS;
1114}
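/*
 * Semantics sketch for the register form above: IEM_MC_STORE_XREG_U64 writes
 * only the low quadword, so bits 127:64 of the destination XMM register are
 * preserved, matching movsd's documented reg,reg behaviour. In plain-C terms
 * (variable names invented for illustration):
 */
#if 0 /* illustrative sketch, not part of the build */
puDst->au64[0] = puSrc->au64[0]; /* low 64 bits copied */
/* puDst->au64[1] deliberately left untouched */
#endif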
1115
1116
1117/** Opcode 0x0f 0x12. */
1118FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1119
1120/** Opcode 0x66 0x0f 0x12. */
1121FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1122
1123/** Opcode 0xf3 0x0f 0x12. */
1124FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1125
1126/** Opcode 0xf2 0x0f 0x12. */
1127FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1128
1129/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1130FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1131
1132/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1133FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1134{
1135 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1137 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1138 {
1139#if 0
1140 /*
1141 * Register, register.
1142 */
1143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1144 IEM_MC_BEGIN(0, 1);
1145 IEM_MC_LOCAL(uint64_t, uSrc);
1146 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1147 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1148 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1149 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1150 IEM_MC_ADVANCE_RIP();
1151 IEM_MC_END();
1152#else
1153 return IEMOP_RAISE_INVALID_OPCODE();
1154#endif
1155 }
1156 else
1157 {
1158 /*
1159 * Memory, register.
1160 */
1161 IEM_MC_BEGIN(0, 2);
1162 IEM_MC_LOCAL(uint64_t, uSrc);
1163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1164
1165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1167 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1168 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1169
1170 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1171 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1172
1173 IEM_MC_ADVANCE_RIP();
1174 IEM_MC_END();
1175 }
1176 return VINF_SUCCESS;
1177}
1178
1179/* Opcode 0xf3 0x0f 0x13 - invalid */
1180/* Opcode 0xf2 0x0f 0x13 - invalid */
1181
1182/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1183FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1184/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1185FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1186/* Opcode 0xf3 0x0f 0x14 - invalid */
1187/* Opcode 0xf2 0x0f 0x14 - invalid */
1188/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1189FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1190/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1191FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1192/* Opcode 0xf3 0x0f 0x15 - invalid */
1193/* Opcode 0xf2 0x0f 0x15 - invalid */
1194/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
1195FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1196/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1197FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1198/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1199FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1200/* Opcode 0xf2 0x0f 0x16 - invalid */
1201/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1202FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1203/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1204FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1205/* Opcode 0xf3 0x0f 0x17 - invalid */
1206/* Opcode 0xf2 0x0f 0x17 - invalid */
1207
1208
1209/** Opcode 0x0f 0x18. */
1210FNIEMOP_DEF(iemOp_prefetch_Grp16)
1211{
1212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1213 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1214 {
1215 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1216 {
1217 case 4: /* Aliased to /0 for the time being according to AMD. */
1218 case 5: /* Aliased to /0 for the time being according to AMD. */
1219 case 6: /* Aliased to /0 for the time being according to AMD. */
1220 case 7: /* Aliased to /0 for the time being according to AMD. */
1221 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1222 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1223 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1224 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1226 }
1227
1228 IEM_MC_BEGIN(0, 1);
1229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1232 /* Currently a NOP. */
1233 NOREF(GCPtrEffSrc);
1234 IEM_MC_ADVANCE_RIP();
1235 IEM_MC_END();
1236 return VINF_SUCCESS;
1237 }
1238
1239 return IEMOP_RAISE_INVALID_OPCODE();
1240}
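/*
 * Hint mapping sketch for the reg field above: /0 = prefetchnta
 * (non-temporal), /1 = prefetcht0 (all cache levels), /2 = prefetcht1,
 * /3 = prefetcht2; /4../7 currently alias /0 per the AMD note. A worked
 * encoding example:
 */
#if 0 /* illustrative sketch, not part of the build */
/* 0f 18 08: bRm = 0x08 = 00 001 000b -> mod=0, reg=/1 -> prefetcht0 [rAX] */
#endif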
1241
1242
1243/** Opcode 0x0f 0x19..0x1f. */
1244FNIEMOP_DEF(iemOp_nop_Ev)
1245{
1246 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1248 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1249 {
1250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1251 IEM_MC_BEGIN(0, 0);
1252 IEM_MC_ADVANCE_RIP();
1253 IEM_MC_END();
1254 }
1255 else
1256 {
1257 IEM_MC_BEGIN(0, 1);
1258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1261 /* Currently a NOP. */
1262 NOREF(GCPtrEffSrc);
1263 IEM_MC_ADVANCE_RIP();
1264 IEM_MC_END();
1265 }
1266 return VINF_SUCCESS;
1267}
1268
1269
1270/** Opcode 0x0f 0x20. */
1271FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1272{
1273 /* mod is ignored, as are operand-size overrides. */
1274 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1275 IEMOP_HLP_MIN_386();
1276 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1277 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1278 else
1279 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1280
1281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1282 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1283 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1284 {
1285 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1286 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1287 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1288 iCrReg |= 8;
1289 }
1290 switch (iCrReg)
1291 {
1292 case 0: case 2: case 3: case 4: case 8:
1293 break;
1294 default:
1295 return IEMOP_RAISE_INVALID_OPCODE();
1296 }
1297 IEMOP_HLP_DONE_DECODING();
1298
1299 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1300}
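/*
 * Encoding sketch for the CR8 special case above: on CPUs with
 * fMovCr8In32Bit (AMD), 32-bit code without a REX prefix can still reach CR8
 * by prefixing the instruction with LOCK, which the iCrReg |= 8 above folds
 * into the register number.
 */
#if 0 /* illustrative sketch, not part of the build */
/* f0 0f 20 c0: lock mov eax, cr0 -> decoded as mov eax, cr8 (/0 + 8) */
#endif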
1301
1302
1303/** Opcode 0x0f 0x21. */
1304FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1305{
1306 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1307 IEMOP_HLP_MIN_386();
1308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1310 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1311 return IEMOP_RAISE_INVALID_OPCODE();
1312 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1313 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1314 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1315}
1316
1317
1318/** Opcode 0x0f 0x22. */
1319FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1320{
1321 /* mod is ignored, as are operand-size overrides. */
1322 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1323 IEMOP_HLP_MIN_386();
1324 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1325 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1326 else
1327 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1328
1329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1330 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1331 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1332 {
1333 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1334 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1335 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1336 iCrReg |= 8;
1337 }
1338 switch (iCrReg)
1339 {
1340 case 0: case 2: case 3: case 4: case 8:
1341 break;
1342 default:
1343 return IEMOP_RAISE_INVALID_OPCODE();
1344 }
1345 IEMOP_HLP_DONE_DECODING();
1346
1347 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1348}
1349
1350
1351/** Opcode 0x0f 0x23. */
1352FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1353{
1354 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1355 IEMOP_HLP_MIN_386();
1356 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1358 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1359 return IEMOP_RAISE_INVALID_OPCODE();
1360 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1361 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1362 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1363}
1364
1365
1366/** Opcode 0x0f 0x24. */
1367FNIEMOP_DEF(iemOp_mov_Rd_Td)
1368{
1369 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1370 /** @todo works on 386 and 486. */
1371 /* The RM byte is not considered, see testcase. */
1372 return IEMOP_RAISE_INVALID_OPCODE();
1373}
1374
1375
1376/** Opcode 0x0f 0x26. */
1377FNIEMOP_DEF(iemOp_mov_Td_Rd)
1378{
1379 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1380 /** @todo works on 386 and 486. */
1381 /* The RM byte is not considered, see testcase. */
1382 return IEMOP_RAISE_INVALID_OPCODE();
1383}
1384
1385
1386/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1387FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1388{
1389 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1391 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1392 {
1393 /*
1394 * Register, register.
1395 */
1396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1397 IEM_MC_BEGIN(0, 0);
1398 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1399 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1400 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1401 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1402 IEM_MC_ADVANCE_RIP();
1403 IEM_MC_END();
1404 }
1405 else
1406 {
1407 /*
1408 * Register, memory.
1409 */
1410 IEM_MC_BEGIN(0, 2);
1411 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1413
1414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1416 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1417 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1418
1419 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1420 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1421
1422 IEM_MC_ADVANCE_RIP();
1423 IEM_MC_END();
1424 }
1425 return VINF_SUCCESS;
1426}
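/*
 * Alignment sketch: unlike movups earlier in the file, movaps fetches with
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE, so a 16-byte misaligned memory operand
 * faults instead of loading. Conceptually (pseudo-check, not IEM code):
 */
#if 0 /* illustrative sketch, not part of the build */
if (GCPtrEffSrc & 15)
    /* -> raise #GP(0) */;
#endif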
1427
1428/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1429FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1430{
1431 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1433 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1434 {
1435 /*
1436 * Register, register.
1437 */
1438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1439 IEM_MC_BEGIN(0, 0);
1440 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1441 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1442 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1443 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1444 IEM_MC_ADVANCE_RIP();
1445 IEM_MC_END();
1446 }
1447 else
1448 {
1449 /*
1450 * Register, memory.
1451 */
1452 IEM_MC_BEGIN(0, 2);
1453 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1455
1456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1458 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1459 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1460
1461 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1462 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1463
1464 IEM_MC_ADVANCE_RIP();
1465 IEM_MC_END();
1466 }
1467 return VINF_SUCCESS;
1468}
1469
1470/* Opcode 0xf3 0x0f 0x28 - invalid */
1471/* Opcode 0xf2 0x0f 0x28 - invalid */
1472
1473/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
1474FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1475{
1476 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1479 {
1480 /*
1481 * Register, register.
1482 */
1483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1484 IEM_MC_BEGIN(0, 0);
1485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1487 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1488 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1489 IEM_MC_ADVANCE_RIP();
1490 IEM_MC_END();
1491 }
1492 else
1493 {
1494 /*
1495 * Memory, register.
1496 */
1497 IEM_MC_BEGIN(0, 2);
1498 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1500
1501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1503 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1504 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1505
1506 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1507 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1508
1509 IEM_MC_ADVANCE_RIP();
1510 IEM_MC_END();
1511 }
1512 return VINF_SUCCESS;
1513}
1514
1515/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
1516FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1517{
1518 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1521 {
1522 /*
1523 * Register, register.
1524 */
1525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1526 IEM_MC_BEGIN(0, 0);
1527 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1528 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1529 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1530 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1531 IEM_MC_ADVANCE_RIP();
1532 IEM_MC_END();
1533 }
1534 else
1535 {
1536 /*
1537 * Memory, register.
1538 */
1539 IEM_MC_BEGIN(0, 2);
1540 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1542
1543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1545 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1546 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1547
1548 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1549 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1550
1551 IEM_MC_ADVANCE_RIP();
1552 IEM_MC_END();
1553 }
1554 return VINF_SUCCESS;
1555}
1556
1557/* Opcode 0xf3 0x0f 0x29 - invalid */
1558/* Opcode 0xf2 0x0f 0x29 - invalid */
1559
1560
1561/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1562FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1563/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1564FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1565/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1566FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
1567/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1568FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
1569
1570
1571/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
1572FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1573{
1574 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1576 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1577 {
1578 /*
1579 * memory, register.
1580 */
1581 IEM_MC_BEGIN(0, 2);
1582 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1584
1585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1587 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1588 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1589
1590 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1591 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1592
1593 IEM_MC_ADVANCE_RIP();
1594 IEM_MC_END();
1595 }
1596 /* The register, register encoding is invalid. */
1597 else
1598 return IEMOP_RAISE_INVALID_OPCODE();
1599 return VINF_SUCCESS;
1600}
1601
1602/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
1603FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1604{
1605 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
1606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1607 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1608 {
1609 /*
1610 * memory, register.
1611 */
1612 IEM_MC_BEGIN(0, 2);
1613 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1615
1616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1618 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1619 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1620
1621 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1622 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1623
1624 IEM_MC_ADVANCE_RIP();
1625 IEM_MC_END();
1626 }
1627 /* The register, register encoding is invalid. */
1628 else
1629 return IEMOP_RAISE_INVALID_OPCODE();
1630 return VINF_SUCCESS;
1631}
1632/* Opcode 0xf3 0x0f 0x2b - invalid */
1633/* Opcode 0xf2 0x0f 0x2b - invalid */
1634
1635
1636/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1637FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1638/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1639FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1640/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1641FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1642/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1643FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1644
1645/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1646FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1647/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1648FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1649/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1650FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1651/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1652FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1653
1654/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1655FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1656/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1657FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1658/* Opcode 0xf3 0x0f 0x2e - invalid */
1659/* Opcode 0xf2 0x0f 0x2e - invalid */
1660
1661/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
1662FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1663/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
1664FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1665/* Opcode 0xf3 0x0f 0x2f - invalid */
1666/* Opcode 0xf2 0x0f 0x2f - invalid */
1667
1668/** Opcode 0x0f 0x30. */
1669FNIEMOP_DEF(iemOp_wrmsr)
1670{
1671 IEMOP_MNEMONIC(wrmsr, "wrmsr");
1672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1673 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
1674}
1675
1676
1677/** Opcode 0x0f 0x31. */
1678FNIEMOP_DEF(iemOp_rdtsc)
1679{
1680 IEMOP_MNEMONIC(rdtsc, "rdtsc");
1681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1682 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
1683}
1684
1685
1686/** Opcode 0x0f 0x32. */
1687FNIEMOP_DEF(iemOp_rdmsr)
1688{
1689 IEMOP_MNEMONIC(rdmsr, "rdmsr");
1690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1691 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
1692}
1693
1694
1695/** Opcode 0x0f 0x33. */
1696FNIEMOP_STUB(iemOp_rdpmc);
1697/** Opcode 0x0f 0x34. */
1698FNIEMOP_STUB(iemOp_sysenter);
1699/** Opcode 0x0f 0x35. */
1700FNIEMOP_STUB(iemOp_sysexit);
1701/** Opcode 0x0f 0x37. */
1702FNIEMOP_STUB(iemOp_getsec);
1703/** Opcode 0x0f 0x38. */
1704FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1705/** Opcode 0x0f 0x3a. */
1706FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1707
1708
1709/**
1710 * Implements a conditional move.
1711 *
1712 * Wish there was an obvious way to do this where we could share and reduce
1713 * code bloat.
1714 *
1715 * @param a_Cnd The conditional "microcode" operation.
1716 */
1717#define CMOV_X(a_Cnd) \
1718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1720 { \
1721 switch (pVCpu->iem.s.enmEffOpSize) \
1722 { \
1723 case IEMMODE_16BIT: \
1724 IEM_MC_BEGIN(0, 1); \
1725 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1726 a_Cnd { \
1727 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1728 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1729 } IEM_MC_ENDIF(); \
1730 IEM_MC_ADVANCE_RIP(); \
1731 IEM_MC_END(); \
1732 return VINF_SUCCESS; \
1733 \
1734 case IEMMODE_32BIT: \
1735 IEM_MC_BEGIN(0, 1); \
1736 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1737 a_Cnd { \
1738 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1739 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1740 } IEM_MC_ELSE() { \
1741 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1742 } IEM_MC_ENDIF(); \
1743 IEM_MC_ADVANCE_RIP(); \
1744 IEM_MC_END(); \
1745 return VINF_SUCCESS; \
1746 \
1747 case IEMMODE_64BIT: \
1748 IEM_MC_BEGIN(0, 1); \
1749 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1750 a_Cnd { \
1751 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1752 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1753 } IEM_MC_ENDIF(); \
1754 IEM_MC_ADVANCE_RIP(); \
1755 IEM_MC_END(); \
1756 return VINF_SUCCESS; \
1757 \
1758 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1759 } \
1760 } \
1761 else \
1762 { \
1763 switch (pVCpu->iem.s.enmEffOpSize) \
1764 { \
1765 case IEMMODE_16BIT: \
1766 IEM_MC_BEGIN(0, 2); \
1767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1768 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1770 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1771 a_Cnd { \
1772 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1773 } IEM_MC_ENDIF(); \
1774 IEM_MC_ADVANCE_RIP(); \
1775 IEM_MC_END(); \
1776 return VINF_SUCCESS; \
1777 \
1778 case IEMMODE_32BIT: \
1779 IEM_MC_BEGIN(0, 2); \
1780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1781 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1783 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1784 a_Cnd { \
1785 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1786 } IEM_MC_ELSE() { \
1787 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1788 } IEM_MC_ENDIF(); \
1789 IEM_MC_ADVANCE_RIP(); \
1790 IEM_MC_END(); \
1791 return VINF_SUCCESS; \
1792 \
1793 case IEMMODE_64BIT: \
1794 IEM_MC_BEGIN(0, 2); \
1795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1796 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1798 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1799 a_Cnd { \
1800 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1801 } IEM_MC_ENDIF(); \
1802 IEM_MC_ADVANCE_RIP(); \
1803 IEM_MC_END(); \
1804 return VINF_SUCCESS; \
1805 \
1806 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1807 } \
1808 } do {} while (0)
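
/*
 * Illustrative sketch (not part of the build, hypothetical pseudo-variables):
 * instantiated with a condition like IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF), the
 * 32-bit register case of CMOV_X conceptually becomes:
 *
 *      uint32_t u32Tmp;
 *      if (ZF is set)
 *      {
 *          u32Tmp = <read Ev>;
 *          <low dword of Gv> = u32Tmp;
 *      }
 *      <clear bits 63:32 of Gv>;       // done on both paths
 *
 * The unconditional clearing of the high dword is why only the 32-bit cases
 * carry an IEM_MC_ELSE() branch: a 16-bit write leaves the rest of the
 * register untouched, and a 64-bit write needs no separate clearing.
 */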
1809
1810
1811
1812/** Opcode 0x0f 0x40. */
1813FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1814{
1815 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
1816 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1817}
1818
1819
1820/** Opcode 0x0f 0x41. */
1821FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1822{
1823 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
1824 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1825}
1826
1827
1828/** Opcode 0x0f 0x42. */
1829FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
1830{
1831 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
1832 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
1833}
1834
1835
1836/** Opcode 0x0f 0x43. */
1837FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
1838{
1839 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
1840 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
1841}
1842
1843
1844/** Opcode 0x0f 0x44. */
1845FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
1846{
1847 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
1848 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
1849}
1850
1851
1852/** Opcode 0x0f 0x45. */
1853FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
1854{
1855 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
1856 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
1857}
1858
1859
1860/** Opcode 0x0f 0x46. */
1861FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
1862{
1863 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
1864 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1865}
1866
1867
1868/** Opcode 0x0f 0x47. */
1869FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
1870{
1871 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
1872 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1873}
1874
1875
1876/** Opcode 0x0f 0x48. */
1877FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
1878{
1879 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
1880 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
1881}
1882
1883
1884/** Opcode 0x0f 0x49. */
1885FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
1886{
1887 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
1888 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
1889}
1890
1891
1892/** Opcode 0x0f 0x4a. */
1893FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
1894{
1895 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
1896 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
1897}
1898
1899
1900/** Opcode 0x0f 0x4b. */
1901FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
1902{
1903 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
1904 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
1905}
1906
1907
1908/** Opcode 0x0f 0x4c. */
1909FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
1910{
1911 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
1912 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
1913}
1914
1915
1916/** Opcode 0x0f 0x4d. */
1917FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
1918{
1919 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
1920 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
1921}
1922
1923
1924/** Opcode 0x0f 0x4e. */
1925FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
1926{
1927 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
1928 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1929}
1930
1931
1932/** Opcode 0x0f 0x4f. */
1933FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
1934{
1935 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
1936 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1937}
1938
1939#undef CMOV_X
1940
1941/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
1942FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1943/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
1944FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1945/* Opcode 0xf3 0x0f 0x50 - invalid */
1946/* Opcode 0xf2 0x0f 0x50 - invalid */
1947
1948/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
1949FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1950/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
1951FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1952/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
1953FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1954/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1955FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1956
1957/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
1958FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1959/* Opcode 0x66 0x0f 0x52 - invalid */
1960/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
1961FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1962/* Opcode 0xf2 0x0f 0x52 - invalid */
1963
1964/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
1965FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1966/* Opcode 0x66 0x0f 0x53 - invalid */
1967/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
1968FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1969/* Opcode 0xf2 0x0f 0x53 - invalid */
1970
1971/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
1972FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1973/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
1974FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1975/* Opcode 0xf3 0x0f 0x54 - invalid */
1976/* Opcode 0xf2 0x0f 0x54 - invalid */
1977
1978/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
1979FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1980/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
1981FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1982/* Opcode 0xf3 0x0f 0x55 - invalid */
1983/* Opcode 0xf2 0x0f 0x55 - invalid */
1984
1985/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
1986FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1987/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
1988FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1989/* Opcode 0xf3 0x0f 0x56 - invalid */
1990/* Opcode 0xf2 0x0f 0x56 - invalid */
1991
1992/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
1993FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
1994/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
1995FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
1996/* Opcode 0xf3 0x0f 0x57 - invalid */
1997/* Opcode 0xf2 0x0f 0x57 - invalid */
1998
1999/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2000FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2001/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2002FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2003/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2004FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2005/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2006FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2007
2008/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2009FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2010/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2011FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2012/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2013FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2014/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2015FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2016
2017/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2018FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2019/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2020FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2021/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2022FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2023/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2024FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2025
2026/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2027FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2028/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2029FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2030/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2031FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2032/* Opcode 0xf2 0x0f 0x5b - invalid */
2033
2034/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2035FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2036/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2037FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2038/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2039FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2040/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2041FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2042
2043/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2044FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2045/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2046FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2047/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2048FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2049/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2050FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2051
2052/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2053FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2054/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2055FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2056/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2057FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2058/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2059FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2060
2061/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2062FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2063/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2064FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2065/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2066FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2067/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2068FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2069
2070/**
2071 * Common worker for MMX instructions on the forms:
2072 * pxxxx mm1, mm2/mem32
2073 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
2077 *
2078 * Exceptions type 4.
2079 */
2080FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2125
2126
2127/**
2128 * Common worker for SSE2 instructions on the forms:
2129 * pxxxx xmm1, xmm2/mem128
2130 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 64-bit memory access that must be 128-bit aligned.
2134 *
2135 * Exceptions type 4.
2136 */
2137FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
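
/*
 * Worked example (illustrative): the "low-low" workers above interleave the
 * low halves of the two operands.  For punpcklbw mm1, mm2 with
 * mm1 = b7 b6 b5 b4 b3 b2 b1 b0 and mm2 = c7 c6 c5 c4 c3 c2 c1 c0 the
 * result is:
 *
 *      mm1 = c3 b3 c2 b2 c1 b1 c0 b0
 *
 * Only the low half of the source is consumed, which is why the MMX memory
 * form fetches just a dword and the SSE form a qword (128-bit aligned).
 */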
2186
2187
2188/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2189FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2190{
2191 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2193}
2194
/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2201
2202/* Opcode 0xf3 0x0f 0x60 - invalid */
2203
2204
2205/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2206FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2207{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!, while Intel requires the MMX CPUID bit. */
2209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2210}
2211
2212/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2213FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2214{
2215 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2217}
2218
2219/* Opcode 0xf3 0x0f 0x61 - invalid */
2220
2221
2222/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2223FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2224{
2225 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2226 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2227}
2228
2229/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2230FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2231{
2232 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2233 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2234}
2235
2236/* Opcode 0xf3 0x0f 0x62 - invalid */
2237
2238
2239
2240/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2241FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2242/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2243FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2244/* Opcode 0xf3 0x0f 0x63 - invalid */
2245
2246/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2247FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2248/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2249FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2250/* Opcode 0xf3 0x0f 0x64 - invalid */
2251
2252/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2253FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2254/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2255FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2256/* Opcode 0xf3 0x0f 0x65 - invalid */
2257
2258/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2259FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2260/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2261FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2262/* Opcode 0xf3 0x0f 0x66 - invalid */
2263
2264/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2265FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, Wx */
2267FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2268/* Opcode 0xf3 0x0f 0x67 - invalid */
2269
2270
2271/**
2272 * Common worker for MMX instructions on the form:
2273 * pxxxx mm1, mm2/mem64
2274 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access.
2278 *
2279 * Exceptions type 4.
2280 */
2281FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2282{
2283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
2285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2286 {
2287 /*
2288 * Register, register.
2289 */
2290 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2291 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2293 IEM_MC_BEGIN(2, 0);
2294 IEM_MC_ARG(uint64_t *, pDst, 0);
2295 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2296 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2297 IEM_MC_PREPARE_FPU_USAGE();
2298 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2299 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2300 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2301 IEM_MC_ADVANCE_RIP();
2302 IEM_MC_END();
2303 }
2304 else
2305 {
2306 /*
2307 * Register, memory.
2308 */
2309 IEM_MC_BEGIN(2, 2);
2310 IEM_MC_ARG(uint64_t *, pDst, 0);
2311 IEM_MC_LOCAL(uint64_t, uSrc);
2312 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2314
2315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2317 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2318 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2319
2320 IEM_MC_PREPARE_FPU_USAGE();
2321 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2322 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2323
2324 IEM_MC_ADVANCE_RIP();
2325 IEM_MC_END();
2326 }
2327 return VINF_SUCCESS;
2328}
2329
2330
2331/**
2332 * Common worker for SSE2 instructions on the form:
2333 * pxxxx xmm1, xmm2/mem128
2334 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 128-bit aligned memory access where the implementation may read the
 * full 128 bits or only the upper 64 bits.
2338 *
2339 * Exceptions type 4.
2340 */
2341FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2342{
2343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2344 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2345 {
2346 /*
2347 * Register, register.
2348 */
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_BEGIN(2, 0);
2351 IEM_MC_ARG(uint128_t *, pDst, 0);
2352 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2353 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2354 IEM_MC_PREPARE_SSE_USAGE();
2355 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2356 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2357 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2358 IEM_MC_ADVANCE_RIP();
2359 IEM_MC_END();
2360 }
2361 else
2362 {
2363 /*
2364 * Register, memory.
2365 */
2366 IEM_MC_BEGIN(2, 2);
2367 IEM_MC_ARG(uint128_t *, pDst, 0);
2368 IEM_MC_LOCAL(uint128_t, uSrc);
2369 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2371
2372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2374 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2376
2377 IEM_MC_PREPARE_SSE_USAGE();
2378 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2379 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2380
2381 IEM_MC_ADVANCE_RIP();
2382 IEM_MC_END();
2383 }
2384 return VINF_SUCCESS;
2385}
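
/*
 * Worked example (illustrative): the "high-high" workers interleave the high
 * halves instead.  For punpckhbw mm1, mm2 with mm1 = b7..b0 and mm2 = c7..c0
 * the result is:
 *
 *      mm1 = c7 b7 c6 b6 c5 b5 c4 b4
 *
 * Hence the MMX memory form needs the full source qword, and the SSE form
 * reads a 128-bit aligned operand even though only the upper half is used.
 */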
2386
2387
2388/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2389FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2390{
2391 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2392 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2393}
2394
2395/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2396FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2397{
2398 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2399 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2400}
2401/* Opcode 0xf3 0x0f 0x68 - invalid */
2402
2403
2404/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2405FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2406{
2407 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2409}
2410
2411/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2412FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2413{
2414 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2418/* Opcode 0xf3 0x0f 0x69 - invalid */
2419
2420
2421/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2422FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2423{
2424 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2425 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2426}
2427
/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
{
    IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2434/* Opcode 0xf3 0x0f 0x6a - invalid */
2435
2436
2437/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2438FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2439/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2440FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2441/* Opcode 0xf3 0x0f 0x6b - invalid */
2442
2443
2444/* Opcode 0x0f 0x6c - invalid */
2445
2446/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2447FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2448{
2449 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2450 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2451}
2452
2453/* Opcode 0xf3 0x0f 0x6c - invalid */
2454/* Opcode 0xf2 0x0f 0x6c - invalid */
2455
2456
2457/* Opcode 0x0f 0x6d - invalid */
2458
/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
{
    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2465
2466/* Opcode 0xf3 0x0f 0x6d - invalid */
2467
2468
2469/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2470FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2471{
2472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2473 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2474 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2475 else
2476 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2478 {
2479 /* MMX, greg */
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_BEGIN(0, 1);
2482 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2483 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2484 IEM_MC_LOCAL(uint64_t, u64Tmp);
2485 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2486 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2487 else
2488 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2489 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2490 IEM_MC_ADVANCE_RIP();
2491 IEM_MC_END();
2492 }
2493 else
2494 {
2495 /* MMX, [mem] */
2496 IEM_MC_BEGIN(0, 2);
2497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2498 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* No immediate follows, so cbImm is zero (matters for RIP-relative addressing). */
2500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2501 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2502 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2503 {
2504 IEM_MC_LOCAL(uint64_t, u64Tmp);
2505 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2506 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2507 }
2508 else
2509 {
2510 IEM_MC_LOCAL(uint32_t, u32Tmp);
2511 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2512 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2513 }
2514 IEM_MC_ADVANCE_RIP();
2515 IEM_MC_END();
2516 }
2517 return VINF_SUCCESS;
2518}
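
/*
 * Worked example (illustrative): without REX.W, "movd mm0, eax" reads the
 * 32-bit general register, zero-extends it to 64 bits and stores the whole
 * qword into mm0.  With REX.W the instruction becomes "movq mm0, rax" and
 * copies all 64 bits unchanged.  The memory forms behave the same with a
 * dword respectively qword load.
 */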
2519
2520/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2521FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2522{
2523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
    else
        IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2528 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2529 {
2530 /* XMM, greg*/
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_BEGIN(0, 1);
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2535 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2536 {
2537 IEM_MC_LOCAL(uint64_t, u64Tmp);
2538 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2539 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2540 }
2541 else
2542 {
2543 IEM_MC_LOCAL(uint32_t, u32Tmp);
2544 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2545 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2546 }
2547 IEM_MC_ADVANCE_RIP();
2548 IEM_MC_END();
2549 }
2550 else
2551 {
2552 /* XMM, [mem] */
2553 IEM_MC_BEGIN(0, 2);
2554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* No immediate follows. */
2557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2558 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2559 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2560 {
2561 IEM_MC_LOCAL(uint64_t, u64Tmp);
2562 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2563 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2564 }
2565 else
2566 {
2567 IEM_MC_LOCAL(uint32_t, u32Tmp);
2568 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2570 }
2571 IEM_MC_ADVANCE_RIP();
2572 IEM_MC_END();
2573 }
2574 return VINF_SUCCESS;
2575}
2576
2577/* Opcode 0xf3 0x0f 0x6e - invalid */
2578
2579
2580/** Opcode 0x0f 0x6f - movq Pq, Qq */
2581FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2582{
2583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2584 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2586 {
2587 /*
2588 * Register, register.
2589 */
2590 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2591 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2593 IEM_MC_BEGIN(0, 1);
2594 IEM_MC_LOCAL(uint64_t, u64Tmp);
2595 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2596 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2597 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2598 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2599 IEM_MC_ADVANCE_RIP();
2600 IEM_MC_END();
2601 }
2602 else
2603 {
2604 /*
2605 * Register, memory.
2606 */
2607 IEM_MC_BEGIN(0, 2);
2608 IEM_MC_LOCAL(uint64_t, u64Tmp);
2609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2610
2611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2613 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2614 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2615 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2616 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2617
2618 IEM_MC_ADVANCE_RIP();
2619 IEM_MC_END();
2620 }
2621 return VINF_SUCCESS;
2622}
2623
2624/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2625FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2626{
2627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2628 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2630 {
2631 /*
2632 * Register, register.
2633 */
2634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2635 IEM_MC_BEGIN(0, 0);
2636 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2637 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2638 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2639 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2640 IEM_MC_ADVANCE_RIP();
2641 IEM_MC_END();
2642 }
2643 else
2644 {
2645 /*
2646 * Register, memory.
2647 */
2648 IEM_MC_BEGIN(0, 2);
2649 IEM_MC_LOCAL(uint128_t, u128Tmp);
2650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2651
2652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2656 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2657 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2658
2659 IEM_MC_ADVANCE_RIP();
2660 IEM_MC_END();
2661 }
2662 return VINF_SUCCESS;
2663}
2664
2665/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2666FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2667{
2668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2669 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2671 {
2672 /*
2673 * Register, register.
2674 */
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_BEGIN(0, 0);
2677 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2678 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2679 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2680 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2681 IEM_MC_ADVANCE_RIP();
2682 IEM_MC_END();
2683 }
2684 else
2685 {
2686 /*
2687 * Register, memory.
2688 */
2689 IEM_MC_BEGIN(0, 2);
2690 IEM_MC_LOCAL(uint128_t, u128Tmp);
2691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2692
2693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2696 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2697 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2698 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2699
2700 IEM_MC_ADVANCE_RIP();
2701 IEM_MC_END();
2702 }
2703 return VINF_SUCCESS;
2704}
2705
2706
2707/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2708FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2709{
2710 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2713 {
2714 /*
2715 * Register, register.
2716 */
2717 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2719
2720 IEM_MC_BEGIN(3, 0);
2721 IEM_MC_ARG(uint64_t *, pDst, 0);
2722 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2723 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2725 IEM_MC_PREPARE_FPU_USAGE();
2726 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2727 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2728 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2729 IEM_MC_ADVANCE_RIP();
2730 IEM_MC_END();
2731 }
2732 else
2733 {
2734 /*
2735 * Register, memory.
2736 */
2737 IEM_MC_BEGIN(3, 2);
2738 IEM_MC_ARG(uint64_t *, pDst, 0);
2739 IEM_MC_LOCAL(uint64_t, uSrc);
2740 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2742
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* The imm8 still follows, so cbImm is one (matters for RIP-relative addressing). */
2744 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2745 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2747 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2748
2749 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2750 IEM_MC_PREPARE_FPU_USAGE();
2751 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2752 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2753
2754 IEM_MC_ADVANCE_RIP();
2755 IEM_MC_END();
2756 }
2757 return VINF_SUCCESS;
2758}
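
/*
 * Worked example (illustrative): the imm8 ("bEvil") holds four 2-bit word
 * selectors, so the operation is
 *
 *      dst.w[i] = src.w[(bEvil >> (2 * i)) & 3]     for i = 0..3
 *
 * e.g. "pshufw mm0, mm1, 0x1B" (binary 00 01 10 11) reverses the four words
 * of mm1.  The pshufd/pshufhw/pshuflw variants below use the imm8 the same
 * way on their respective dword/word lanes.
 */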
2759
2760/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2761FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2762{
2763 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2765 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2766 {
2767 /*
2768 * Register, register.
2769 */
2770 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2772
2773 IEM_MC_BEGIN(3, 0);
2774 IEM_MC_ARG(uint128_t *, pDst, 0);
2775 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2776 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2777 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2778 IEM_MC_PREPARE_SSE_USAGE();
2779 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2780 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2781 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2782 IEM_MC_ADVANCE_RIP();
2783 IEM_MC_END();
2784 }
2785 else
2786 {
2787 /*
2788 * Register, memory.
2789 */
2790 IEM_MC_BEGIN(3, 2);
2791 IEM_MC_ARG(uint128_t *, pDst, 0);
2792 IEM_MC_LOCAL(uint128_t, uSrc);
2793 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2795
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* The imm8 still follows. */
2797 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2800 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2801
2802 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2803 IEM_MC_PREPARE_SSE_USAGE();
2804 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2805 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2806
2807 IEM_MC_ADVANCE_RIP();
2808 IEM_MC_END();
2809 }
2810 return VINF_SUCCESS;
2811}
2812
2813/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
2814FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2815{
2816 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2818 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2819 {
2820 /*
2821 * Register, register.
2822 */
2823 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2825
2826 IEM_MC_BEGIN(3, 0);
2827 IEM_MC_ARG(uint128_t *, pDst, 0);
2828 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2829 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2830 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2831 IEM_MC_PREPARE_SSE_USAGE();
2832 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2833 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2834 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2835 IEM_MC_ADVANCE_RIP();
2836 IEM_MC_END();
2837 }
2838 else
2839 {
2840 /*
2841 * Register, memory.
2842 */
2843 IEM_MC_BEGIN(3, 2);
2844 IEM_MC_ARG(uint128_t *, pDst, 0);
2845 IEM_MC_LOCAL(uint128_t, uSrc);
2846 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2848
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* The imm8 still follows. */
2850 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2851 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2853 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2854
2855 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2856 IEM_MC_PREPARE_SSE_USAGE();
2857 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2858 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2859
2860 IEM_MC_ADVANCE_RIP();
2861 IEM_MC_END();
2862 }
2863 return VINF_SUCCESS;
2864}
2865
2866/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
2867FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2868{
2869 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2872 {
2873 /*
2874 * Register, register.
2875 */
2876 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2878
2879 IEM_MC_BEGIN(3, 0);
2880 IEM_MC_ARG(uint128_t *, pDst, 0);
2881 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2882 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2883 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2884 IEM_MC_PREPARE_SSE_USAGE();
2885 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2886 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2887 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2888 IEM_MC_ADVANCE_RIP();
2889 IEM_MC_END();
2890 }
2891 else
2892 {
2893 /*
2894 * Register, memory.
2895 */
2896 IEM_MC_BEGIN(3, 2);
2897 IEM_MC_ARG(uint128_t *, pDst, 0);
2898 IEM_MC_LOCAL(uint128_t, uSrc);
2899 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2901
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* The imm8 still follows. */
2903 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2904 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2906 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2907
2908 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2909 IEM_MC_PREPARE_SSE_USAGE();
2910 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2911 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2912
2913 IEM_MC_ADVANCE_RIP();
2914 IEM_MC_END();
2915 }
2916 return VINF_SUCCESS;
2917}
2918
2919
2920/** Opcode 0x0f 0x71 11/2. */
2921FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2922
2923/** Opcode 0x66 0x0f 0x71 11/2. */
2924FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2925
2926/** Opcode 0x0f 0x71 11/4. */
2927FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2928
2929/** Opcode 0x66 0x0f 0x71 11/4. */
2930FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2931
2932/** Opcode 0x0f 0x71 11/6. */
2933FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2934
2935/** Opcode 0x66 0x0f 0x71 11/6. */
2936FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2937
2938
2939/**
2940 * Group 12 jump table for register variant.
2941 */
2942IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[8*4] =
2943{
2944 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2945 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2946 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2947 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2948 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2949 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2950 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2951 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2952};
2953
2954
2955/** Opcode 0x0f 0x71. */
2956FNIEMOP_DEF(iemOp_Grp12)
2957{
2958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2960 /* register, register */
2961 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2962 + pVCpu->iem.s.idxPrefix], bRm);
2963 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2964}
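
/*
 * Worked example (illustrative): the table is indexed as /reg * 4 + idxPrefix,
 * with idxPrefix being 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for 0xf2,
 * matching the column order of the table above.  So "66 0f 71 /2 ib" lands on
 * entry 2 * 4 + 1, i.e. iemOp_Grp12_vpsrlw_Hx_Ux_Ib.  Groups 13 and 14 below
 * dispatch the same way.
 */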
2965
2966
2967/** Opcode 0x0f 0x72 11/2. */
2968FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
2969
2970/** Opcode 0x66 0x0f 0x72 11/2. */
2971FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
2972
2973/** Opcode 0x0f 0x72 11/4. */
2974FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
2975
2976/** Opcode 0x66 0x0f 0x72 11/4. */
2977FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
2978
2979/** Opcode 0x0f 0x72 11/6. */
2980FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
2981
2982/** Opcode 0x66 0x0f 0x72 11/6. */
2983FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
2984
2985
2986/**
2987 * Group 13 jump table for register variant.
2988 */
2989IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[8*4] =
2990{
2991 /** @todo decode imm8? */
2992 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2993 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2994 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2995 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2996 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2997 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2998 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2999 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3000};
3001
3002/** Opcode 0x0f 0x72. */
3003FNIEMOP_DEF(iemOp_Grp13)
3004{
3005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3006 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3007 /* register, register */
3008 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3009 + pVCpu->iem.s.idxPrefix], bRm);
3010 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3011}
3012
3013
3014/** Opcode 0x0f 0x73 11/2. */
3015FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3016
3017/** Opcode 0x66 0x0f 0x73 11/2. */
3018FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3019
3020/** Opcode 0x66 0x0f 0x73 11/3. */
3021FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3022
3023/** Opcode 0x0f 0x73 11/6. */
3024FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3025
3026/** Opcode 0x66 0x0f 0x73 11/6. */
3027FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3028
3029/** Opcode 0x66 0x0f 0x73 11/7. */
3030FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3031
3032/**
3033 * Group 14 jump table for register variant.
3034 */
3035IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[8*4] =
3036{
3037 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3038 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3039 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3040 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3041 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3042 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3043 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3044 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3045};
3046
3047
3048/** Opcode 0x0f 0x73. */
3049FNIEMOP_DEF(iemOp_Grp14)
3050{
3051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3052 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3053 /* register, register */
3054 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3055 + pVCpu->iem.s.idxPrefix], bRm);
3056 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3057}
3058
3059
3060/**
3061 * Common worker for MMX instructions on the form:
3062 * pxxx mm1, mm2/mem64
3063 */
3064FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3065{
3066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3067 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3068 {
3069 /*
3070 * Register, register.
3071 */
3072 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3073 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3075 IEM_MC_BEGIN(2, 0);
3076 IEM_MC_ARG(uint64_t *, pDst, 0);
3077 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3078 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3079 IEM_MC_PREPARE_FPU_USAGE();
3080 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3081 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3082 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3083 IEM_MC_ADVANCE_RIP();
3084 IEM_MC_END();
3085 }
3086 else
3087 {
3088 /*
3089 * Register, memory.
3090 */
3091 IEM_MC_BEGIN(2, 2);
3092 IEM_MC_ARG(uint64_t *, pDst, 0);
3093 IEM_MC_LOCAL(uint64_t, uSrc);
3094 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3096
3097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3099 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3100 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3101
3102 IEM_MC_PREPARE_FPU_USAGE();
3103 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3104 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3105
3106 IEM_MC_ADVANCE_RIP();
3107 IEM_MC_END();
3108 }
3109 return VINF_SUCCESS;
3110}
3111
3112
3113/**
3114 * Common worker for SSE2 instructions on the forms:
3115 * pxxx xmm1, xmm2/mem128
3116 *
3117 * Proper alignment of the 128-bit operand is enforced.
3118 * Exceptions type 4. SSE2 cpuid checks.
3119 */
3120FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3121{
3122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3123 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3124 {
3125 /*
3126 * Register, register.
3127 */
3128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3129 IEM_MC_BEGIN(2, 0);
3130 IEM_MC_ARG(uint128_t *, pDst, 0);
3131 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3132 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3133 IEM_MC_PREPARE_SSE_USAGE();
3134 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3135 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3136 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3137 IEM_MC_ADVANCE_RIP();
3138 IEM_MC_END();
3139 }
3140 else
3141 {
3142 /*
3143 * Register, memory.
3144 */
3145 IEM_MC_BEGIN(2, 2);
3146 IEM_MC_ARG(uint128_t *, pDst, 0);
3147 IEM_MC_LOCAL(uint128_t, uSrc);
3148 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3150
3151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3153 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3154 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3155
3156 IEM_MC_PREPARE_SSE_USAGE();
3157 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3158 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3159
3160 IEM_MC_ADVANCE_RIP();
3161 IEM_MC_END();
3162 }
3163 return VINF_SUCCESS;
3164}
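
/*
 * Note (illustrative): IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces the 16-byte
 * alignment required by this instruction family, so e.g.
 * "pcmpeqb xmm0, [rsp+1]" raises #GP(0) here just as it would on real
 * hardware.
 */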
3165
3166
3167/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3168FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3169{
3170 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3171 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3172}
3173
3174/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3175FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3176{
3177 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3178 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3179}
3180
3181/* Opcode 0xf3 0x0f 0x74 - invalid */
3182/* Opcode 0xf2 0x0f 0x74 - invalid */
3183
3184
3185/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3186FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3187{
3188 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3189 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3190}
3191
3192/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3193FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3194{
3195 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3196 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3197}
3198
3199/* Opcode 0xf3 0x0f 0x75 - invalid */
3200/* Opcode 0xf2 0x0f 0x75 - invalid */
3201
3202
3203/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3204FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3205{
3206 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3207 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3208}
3209
3210/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3211FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3212{
3213 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3214 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3215}
3216
3217/* Opcode 0xf3 0x0f 0x76 - invalid */
3218/* Opcode 0xf2 0x0f 0x76 - invalid */
3219
3220
/** Opcode 0x0f 0x77 - emms (and the VEX variants vzeroupper / vzeroall) */
3222FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3223/* Opcode 0x66 0x0f 0x77 - invalid */
3224/* Opcode 0xf3 0x0f 0x77 - invalid */
3225/* Opcode 0xf2 0x0f 0x77 - invalid */
3226
3227/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3228FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3229/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3230FNIEMOP_STUB(iemOp_AmdGrp17);
3231/* Opcode 0xf3 0x0f 0x78 - invalid */
3232/* Opcode 0xf2 0x0f 0x78 - invalid */
3233
3234/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3235FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3236/* Opcode 0x66 0x0f 0x79 - invalid */
3237/* Opcode 0xf3 0x0f 0x79 - invalid */
3238/* Opcode 0xf2 0x0f 0x79 - invalid */
3239
3240/* Opcode 0x0f 0x7a - invalid */
3241/* Opcode 0x66 0x0f 0x7a - invalid */
3242/* Opcode 0xf3 0x0f 0x7a - invalid */
3243/* Opcode 0xf2 0x0f 0x7a - invalid */
3244
3245/* Opcode 0x0f 0x7b - invalid */
3246/* Opcode 0x66 0x0f 0x7b - invalid */
3247/* Opcode 0xf3 0x0f 0x7b - invalid */
3248/* Opcode 0xf2 0x0f 0x7b - invalid */
3249
3250/* Opcode 0x0f 0x7c - invalid */
3251/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3252FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3253/* Opcode 0xf3 0x0f 0x7c - invalid */
3254/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3255FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3256
3257/* Opcode 0x0f 0x7d - invalid */
3258/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3259FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3260/* Opcode 0xf3 0x0f 0x7d - invalid */
3261/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3262FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3263
3264
3265/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3266FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3267{
3268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3269 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3270 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3271 else
3272 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3273 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3274 {
3275 /* greg, MMX */
3276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3277 IEM_MC_BEGIN(0, 1);
3278 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3279 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3280 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3281 {
3282 IEM_MC_LOCAL(uint64_t, u64Tmp);
3283 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3284 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3285 }
3286 else
3287 {
3288 IEM_MC_LOCAL(uint32_t, u32Tmp);
3289 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3290 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3291 }
3292 IEM_MC_ADVANCE_RIP();
3293 IEM_MC_END();
3294 }
3295 else
3296 {
3297 /* [mem], MMX */
3298 IEM_MC_BEGIN(0, 2);
3299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3300 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* No immediate follows. */
3302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3303 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3304 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3305 {
3306 IEM_MC_LOCAL(uint64_t, u64Tmp);
3307 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3308 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3309 }
3310 else
3311 {
3312 IEM_MC_LOCAL(uint32_t, u32Tmp);
3313 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3314 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3315 }
3316 IEM_MC_ADVANCE_RIP();
3317 IEM_MC_END();
3318 }
3319 return VINF_SUCCESS;
3320}
3321
3322/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3323FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3324{
3325 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3326 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3327 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3328 else
3329 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3330 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3331 {
3332 /* greg, XMM */
3333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3334 IEM_MC_BEGIN(0, 1);
3335 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3336 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3337 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3338 {
3339 IEM_MC_LOCAL(uint64_t, u64Tmp);
3340 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3341 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3342 }
3343 else
3344 {
3345 IEM_MC_LOCAL(uint32_t, u32Tmp);
3346 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3347 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3348 }
3349 IEM_MC_ADVANCE_RIP();
3350 IEM_MC_END();
3351 }
3352 else
3353 {
3354 /* [mem], XMM */
3355 IEM_MC_BEGIN(0, 2);
3356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3357 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* No immediate follows. */
3359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3360 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3361 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3362 {
3363 IEM_MC_LOCAL(uint64_t, u64Tmp);
3364 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3365 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3366 }
3367 else
3368 {
3369 IEM_MC_LOCAL(uint32_t, u32Tmp);
3370 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3371 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3372 }
3373 IEM_MC_ADVANCE_RIP();
3374 IEM_MC_END();
3375 }
3376 return VINF_SUCCESS;
3377}
3378
3379/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3380FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3381/* Opcode 0xf2 0x0f 0x7e - invalid */
3382
3383
3384/** Opcode 0x0f 0x7f - movq Qq, Pq */
3385FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3386{
3387 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3389 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3390 {
3391 /*
3392 * Register, register.
3393 */
3394 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3395 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3397 IEM_MC_BEGIN(0, 1);
3398 IEM_MC_LOCAL(uint64_t, u64Tmp);
3399 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3400 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3401 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3402 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3403 IEM_MC_ADVANCE_RIP();
3404 IEM_MC_END();
3405 }
3406 else
3407 {
3408 /*
3409 * Register, memory.
3410 */
3411 IEM_MC_BEGIN(0, 2);
3412 IEM_MC_LOCAL(uint64_t, u64Tmp);
3413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3414
3415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3417 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3418 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3419
3420 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3421 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3422
3423 IEM_MC_ADVANCE_RIP();
3424 IEM_MC_END();
3425 }
3426 return VINF_SUCCESS;
3427}
3428
3429/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3430FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3431{
3432 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3434 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3435 {
3436 /*
3437 * Register, register.
3438 */
3439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3440 IEM_MC_BEGIN(0, 0);
3441 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3442 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3443 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3444 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3445 IEM_MC_ADVANCE_RIP();
3446 IEM_MC_END();
3447 }
3448 else
3449 {
3450 /*
3451 * Memory, register.
3452 */
3453 IEM_MC_BEGIN(0, 2);
3454 IEM_MC_LOCAL(uint128_t, u128Tmp);
3455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3456
3457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3459 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3460 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3461
3462 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
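 /* movdqa: the aligned store below raises #GP(0) if the effective address is not 16-byte aligned. */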
3463 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3464
3465 IEM_MC_ADVANCE_RIP();
3466 IEM_MC_END();
3467 }
3468 return VINF_SUCCESS;
3469}
3470
3471/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3472FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3473{
3474 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3476 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3477 {
3478 /*
3479 * Register, register.
3480 */
3481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3482 IEM_MC_BEGIN(0, 0);
3483 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3485 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3486 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3487 IEM_MC_ADVANCE_RIP();
3488 IEM_MC_END();
3489 }
3490 else
3491 {
3492 /*
3493 * Memory, register.
3494 */
3495 IEM_MC_BEGIN(0, 2);
3496 IEM_MC_LOCAL(uint128_t, u128Tmp);
3497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3498
3499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3501 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3502 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3503
3504 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
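 /* movdqu: plain unaligned store; unlike the _ALIGN_SSE store in movdqa above, no alignment check. */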
3505 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3506
3507 IEM_MC_ADVANCE_RIP();
3508 IEM_MC_END();
3509 }
3510 return VINF_SUCCESS;
3511}
3512
3513/* Opcode 0xf2 0x0f 0x7f - invalid */
3514
3515
3516
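/*
 * Jcc Jv (0x0f 0x80 thru 0x0f 0x8f): conditional jumps with a 16/32-bit
 * displacement.  Condition-to-EFLAGS mapping used by the decoders below:
 *      jo:  OF=1               jno: OF=0
 *      jc/jb/jnae: CF=1        jnc/jnb/jae: CF=0
 *      je/jz: ZF=1             jne/jnz: ZF=0
 *      jbe/jna: CF=1 or ZF=1   jnbe/ja: CF=0 and ZF=0
 *      js:  SF=1               jns: SF=0
 *      jp/jpe: PF=1            jnp/jpo: PF=0
 *      jl/jnge: SF!=OF         jnl/jge: SF=OF
 *      jle/jng: ZF=1 or SF!=OF jnle/jg: ZF=0 and SF=OF
 */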
3517/** Opcode 0x0f 0x80. */
3518FNIEMOP_DEF(iemOp_jo_Jv)
3519{
3520 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3521 IEMOP_HLP_MIN_386();
3522 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3523 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3524 {
3525 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3527
3528 IEM_MC_BEGIN(0, 0);
3529 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3530 IEM_MC_REL_JMP_S16(i16Imm);
3531 } IEM_MC_ELSE() {
3532 IEM_MC_ADVANCE_RIP();
3533 } IEM_MC_ENDIF();
3534 IEM_MC_END();
3535 }
3536 else
3537 {
3538 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3540
3541 IEM_MC_BEGIN(0, 0);
3542 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3543 IEM_MC_REL_JMP_S32(i32Imm);
3544 } IEM_MC_ELSE() {
3545 IEM_MC_ADVANCE_RIP();
3546 } IEM_MC_ENDIF();
3547 IEM_MC_END();
3548 }
3549 return VINF_SUCCESS;
3550}
3551
3552
3553/** Opcode 0x0f 0x81. */
3554FNIEMOP_DEF(iemOp_jno_Jv)
3555{
3556 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3557 IEMOP_HLP_MIN_386();
3558 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3559 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3560 {
3561 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3563
3564 IEM_MC_BEGIN(0, 0);
3565 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3566 IEM_MC_ADVANCE_RIP();
3567 } IEM_MC_ELSE() {
3568 IEM_MC_REL_JMP_S16(i16Imm);
3569 } IEM_MC_ENDIF();
3570 IEM_MC_END();
3571 }
3572 else
3573 {
3574 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3576
3577 IEM_MC_BEGIN(0, 0);
3578 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3579 IEM_MC_ADVANCE_RIP();
3580 } IEM_MC_ELSE() {
3581 IEM_MC_REL_JMP_S32(i32Imm);
3582 } IEM_MC_ENDIF();
3583 IEM_MC_END();
3584 }
3585 return VINF_SUCCESS;
3586}
3587
3588
3589/** Opcode 0x0f 0x82. */
3590FNIEMOP_DEF(iemOp_jc_Jv)
3591{
3592 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3593 IEMOP_HLP_MIN_386();
3594 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3595 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3596 {
3597 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3599
3600 IEM_MC_BEGIN(0, 0);
3601 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3602 IEM_MC_REL_JMP_S16(i16Imm);
3603 } IEM_MC_ELSE() {
3604 IEM_MC_ADVANCE_RIP();
3605 } IEM_MC_ENDIF();
3606 IEM_MC_END();
3607 }
3608 else
3609 {
3610 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3612
3613 IEM_MC_BEGIN(0, 0);
3614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3615 IEM_MC_REL_JMP_S32(i32Imm);
3616 } IEM_MC_ELSE() {
3617 IEM_MC_ADVANCE_RIP();
3618 } IEM_MC_ENDIF();
3619 IEM_MC_END();
3620 }
3621 return VINF_SUCCESS;
3622}
3623
3624
3625/** Opcode 0x0f 0x83. */
3626FNIEMOP_DEF(iemOp_jnc_Jv)
3627{
3628 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3629 IEMOP_HLP_MIN_386();
3630 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3631 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3632 {
3633 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3635
3636 IEM_MC_BEGIN(0, 0);
3637 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3638 IEM_MC_ADVANCE_RIP();
3639 } IEM_MC_ELSE() {
3640 IEM_MC_REL_JMP_S16(i16Imm);
3641 } IEM_MC_ENDIF();
3642 IEM_MC_END();
3643 }
3644 else
3645 {
3646 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3648
3649 IEM_MC_BEGIN(0, 0);
3650 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3651 IEM_MC_ADVANCE_RIP();
3652 } IEM_MC_ELSE() {
3653 IEM_MC_REL_JMP_S32(i32Imm);
3654 } IEM_MC_ENDIF();
3655 IEM_MC_END();
3656 }
3657 return VINF_SUCCESS;
3658}
3659
3660
3661/** Opcode 0x0f 0x84. */
3662FNIEMOP_DEF(iemOp_je_Jv)
3663{
3664 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3665 IEMOP_HLP_MIN_386();
3666 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3667 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3668 {
3669 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3671
3672 IEM_MC_BEGIN(0, 0);
3673 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3674 IEM_MC_REL_JMP_S16(i16Imm);
3675 } IEM_MC_ELSE() {
3676 IEM_MC_ADVANCE_RIP();
3677 } IEM_MC_ENDIF();
3678 IEM_MC_END();
3679 }
3680 else
3681 {
3682 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3684
3685 IEM_MC_BEGIN(0, 0);
3686 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3687 IEM_MC_REL_JMP_S32(i32Imm);
3688 } IEM_MC_ELSE() {
3689 IEM_MC_ADVANCE_RIP();
3690 } IEM_MC_ENDIF();
3691 IEM_MC_END();
3692 }
3693 return VINF_SUCCESS;
3694}
3695
3696
3697/** Opcode 0x0f 0x85. */
3698FNIEMOP_DEF(iemOp_jne_Jv)
3699{
3700 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3701 IEMOP_HLP_MIN_386();
3702 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3703 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3704 {
3705 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3707
3708 IEM_MC_BEGIN(0, 0);
3709 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3710 IEM_MC_ADVANCE_RIP();
3711 } IEM_MC_ELSE() {
3712 IEM_MC_REL_JMP_S16(i16Imm);
3713 } IEM_MC_ENDIF();
3714 IEM_MC_END();
3715 }
3716 else
3717 {
3718 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720
3721 IEM_MC_BEGIN(0, 0);
3722 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3723 IEM_MC_ADVANCE_RIP();
3724 } IEM_MC_ELSE() {
3725 IEM_MC_REL_JMP_S32(i32Imm);
3726 } IEM_MC_ENDIF();
3727 IEM_MC_END();
3728 }
3729 return VINF_SUCCESS;
3730}
3731
3732
3733/** Opcode 0x0f 0x86. */
3734FNIEMOP_DEF(iemOp_jbe_Jv)
3735{
3736 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3737 IEMOP_HLP_MIN_386();
3738 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3739 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3740 {
3741 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3743
3744 IEM_MC_BEGIN(0, 0);
3745 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3746 IEM_MC_REL_JMP_S16(i16Imm);
3747 } IEM_MC_ELSE() {
3748 IEM_MC_ADVANCE_RIP();
3749 } IEM_MC_ENDIF();
3750 IEM_MC_END();
3751 }
3752 else
3753 {
3754 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3756
3757 IEM_MC_BEGIN(0, 0);
3758 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3759 IEM_MC_REL_JMP_S32(i32Imm);
3760 } IEM_MC_ELSE() {
3761 IEM_MC_ADVANCE_RIP();
3762 } IEM_MC_ENDIF();
3763 IEM_MC_END();
3764 }
3765 return VINF_SUCCESS;
3766}
3767
3768
3769/** Opcode 0x0f 0x87. */
3770FNIEMOP_DEF(iemOp_jnbe_Jv)
3771{
3772 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3773 IEMOP_HLP_MIN_386();
3774 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3775 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3776 {
3777 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3779
3780 IEM_MC_BEGIN(0, 0);
3781 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3782 IEM_MC_ADVANCE_RIP();
3783 } IEM_MC_ELSE() {
3784 IEM_MC_REL_JMP_S16(i16Imm);
3785 } IEM_MC_ENDIF();
3786 IEM_MC_END();
3787 }
3788 else
3789 {
3790 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3792
3793 IEM_MC_BEGIN(0, 0);
3794 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3795 IEM_MC_ADVANCE_RIP();
3796 } IEM_MC_ELSE() {
3797 IEM_MC_REL_JMP_S32(i32Imm);
3798 } IEM_MC_ENDIF();
3799 IEM_MC_END();
3800 }
3801 return VINF_SUCCESS;
3802}
3803
3804
3805/** Opcode 0x0f 0x88. */
3806FNIEMOP_DEF(iemOp_js_Jv)
3807{
3808 IEMOP_MNEMONIC(js_Jv, "js Jv");
3809 IEMOP_HLP_MIN_386();
3810 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3811 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3812 {
3813 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3815
3816 IEM_MC_BEGIN(0, 0);
3817 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3818 IEM_MC_REL_JMP_S16(i16Imm);
3819 } IEM_MC_ELSE() {
3820 IEM_MC_ADVANCE_RIP();
3821 } IEM_MC_ENDIF();
3822 IEM_MC_END();
3823 }
3824 else
3825 {
3826 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3828
3829 IEM_MC_BEGIN(0, 0);
3830 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3831 IEM_MC_REL_JMP_S32(i32Imm);
3832 } IEM_MC_ELSE() {
3833 IEM_MC_ADVANCE_RIP();
3834 } IEM_MC_ENDIF();
3835 IEM_MC_END();
3836 }
3837 return VINF_SUCCESS;
3838}
3839
3840
3841/** Opcode 0x0f 0x89. */
3842FNIEMOP_DEF(iemOp_jns_Jv)
3843{
3844 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3845 IEMOP_HLP_MIN_386();
3846 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3847 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3848 {
3849 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3851
3852 IEM_MC_BEGIN(0, 0);
3853 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3854 IEM_MC_ADVANCE_RIP();
3855 } IEM_MC_ELSE() {
3856 IEM_MC_REL_JMP_S16(i16Imm);
3857 } IEM_MC_ENDIF();
3858 IEM_MC_END();
3859 }
3860 else
3861 {
3862 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3864
3865 IEM_MC_BEGIN(0, 0);
3866 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3867 IEM_MC_ADVANCE_RIP();
3868 } IEM_MC_ELSE() {
3869 IEM_MC_REL_JMP_S32(i32Imm);
3870 } IEM_MC_ENDIF();
3871 IEM_MC_END();
3872 }
3873 return VINF_SUCCESS;
3874}
3875
3876
3877/** Opcode 0x0f 0x8a. */
3878FNIEMOP_DEF(iemOp_jp_Jv)
3879{
3880 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3881 IEMOP_HLP_MIN_386();
3882 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3883 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3884 {
3885 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3887
3888 IEM_MC_BEGIN(0, 0);
3889 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3890 IEM_MC_REL_JMP_S16(i16Imm);
3891 } IEM_MC_ELSE() {
3892 IEM_MC_ADVANCE_RIP();
3893 } IEM_MC_ENDIF();
3894 IEM_MC_END();
3895 }
3896 else
3897 {
3898 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3900
3901 IEM_MC_BEGIN(0, 0);
3902 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3903 IEM_MC_REL_JMP_S32(i32Imm);
3904 } IEM_MC_ELSE() {
3905 IEM_MC_ADVANCE_RIP();
3906 } IEM_MC_ENDIF();
3907 IEM_MC_END();
3908 }
3909 return VINF_SUCCESS;
3910}
3911
3912
3913/** Opcode 0x0f 0x8b. */
3914FNIEMOP_DEF(iemOp_jnp_Jv)
3915{
3916 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3917 IEMOP_HLP_MIN_386();
3918 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3919 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3920 {
3921 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3923
3924 IEM_MC_BEGIN(0, 0);
3925 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3926 IEM_MC_ADVANCE_RIP();
3927 } IEM_MC_ELSE() {
3928 IEM_MC_REL_JMP_S16(i16Imm);
3929 } IEM_MC_ENDIF();
3930 IEM_MC_END();
3931 }
3932 else
3933 {
3934 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3936
3937 IEM_MC_BEGIN(0, 0);
3938 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3939 IEM_MC_ADVANCE_RIP();
3940 } IEM_MC_ELSE() {
3941 IEM_MC_REL_JMP_S32(i32Imm);
3942 } IEM_MC_ENDIF();
3943 IEM_MC_END();
3944 }
3945 return VINF_SUCCESS;
3946}
3947
3948
3949/** Opcode 0x0f 0x8c. */
3950FNIEMOP_DEF(iemOp_jl_Jv)
3951{
3952 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3953 IEMOP_HLP_MIN_386();
3954 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3955 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3956 {
3957 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3959
3960 IEM_MC_BEGIN(0, 0);
3961 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3962 IEM_MC_REL_JMP_S16(i16Imm);
3963 } IEM_MC_ELSE() {
3964 IEM_MC_ADVANCE_RIP();
3965 } IEM_MC_ENDIF();
3966 IEM_MC_END();
3967 }
3968 else
3969 {
3970 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3972
3973 IEM_MC_BEGIN(0, 0);
3974 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3975 IEM_MC_REL_JMP_S32(i32Imm);
3976 } IEM_MC_ELSE() {
3977 IEM_MC_ADVANCE_RIP();
3978 } IEM_MC_ENDIF();
3979 IEM_MC_END();
3980 }
3981 return VINF_SUCCESS;
3982}
3983
3984
3985/** Opcode 0x0f 0x8d. */
3986FNIEMOP_DEF(iemOp_jnl_Jv)
3987{
3988 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3989 IEMOP_HLP_MIN_386();
3990 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3991 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3992 {
3993 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3995
3996 IEM_MC_BEGIN(0, 0);
3997 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3998 IEM_MC_ADVANCE_RIP();
3999 } IEM_MC_ELSE() {
4000 IEM_MC_REL_JMP_S16(i16Imm);
4001 } IEM_MC_ENDIF();
4002 IEM_MC_END();
4003 }
4004 else
4005 {
4006 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4008
4009 IEM_MC_BEGIN(0, 0);
4010 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4011 IEM_MC_ADVANCE_RIP();
4012 } IEM_MC_ELSE() {
4013 IEM_MC_REL_JMP_S32(i32Imm);
4014 } IEM_MC_ENDIF();
4015 IEM_MC_END();
4016 }
4017 return VINF_SUCCESS;
4018}
4019
4020
4021/** Opcode 0x0f 0x8e. */
4022FNIEMOP_DEF(iemOp_jle_Jv)
4023{
4024 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4025 IEMOP_HLP_MIN_386();
4026 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4027 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4028 {
4029 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4031
4032 IEM_MC_BEGIN(0, 0);
4033 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4034 IEM_MC_REL_JMP_S16(i16Imm);
4035 } IEM_MC_ELSE() {
4036 IEM_MC_ADVANCE_RIP();
4037 } IEM_MC_ENDIF();
4038 IEM_MC_END();
4039 }
4040 else
4041 {
4042 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4044
4045 IEM_MC_BEGIN(0, 0);
4046 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4047 IEM_MC_REL_JMP_S32(i32Imm);
4048 } IEM_MC_ELSE() {
4049 IEM_MC_ADVANCE_RIP();
4050 } IEM_MC_ENDIF();
4051 IEM_MC_END();
4052 }
4053 return VINF_SUCCESS;
4054}
4055
4056
4057/** Opcode 0x0f 0x8f. */
4058FNIEMOP_DEF(iemOp_jnle_Jv)
4059{
4060 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4061 IEMOP_HLP_MIN_386();
4062 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4063 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4064 {
4065 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4067
4068 IEM_MC_BEGIN(0, 0);
4069 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4070 IEM_MC_ADVANCE_RIP();
4071 } IEM_MC_ELSE() {
4072 IEM_MC_REL_JMP_S16(i16Imm);
4073 } IEM_MC_ENDIF();
4074 IEM_MC_END();
4075 }
4076 else
4077 {
4078 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4080
4081 IEM_MC_BEGIN(0, 0);
4082 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4083 IEM_MC_ADVANCE_RIP();
4084 } IEM_MC_ELSE() {
4085 IEM_MC_REL_JMP_S32(i32Imm);
4086 } IEM_MC_ENDIF();
4087 IEM_MC_END();
4088 }
4089 return VINF_SUCCESS;
4090}
4091
4092
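/*
 * SETcc Eb (0x0f 0x90 thru 0x0f 0x9f): store 1 to the byte operand when the
 * condition holds and 0 otherwise, using the same EFLAGS conditions as the
 * Jcc family above.
 */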
4093/** Opcode 0x0f 0x90. */
4094FNIEMOP_DEF(iemOp_seto_Eb)
4095{
4096 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4097 IEMOP_HLP_MIN_386();
4098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4099
4100 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4101 * any way. AMD says it's "unused", whatever that means. We're
4102 * ignoring for now. */
4103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4104 {
4105 /* register target */
4106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4107 IEM_MC_BEGIN(0, 0);
4108 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4109 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4110 } IEM_MC_ELSE() {
4111 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4112 } IEM_MC_ENDIF();
4113 IEM_MC_ADVANCE_RIP();
4114 IEM_MC_END();
4115 }
4116 else
4117 {
4118 /* memory target */
4119 IEM_MC_BEGIN(0, 1);
4120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4123 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4124 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4125 } IEM_MC_ELSE() {
4126 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4127 } IEM_MC_ENDIF();
4128 IEM_MC_ADVANCE_RIP();
4129 IEM_MC_END();
4130 }
4131 return VINF_SUCCESS;
4132}
4133
4134
4135/** Opcode 0x0f 0x91. */
4136FNIEMOP_DEF(iemOp_setno_Eb)
4137{
4138 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4139 IEMOP_HLP_MIN_386();
4140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4141
4142 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4143 * any way. AMD says it's "unused", whatever that means. We're
4144 * ignoring for now. */
4145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4146 {
4147 /* register target */
4148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4149 IEM_MC_BEGIN(0, 0);
4150 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4151 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4152 } IEM_MC_ELSE() {
4153 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4154 } IEM_MC_ENDIF();
4155 IEM_MC_ADVANCE_RIP();
4156 IEM_MC_END();
4157 }
4158 else
4159 {
4160 /* memory target */
4161 IEM_MC_BEGIN(0, 1);
4162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4165 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4166 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4167 } IEM_MC_ELSE() {
4168 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4169 } IEM_MC_ENDIF();
4170 IEM_MC_ADVANCE_RIP();
4171 IEM_MC_END();
4172 }
4173 return VINF_SUCCESS;
4174}
4175
4176
4177/** Opcode 0x0f 0x92. */
4178FNIEMOP_DEF(iemOp_setc_Eb)
4179{
4180 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4181 IEMOP_HLP_MIN_386();
4182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4183
4184 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4185 * any way. AMD says it's "unused", whatever that means. We're
4186 * ignoring for now. */
4187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4188 {
4189 /* register target */
4190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4191 IEM_MC_BEGIN(0, 0);
4192 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4193 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4194 } IEM_MC_ELSE() {
4195 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4196 } IEM_MC_ENDIF();
4197 IEM_MC_ADVANCE_RIP();
4198 IEM_MC_END();
4199 }
4200 else
4201 {
4202 /* memory target */
4203 IEM_MC_BEGIN(0, 1);
4204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4207 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4208 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4209 } IEM_MC_ELSE() {
4210 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4211 } IEM_MC_ENDIF();
4212 IEM_MC_ADVANCE_RIP();
4213 IEM_MC_END();
4214 }
4215 return VINF_SUCCESS;
4216}
4217
4218
4219/** Opcode 0x0f 0x93. */
4220FNIEMOP_DEF(iemOp_setnc_Eb)
4221{
4222 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4223 IEMOP_HLP_MIN_386();
4224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4225
4226 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4227 * any way. AMD says it's "unused", whatever that means. We're
4228 * ignoring for now. */
4229 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4230 {
4231 /* register target */
4232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4233 IEM_MC_BEGIN(0, 0);
4234 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4235 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4236 } IEM_MC_ELSE() {
4237 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4238 } IEM_MC_ENDIF();
4239 IEM_MC_ADVANCE_RIP();
4240 IEM_MC_END();
4241 }
4242 else
4243 {
4244 /* memory target */
4245 IEM_MC_BEGIN(0, 1);
4246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4249 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4250 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4251 } IEM_MC_ELSE() {
4252 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4253 } IEM_MC_ENDIF();
4254 IEM_MC_ADVANCE_RIP();
4255 IEM_MC_END();
4256 }
4257 return VINF_SUCCESS;
4258}
4259
4260
4261/** Opcode 0x0f 0x94. */
4262FNIEMOP_DEF(iemOp_sete_Eb)
4263{
4264 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4265 IEMOP_HLP_MIN_386();
4266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4267
4268 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4269 * any way. AMD says it's "unused", whatever that means. We're
4270 * ignoring for now. */
4271 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4272 {
4273 /* register target */
4274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4275 IEM_MC_BEGIN(0, 0);
4276 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4277 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4278 } IEM_MC_ELSE() {
4279 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4280 } IEM_MC_ENDIF();
4281 IEM_MC_ADVANCE_RIP();
4282 IEM_MC_END();
4283 }
4284 else
4285 {
4286 /* memory target */
4287 IEM_MC_BEGIN(0, 1);
4288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4291 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4292 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4293 } IEM_MC_ELSE() {
4294 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4295 } IEM_MC_ENDIF();
4296 IEM_MC_ADVANCE_RIP();
4297 IEM_MC_END();
4298 }
4299 return VINF_SUCCESS;
4300}
4301
4302
4303/** Opcode 0x0f 0x95. */
4304FNIEMOP_DEF(iemOp_setne_Eb)
4305{
4306 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4307 IEMOP_HLP_MIN_386();
4308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4309
4310 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4311 * any way. AMD says it's "unused", whatever that means. We're
4312 * ignoring for now. */
4313 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4314 {
4315 /* register target */
4316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4317 IEM_MC_BEGIN(0, 0);
4318 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4319 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4320 } IEM_MC_ELSE() {
4321 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4322 } IEM_MC_ENDIF();
4323 IEM_MC_ADVANCE_RIP();
4324 IEM_MC_END();
4325 }
4326 else
4327 {
4328 /* memory target */
4329 IEM_MC_BEGIN(0, 1);
4330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4333 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4334 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4335 } IEM_MC_ELSE() {
4336 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4337 } IEM_MC_ENDIF();
4338 IEM_MC_ADVANCE_RIP();
4339 IEM_MC_END();
4340 }
4341 return VINF_SUCCESS;
4342}
4343
4344
4345/** Opcode 0x0f 0x96. */
4346FNIEMOP_DEF(iemOp_setbe_Eb)
4347{
4348 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4349 IEMOP_HLP_MIN_386();
4350 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4351
4352 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4353 * any way. AMD says it's "unused", whatever that means. We're
4354 * ignoring for now. */
4355 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4356 {
4357 /* register target */
4358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4359 IEM_MC_BEGIN(0, 0);
4360 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4361 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4362 } IEM_MC_ELSE() {
4363 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4364 } IEM_MC_ENDIF();
4365 IEM_MC_ADVANCE_RIP();
4366 IEM_MC_END();
4367 }
4368 else
4369 {
4370 /* memory target */
4371 IEM_MC_BEGIN(0, 1);
4372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4375 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4376 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4377 } IEM_MC_ELSE() {
4378 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4379 } IEM_MC_ENDIF();
4380 IEM_MC_ADVANCE_RIP();
4381 IEM_MC_END();
4382 }
4383 return VINF_SUCCESS;
4384}
4385
4386
4387/** Opcode 0x0f 0x97. */
4388FNIEMOP_DEF(iemOp_setnbe_Eb)
4389{
4390 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4391 IEMOP_HLP_MIN_386();
4392 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4393
4394 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4395 * any way. AMD says it's "unused", whatever that means. We're
4396 * ignoring for now. */
4397 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4398 {
4399 /* register target */
4400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4401 IEM_MC_BEGIN(0, 0);
4402 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4403 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4404 } IEM_MC_ELSE() {
4405 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4406 } IEM_MC_ENDIF();
4407 IEM_MC_ADVANCE_RIP();
4408 IEM_MC_END();
4409 }
4410 else
4411 {
4412 /* memory target */
4413 IEM_MC_BEGIN(0, 1);
4414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4417 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4418 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4419 } IEM_MC_ELSE() {
4420 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4421 } IEM_MC_ENDIF();
4422 IEM_MC_ADVANCE_RIP();
4423 IEM_MC_END();
4424 }
4425 return VINF_SUCCESS;
4426}
4427
4428
4429/** Opcode 0x0f 0x98. */
4430FNIEMOP_DEF(iemOp_sets_Eb)
4431{
4432 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4433 IEMOP_HLP_MIN_386();
4434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4435
4436 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4437 * any way. AMD says it's "unused", whatever that means. We're
4438 * ignoring for now. */
4439 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4440 {
4441 /* register target */
4442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4443 IEM_MC_BEGIN(0, 0);
4444 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4445 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4446 } IEM_MC_ELSE() {
4447 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4448 } IEM_MC_ENDIF();
4449 IEM_MC_ADVANCE_RIP();
4450 IEM_MC_END();
4451 }
4452 else
4453 {
4454 /* memory target */
4455 IEM_MC_BEGIN(0, 1);
4456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4459 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4460 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4461 } IEM_MC_ELSE() {
4462 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4463 } IEM_MC_ENDIF();
4464 IEM_MC_ADVANCE_RIP();
4465 IEM_MC_END();
4466 }
4467 return VINF_SUCCESS;
4468}
4469
4470
4471/** Opcode 0x0f 0x99. */
4472FNIEMOP_DEF(iemOp_setns_Eb)
4473{
4474 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4475 IEMOP_HLP_MIN_386();
4476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4477
4478 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4479 * any way. AMD says it's "unused", whatever that means. We're
4480 * ignoring for now. */
4481 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4482 {
4483 /* register target */
4484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4485 IEM_MC_BEGIN(0, 0);
4486 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4487 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4488 } IEM_MC_ELSE() {
4489 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4490 } IEM_MC_ENDIF();
4491 IEM_MC_ADVANCE_RIP();
4492 IEM_MC_END();
4493 }
4494 else
4495 {
4496 /* memory target */
4497 IEM_MC_BEGIN(0, 1);
4498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4501 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4502 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4503 } IEM_MC_ELSE() {
4504 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4505 } IEM_MC_ENDIF();
4506 IEM_MC_ADVANCE_RIP();
4507 IEM_MC_END();
4508 }
4509 return VINF_SUCCESS;
4510}
4511
4512
4513/** Opcode 0x0f 0x9a. */
4514FNIEMOP_DEF(iemOp_setp_Eb)
4515{
4516 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4517 IEMOP_HLP_MIN_386();
4518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4519
4520 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4521 * any way. AMD says it's "unused", whatever that means. We're
4522 * ignoring for now. */
4523 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4524 {
4525 /* register target */
4526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4527 IEM_MC_BEGIN(0, 0);
4528 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4529 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4530 } IEM_MC_ELSE() {
4531 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4532 } IEM_MC_ENDIF();
4533 IEM_MC_ADVANCE_RIP();
4534 IEM_MC_END();
4535 }
4536 else
4537 {
4538 /* memory target */
4539 IEM_MC_BEGIN(0, 1);
4540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4543 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4544 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4545 } IEM_MC_ELSE() {
4546 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4547 } IEM_MC_ENDIF();
4548 IEM_MC_ADVANCE_RIP();
4549 IEM_MC_END();
4550 }
4551 return VINF_SUCCESS;
4552}
4553
4554
4555/** Opcode 0x0f 0x9b. */
4556FNIEMOP_DEF(iemOp_setnp_Eb)
4557{
4558 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4559 IEMOP_HLP_MIN_386();
4560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4561
4562 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4563 * any way. AMD says it's "unused", whatever that means. We're
4564 * ignoring for now. */
4565 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4566 {
4567 /* register target */
4568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4569 IEM_MC_BEGIN(0, 0);
4570 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4571 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4572 } IEM_MC_ELSE() {
4573 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4574 } IEM_MC_ENDIF();
4575 IEM_MC_ADVANCE_RIP();
4576 IEM_MC_END();
4577 }
4578 else
4579 {
4580 /* memory target */
4581 IEM_MC_BEGIN(0, 1);
4582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4585 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4586 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4587 } IEM_MC_ELSE() {
4588 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4589 } IEM_MC_ENDIF();
4590 IEM_MC_ADVANCE_RIP();
4591 IEM_MC_END();
4592 }
4593 return VINF_SUCCESS;
4594}
4595
4596
4597/** Opcode 0x0f 0x9c. */
4598FNIEMOP_DEF(iemOp_setl_Eb)
4599{
4600 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4601 IEMOP_HLP_MIN_386();
4602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4603
4604 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4605 * any way. AMD says it's "unused", whatever that means. We're
4606 * ignoring for now. */
4607 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4608 {
4609 /* register target */
4610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4611 IEM_MC_BEGIN(0, 0);
4612 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4613 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4614 } IEM_MC_ELSE() {
4615 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4616 } IEM_MC_ENDIF();
4617 IEM_MC_ADVANCE_RIP();
4618 IEM_MC_END();
4619 }
4620 else
4621 {
4622 /* memory target */
4623 IEM_MC_BEGIN(0, 1);
4624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4627 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4628 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4629 } IEM_MC_ELSE() {
4630 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4631 } IEM_MC_ENDIF();
4632 IEM_MC_ADVANCE_RIP();
4633 IEM_MC_END();
4634 }
4635 return VINF_SUCCESS;
4636}
4637
4638
4639/** Opcode 0x0f 0x9d. */
4640FNIEMOP_DEF(iemOp_setnl_Eb)
4641{
4642 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4643 IEMOP_HLP_MIN_386();
4644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4645
4646 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4647 * any way. AMD says it's "unused", whatever that means. We're
4648 * ignoring for now. */
4649 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4650 {
4651 /* register target */
4652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4653 IEM_MC_BEGIN(0, 0);
4654 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4655 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4656 } IEM_MC_ELSE() {
4657 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4658 } IEM_MC_ENDIF();
4659 IEM_MC_ADVANCE_RIP();
4660 IEM_MC_END();
4661 }
4662 else
4663 {
4664 /* memory target */
4665 IEM_MC_BEGIN(0, 1);
4666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4669 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4670 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4671 } IEM_MC_ELSE() {
4672 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4673 } IEM_MC_ENDIF();
4674 IEM_MC_ADVANCE_RIP();
4675 IEM_MC_END();
4676 }
4677 return VINF_SUCCESS;
4678}
4679
4680
4681/** Opcode 0x0f 0x9e. */
4682FNIEMOP_DEF(iemOp_setle_Eb)
4683{
4684 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4685 IEMOP_HLP_MIN_386();
4686 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4687
4688 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4689 * any way. AMD says it's "unused", whatever that means. We're
4690 * ignoring for now. */
4691 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4692 {
4693 /* register target */
4694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4695 IEM_MC_BEGIN(0, 0);
4696 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4697 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4698 } IEM_MC_ELSE() {
4699 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4700 } IEM_MC_ENDIF();
4701 IEM_MC_ADVANCE_RIP();
4702 IEM_MC_END();
4703 }
4704 else
4705 {
4706 /* memory target */
4707 IEM_MC_BEGIN(0, 1);
4708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4711 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4712 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4713 } IEM_MC_ELSE() {
4714 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4715 } IEM_MC_ENDIF();
4716 IEM_MC_ADVANCE_RIP();
4717 IEM_MC_END();
4718 }
4719 return VINF_SUCCESS;
4720}
4721
4722
4723/** Opcode 0x0f 0x9f. */
4724FNIEMOP_DEF(iemOp_setnle_Eb)
4725{
4726 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4727 IEMOP_HLP_MIN_386();
4728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4729
4730 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4731 * any way. AMD says it's "unused", whatever that means. We're
4732 * ignoring for now. */
4733 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4734 {
4735 /* register target */
4736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4737 IEM_MC_BEGIN(0, 0);
4738 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4739 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4740 } IEM_MC_ELSE() {
4741 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4742 } IEM_MC_ENDIF();
4743 IEM_MC_ADVANCE_RIP();
4744 IEM_MC_END();
4745 }
4746 else
4747 {
4748 /* memory target */
4749 IEM_MC_BEGIN(0, 1);
4750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4753 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4754 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4755 } IEM_MC_ELSE() {
4756 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4757 } IEM_MC_ENDIF();
4758 IEM_MC_ADVANCE_RIP();
4759 IEM_MC_END();
4760 }
4761 return VINF_SUCCESS;
4762}
4763
4764
4765/**
4766 * Common 'push segment-register' helper.
4767 */
4768FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4769{
4770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4771 if (iReg < X86_SREG_FS)
4772 IEMOP_HLP_NO_64BIT();
4773 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4774
4775 switch (pVCpu->iem.s.enmEffOpSize)
4776 {
4777 case IEMMODE_16BIT:
4778 IEM_MC_BEGIN(0, 1);
4779 IEM_MC_LOCAL(uint16_t, u16Value);
4780 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4781 IEM_MC_PUSH_U16(u16Value);
4782 IEM_MC_ADVANCE_RIP();
4783 IEM_MC_END();
4784 break;
4785
4786 case IEMMODE_32BIT:
4787 IEM_MC_BEGIN(0, 1);
4788 IEM_MC_LOCAL(uint32_t, u32Value);
4789 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
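 /* Note: special SREG push - real CPUs may write only the low 16 bits
    here, leaving the high word of the stack slot untouched. */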
4790 IEM_MC_PUSH_U32_SREG(u32Value);
4791 IEM_MC_ADVANCE_RIP();
4792 IEM_MC_END();
4793 break;
4794
4795 case IEMMODE_64BIT:
4796 IEM_MC_BEGIN(0, 1);
4797 IEM_MC_LOCAL(uint64_t, u64Value);
4798 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4799 IEM_MC_PUSH_U64(u64Value);
4800 IEM_MC_ADVANCE_RIP();
4801 IEM_MC_END();
4802 break;
4803 }
4804
4805 return VINF_SUCCESS;
4806}
4807
4808
4809/** Opcode 0x0f 0xa0. */
4810FNIEMOP_DEF(iemOp_push_fs)
4811{
4812 IEMOP_MNEMONIC(push_fs, "push fs");
4813 IEMOP_HLP_MIN_386();
4814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4815 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4816}
4817
4818
4819/** Opcode 0x0f 0xa1. */
4820FNIEMOP_DEF(iemOp_pop_fs)
4821{
4822 IEMOP_MNEMONIC(pop_fs, "pop fs");
4823 IEMOP_HLP_MIN_386();
4824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4825 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4826}
4827
4828
4829/** Opcode 0x0f 0xa2. */
4830FNIEMOP_DEF(iemOp_cpuid)
4831{
4832 IEMOP_MNEMONIC(cpuid, "cpuid");
4833 IEMOP_HLP_MIN_486(); /* not all 486es. */
4834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4835 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4836}
4837
4838
4839/**
4840 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4841 * iemOp_bts_Ev_Gv.
4842 */
4843FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4844{
4845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4846 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4847
4848 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4849 {
4850 /* register destination. */
4851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4852 switch (pVCpu->iem.s.enmEffOpSize)
4853 {
4854 case IEMMODE_16BIT:
4855 IEM_MC_BEGIN(3, 0);
4856 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4857 IEM_MC_ARG(uint16_t, u16Src, 1);
4858 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4859
4860 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4861 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4862 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4863 IEM_MC_REF_EFLAGS(pEFlags);
4864 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4865
4866 IEM_MC_ADVANCE_RIP();
4867 IEM_MC_END();
4868 return VINF_SUCCESS;
4869
4870 case IEMMODE_32BIT:
4871 IEM_MC_BEGIN(3, 0);
4872 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4873 IEM_MC_ARG(uint32_t, u32Src, 1);
4874 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4875
4876 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4877 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4878 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4879 IEM_MC_REF_EFLAGS(pEFlags);
4880 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4881
4882 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4883 IEM_MC_ADVANCE_RIP();
4884 IEM_MC_END();
4885 return VINF_SUCCESS;
4886
4887 case IEMMODE_64BIT:
4888 IEM_MC_BEGIN(3, 0);
4889 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4890 IEM_MC_ARG(uint64_t, u64Src, 1);
4891 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4892
4893 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4894 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4895 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4896 IEM_MC_REF_EFLAGS(pEFlags);
4897 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4898
4899 IEM_MC_ADVANCE_RIP();
4900 IEM_MC_END();
4901 return VINF_SUCCESS;
4902
4903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4904 }
4905 }
4906 else
4907 {
4908 /* memory destination. */
4909
4910 uint32_t fAccess;
4911 if (pImpl->pfnLockedU16)
4912 fAccess = IEM_ACCESS_DATA_RW;
4913 else /* BT */
4914 fAccess = IEM_ACCESS_DATA_R;
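 /* BT only reads the destination and has no locked variant, while
    BTS/BTR/BTC map it read/write and honor the LOCK prefix below. */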
4915
4916 /** @todo test negative bit offsets! */
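 /*
  * In the memory form the bit offset in Gv is a signed index relative to
  * the r/m address: the SAR+SHL pairs below advance GCPtrEffDst by
  * (offset >> log2(operand width)) operand-sized units, and the AND keeps
  * the bit-within-unit part.  E.g. a 16-bit 'bt [mem], reg' with
  * reg=0xffff (-1) adjusts the address by -2 bytes and tests bit 15 of
  * that word, i.e. bit -1 overall.
  */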
4917 switch (pVCpu->iem.s.enmEffOpSize)
4918 {
4919 case IEMMODE_16BIT:
4920 IEM_MC_BEGIN(3, 2);
4921 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4922 IEM_MC_ARG(uint16_t, u16Src, 1);
4923 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4925 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4926
4927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4928 if (pImpl->pfnLockedU16)
4929 IEMOP_HLP_DONE_DECODING();
4930 else
4931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4932 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4933 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4934 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4935 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4936 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4937 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4938 IEM_MC_FETCH_EFLAGS(EFlags);
4939
4940 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4941 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4942 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4943 else
4944 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4945 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4946
4947 IEM_MC_COMMIT_EFLAGS(EFlags);
4948 IEM_MC_ADVANCE_RIP();
4949 IEM_MC_END();
4950 return VINF_SUCCESS;
4951
4952 case IEMMODE_32BIT:
4953 IEM_MC_BEGIN(3, 2);
4954 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4955 IEM_MC_ARG(uint32_t, u32Src, 1);
4956 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4958 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4959
4960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4961 if (pImpl->pfnLockedU16)
4962 IEMOP_HLP_DONE_DECODING();
4963 else
4964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4965 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4966 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4967 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4968 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4969 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4970 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4971 IEM_MC_FETCH_EFLAGS(EFlags);
4972
4973 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4974 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4975 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4976 else
4977 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4978 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
4979
4980 IEM_MC_COMMIT_EFLAGS(EFlags);
4981 IEM_MC_ADVANCE_RIP();
4982 IEM_MC_END();
4983 return VINF_SUCCESS;
4984
4985 case IEMMODE_64BIT:
4986 IEM_MC_BEGIN(3, 2);
4987 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4988 IEM_MC_ARG(uint64_t, u64Src, 1);
4989 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4991 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4992
4993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4994 if (pImpl->pfnLockedU16)
4995 IEMOP_HLP_DONE_DECODING();
4996 else
4997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4998 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4999 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5000 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5001 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5002 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5003 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5004 IEM_MC_FETCH_EFLAGS(EFlags);
5005
5006 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5007 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5008 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5009 else
5010 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5011 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5012
5013 IEM_MC_COMMIT_EFLAGS(EFlags);
5014 IEM_MC_ADVANCE_RIP();
5015 IEM_MC_END();
5016 return VINF_SUCCESS;
5017
5018 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5019 }
5020 }
5021}
5022
5023
5024/** Opcode 0x0f 0xa3. */
5025FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5026{
5027 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5028 IEMOP_HLP_MIN_386();
5029 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5030}
5031
5032
5033/**
5034 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5035 */
5036FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5037{
5038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5039 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5040
5041 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5042 {
5043 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5045
5046 switch (pVCpu->iem.s.enmEffOpSize)
5047 {
5048 case IEMMODE_16BIT:
5049 IEM_MC_BEGIN(4, 0);
5050 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5051 IEM_MC_ARG(uint16_t, u16Src, 1);
5052 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5053 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5054
5055 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5056 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5057 IEM_MC_REF_EFLAGS(pEFlags);
5058 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5059
5060 IEM_MC_ADVANCE_RIP();
5061 IEM_MC_END();
5062 return VINF_SUCCESS;
5063
5064 case IEMMODE_32BIT:
5065 IEM_MC_BEGIN(4, 0);
5066 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5067 IEM_MC_ARG(uint32_t, u32Src, 1);
5068 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5069 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5070
5071 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5072 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5073 IEM_MC_REF_EFLAGS(pEFlags);
5074 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5075
5076 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5077 IEM_MC_ADVANCE_RIP();
5078 IEM_MC_END();
5079 return VINF_SUCCESS;
5080
5081 case IEMMODE_64BIT:
5082 IEM_MC_BEGIN(4, 0);
5083 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5084 IEM_MC_ARG(uint64_t, u64Src, 1);
5085 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5086 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5087
5088 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5089 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5090 IEM_MC_REF_EFLAGS(pEFlags);
5091 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5092
5093 IEM_MC_ADVANCE_RIP();
5094 IEM_MC_END();
5095 return VINF_SUCCESS;
5096
5097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5098 }
5099 }
5100 else
5101 {
5102 switch (pVCpu->iem.s.enmEffOpSize)
5103 {
5104 case IEMMODE_16BIT:
5105 IEM_MC_BEGIN(4, 2);
5106 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5107 IEM_MC_ARG(uint16_t, u16Src, 1);
5108 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5109 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5111
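 /* Note: cbImm=1 - the shift-count immediate still follows the ModR/M
    bytes, which matters for RIP-relative addressing of the operand. */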
5112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5113 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5114 IEM_MC_ASSIGN(cShiftArg, cShift);
5115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5116 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5117 IEM_MC_FETCH_EFLAGS(EFlags);
5118 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5119 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5120
5121 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5122 IEM_MC_COMMIT_EFLAGS(EFlags);
5123 IEM_MC_ADVANCE_RIP();
5124 IEM_MC_END();
5125 return VINF_SUCCESS;
5126
5127 case IEMMODE_32BIT:
5128 IEM_MC_BEGIN(4, 2);
5129 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5130 IEM_MC_ARG(uint32_t, u32Src, 1);
5131 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5132 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5134
5135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5136 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5137 IEM_MC_ASSIGN(cShiftArg, cShift);
5138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5139 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5140 IEM_MC_FETCH_EFLAGS(EFlags);
5141 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5142 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5143
5144 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5145 IEM_MC_COMMIT_EFLAGS(EFlags);
5146 IEM_MC_ADVANCE_RIP();
5147 IEM_MC_END();
5148 return VINF_SUCCESS;
5149
5150 case IEMMODE_64BIT:
5151 IEM_MC_BEGIN(4, 2);
5152 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5153 IEM_MC_ARG(uint64_t, u64Src, 1);
5154 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5155 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5157
5158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5159 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5160 IEM_MC_ASSIGN(cShiftArg, cShift);
5161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5162 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5163 IEM_MC_FETCH_EFLAGS(EFlags);
5164 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5165 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5166
5167 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5168 IEM_MC_COMMIT_EFLAGS(EFlags);
5169 IEM_MC_ADVANCE_RIP();
5170 IEM_MC_END();
5171 return VINF_SUCCESS;
5172
5173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5174 }
5175 }
5176}
5177
5178
5179/**
5180 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5181 */
5182FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5183{
5184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5185 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5186
5187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5188 {
5189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5190
5191 switch (pVCpu->iem.s.enmEffOpSize)
5192 {
5193 case IEMMODE_16BIT:
5194 IEM_MC_BEGIN(4, 0);
5195 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5196 IEM_MC_ARG(uint16_t, u16Src, 1);
5197 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5198 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5199
5200 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5201 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5202 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
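 /* The raw CL value is passed along; masking of the count (mod 32, or
    mod 64 for the 64-bit form) is left to the pfnNormalUxx worker. */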
5203 IEM_MC_REF_EFLAGS(pEFlags);
5204 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5205
5206 IEM_MC_ADVANCE_RIP();
5207 IEM_MC_END();
5208 return VINF_SUCCESS;
5209
5210 case IEMMODE_32BIT:
5211 IEM_MC_BEGIN(4, 0);
5212 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5213 IEM_MC_ARG(uint32_t, u32Src, 1);
5214 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5215 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5216
5217 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5218 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5219 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5220 IEM_MC_REF_EFLAGS(pEFlags);
5221 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5222
5223 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5224 IEM_MC_ADVANCE_RIP();
5225 IEM_MC_END();
5226 return VINF_SUCCESS;
5227
5228 case IEMMODE_64BIT:
5229 IEM_MC_BEGIN(4, 0);
5230 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5231 IEM_MC_ARG(uint64_t, u64Src, 1);
5232 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5233 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5234
5235 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5236 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5237 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5238 IEM_MC_REF_EFLAGS(pEFlags);
5239 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5240
5241 IEM_MC_ADVANCE_RIP();
5242 IEM_MC_END();
5243 return VINF_SUCCESS;
5244
5245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5246 }
5247 }
5248 else
5249 {
5250 switch (pVCpu->iem.s.enmEffOpSize)
5251 {
5252 case IEMMODE_16BIT:
5253 IEM_MC_BEGIN(4, 2);
5254 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5255 IEM_MC_ARG(uint16_t, u16Src, 1);
5256 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5257 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5259
5260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5262 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5263 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5264 IEM_MC_FETCH_EFLAGS(EFlags);
5265 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5266 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5267
5268 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5269 IEM_MC_COMMIT_EFLAGS(EFlags);
5270 IEM_MC_ADVANCE_RIP();
5271 IEM_MC_END();
5272 return VINF_SUCCESS;
5273
5274 case IEMMODE_32BIT:
5275 IEM_MC_BEGIN(4, 2);
5276 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5277 IEM_MC_ARG(uint32_t, u32Src, 1);
5278 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5279 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5281
5282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5284 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5285 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5286 IEM_MC_FETCH_EFLAGS(EFlags);
5287 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5288 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5289
5290 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5291 IEM_MC_COMMIT_EFLAGS(EFlags);
5292 IEM_MC_ADVANCE_RIP();
5293 IEM_MC_END();
5294 return VINF_SUCCESS;
5295
5296 case IEMMODE_64BIT:
5297 IEM_MC_BEGIN(4, 2);
5298 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5299 IEM_MC_ARG(uint64_t, u64Src, 1);
5300 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5301 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5303
5304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5306 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5307 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5308 IEM_MC_FETCH_EFLAGS(EFlags);
5309 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5310 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5311
5312 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5313 IEM_MC_COMMIT_EFLAGS(EFlags);
5314 IEM_MC_ADVANCE_RIP();
5315 IEM_MC_END();
5316 return VINF_SUCCESS;
5317
5318 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5319 }
5320 }
5321}
5322
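/* Note: The decode above passes CL through unmodified; the architectural
   count masking for SHLD/SHRD (mod 32, or mod 64 for 64-bit operands) is
   presumably applied by the pfnNormalUxx assembly helpers. */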


/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}

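/* Illustrative encodings: 0f a4 /r ib is shld Ev,Gv,Ib and 0f a5 /r is
   shld Ev,Gv,CL. SHLD shifts the destination left, filling the vacated
   low bits from the high bits of the register operand. */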

/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}


/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();


/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}


/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}

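/* Note: FXSAVE/FXRSTOR operate on a 512-byte memory image that must be
   16-byte aligned; since the decoder above does no such checking, the
   alignment and CR0/CR4 related exception checks are presumably left to
   iemCImpl_fxsave/iemCImpl_fxrstor. */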

/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);


/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}

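/* Note: All three fences above gate on the guest reporting SSE2. When the
   host CPU lacks the matching fence instruction, iemAImpl_alt_mem_fence is
   used instead; presumably a locked memory operation standing in as a full
   barrier, which is a conservative but correct substitute. */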

/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);


/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}

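/* Note: Group 15 thus dispatches on three axes: the mod field (memory vs
   register forms), the ModR/M reg field, and the repeat/operand-size
   prefixes; any combination not handled above raises #UD. */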

/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}


/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

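/* Note: CMPXCHG compares the accumulator with the destination; on a match
   ZF is set and the source is stored, otherwise ZF is clear and the
   destination value lands in the accumulator. Storing the local AL copy
   back unconditionally above is equivalent, since the copy is unchanged
   in the success case. */
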
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

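/* Note: The far pointer operand is laid out offset-first in memory, which
   is why the selector word is fetched at displacement 2, 4 or 8 depending
   on the effective operand size. */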

/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

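/* Note: In 64-bit mode the IEM_MC_STORE_GREG_U32 stores above should already
   zero bits 63:32 of the destination register (the usual implicit zero
   extension of 32-bit writes), which would explain why no explicit
   high-dword clearing is needed here. */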

/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);


/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

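/* Note: For the immediate Group 8 forms the bit offset is masked to the
   operand width (0x0f/0x1f/0x3f) before use, so unlike the BT/BTS/BTR/BTC
   register-source forms no effective address adjustment is needed. The EA
   is calculated with cbImm=1 because the Ib byte still follows the
   ModR/M/SIB/displacement bytes. */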

/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}


/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}


/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);


/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}


/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);


/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

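/* Note: XADD exchanges the two operands and then stores their sum in the
   destination; the original destination value thus ends up in the register
   operand, which is why the register copy is written back only after the
   memory operand has been committed. */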

/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);


/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

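/* Note: MOVNTI is only a non-temporal store hint; ignoring the hint and
   performing a plain store, as above, is architecturally fine since the
   hint never changes the stored value. */
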
/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */


/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

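/* Note: CMPXCHG8B compares EDX:EAX with the 64-bit memory operand; on a
   match ZF is set and ECX:EBX is stored, otherwise ZF is cleared and the
   memory value is loaded into EDX:EAX, hence the conditional register
   write-back above. */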
6862
6863/** Opcode REX.W 0x0f 0xc7 !11/1. */
6864FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6865{
6866 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6867 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6868 {
6869#if 0
6870 RT_NOREF(bRm);
6871 IEMOP_BITCH_ABOUT_STUB();
6872 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6873#else
6874 IEM_MC_BEGIN(4, 3);
6875 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6876 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6877 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6878 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6879 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6880 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6882
6883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6884 IEMOP_HLP_DONE_DECODING();
6885 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6886 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6887
6888 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6889 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6890 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6891
6892 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6893 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6894 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6895
6896 IEM_MC_FETCH_EFLAGS(EFlags);
6897# ifdef RT_ARCH_AMD64
6898 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6899 {
6900 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6901 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6902 else
6903 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6904 }
6905 else
6906# endif
6907 {
 6908             /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
 6909                accesses and thus not atomic, which works fine in a uni-CPU guest
 6910                configuration (ignoring DMA).  If guest SMP is active we have no choice
 6911                but to use a rendezvous callback here.  Sigh. */
6912 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6913 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6914 else
6915 {
6916 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6917 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6918 }
6919 }
6920
6921 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6922 IEM_MC_COMMIT_EFLAGS(EFlags);
6923 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6924 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6925 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6926 IEM_MC_ENDIF();
6927 IEM_MC_ADVANCE_RIP();
6928
6929 IEM_MC_END();
6930 return VINF_SUCCESS;
6931#endif
6932 }
6933 Log(("cmpxchg16b -> #UD\n"));
6934 return IEMOP_RAISE_INVALID_OPCODE();
6935}
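
/* For reference, a minimal plain-C sketch of the architectural CMPXCHG16B
   operation.  Illustrative only (made-up names); the real code above must
   additionally raise #GP(0) on a misaligned operand and keep the 128-bit
   access atomic. */
#if 0
typedef struct REFU128 { uint64_t Lo, Hi; } REFU128;
static void refCmpXchg16b(REFU128 *pMem, uint64_t *pRax, uint64_t *pRdx,
                          uint64_t uRbx, uint64_t uRcx, uint32_t *pfZf)
{
    if (pMem->Lo == *pRax && pMem->Hi == *pRdx)
    {
        *pfZf    = 1;
        pMem->Lo = uRbx;        /* ZF=1: store RCX:RBX. */
        pMem->Hi = uRcx;
    }
    else
    {
        *pfZf = 0;              /* ZF=0: load RDX:RAX.  */
        *pRax = pMem->Lo;
        *pRdx = pMem->Hi;
    }
}
#endif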
6936
6937
6938/** Opcode 0x0f 0xc7 11/6. */
6939FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6940
6941/** Opcode 0x0f 0xc7 !11/6. */
6942FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6943
6944/** Opcode 0x66 0x0f 0xc7 !11/6. */
6945FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6946
6947/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6948FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6949
6950/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6951FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6952
6953
6954/** Opcode 0x0f 0xc7. */
6955FNIEMOP_DEF(iemOp_Grp9)
6956{
6957 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6959 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6960 {
6961 case 0: case 2: case 3: case 4: case 5:
6962 return IEMOP_RAISE_INVALID_OPCODE();
6963 case 1:
6964 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6965 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6966 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6967 return IEMOP_RAISE_INVALID_OPCODE();
6968 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6969 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6970 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6971 case 6:
6972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6973 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6974 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6975 {
6976 case 0:
6977 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6978 case IEM_OP_PRF_SIZE_OP:
6979 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6980 case IEM_OP_PRF_REPZ:
6981 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6982 default:
6983 return IEMOP_RAISE_INVALID_OPCODE();
6984 }
6985 case 7:
6986 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6987 {
6988 case 0:
6989 case IEM_OP_PRF_REPZ:
6990 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6991 default:
6992 return IEMOP_RAISE_INVALID_OPCODE();
6993 }
6994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6995 }
6996}
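
/* Summary of the group 9 decoding above, as implemented (memory forms are
   mod != 3):
      /0, /2../5:  invalid.
      /1  mem:     cmpxchg8b, or cmpxchg16b with REX.W; 066h/0f3h => invalid.
      /6  reg:     rdrand.
      /6  mem:     vmptrld (no prefix), vmclear (066h), vmxon (0f3h).
      /7:          vmptrst (no prefix or 0f3h).                              */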
6997
6998
6999/**
7000 * Common 'bswap register' helper.
7001 */
7002FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7003{
7004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7005 switch (pVCpu->iem.s.enmEffOpSize)
7006 {
7007 case IEMMODE_16BIT:
7008 IEM_MC_BEGIN(1, 0);
7009 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7010 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7011 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7012 IEM_MC_ADVANCE_RIP();
7013 IEM_MC_END();
7014 return VINF_SUCCESS;
7015
7016 case IEMMODE_32BIT:
7017 IEM_MC_BEGIN(1, 0);
7018 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7019 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7020 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7021 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7022 IEM_MC_ADVANCE_RIP();
7023 IEM_MC_END();
7024 return VINF_SUCCESS;
7025
7026 case IEMMODE_64BIT:
7027 IEM_MC_BEGIN(1, 0);
7028 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7029 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7030 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7031 IEM_MC_ADVANCE_RIP();
7032 IEM_MC_END();
7033 return VINF_SUCCESS;
7034
7035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7036 }
7037}
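
/* Note: Intel documents the result of BSWAP with a 16-bit operand as
   undefined, which is why the 16-bit case above simply reuses a 32-bit
   register reference.  For reference, the 32-bit byte swap itself is the
   usual shuffle (illustrative sketch, not the IEM assembly helper):
       u32 = (u32 >> 24) | ((u32 >> 8) & 0xff00) | ((u32 & 0xff00) << 8) | (u32 << 24);
*/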
7038
7039
7040/** Opcode 0x0f 0xc8. */
7041FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7042{
7043 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7044    /* Note! The Intel manual states that R8-R15 can be accessed by using a
7045       REX.X prefix.  It appears that REX.B is actually the correct prefix.
7046       For a parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7047 IEMOP_HLP_MIN_486();
7048 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7049}
7050
7051
7052/** Opcode 0x0f 0xc9. */
7053FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7054{
7055 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7056 IEMOP_HLP_MIN_486();
7057 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7058}
7059
7060
7061/** Opcode 0x0f 0xca. */
7062FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7063{
7064    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7065 IEMOP_HLP_MIN_486();
7066 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7067}
7068
7069
7070/** Opcode 0x0f 0xcb. */
7071FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7072{
7073    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7074 IEMOP_HLP_MIN_486();
7075 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7076}
7077
7078
7079/** Opcode 0x0f 0xcc. */
7080FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7081{
7082 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7083 IEMOP_HLP_MIN_486();
7084 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7085}
7086
7087
7088/** Opcode 0x0f 0xcd. */
7089FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7090{
7091 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7092 IEMOP_HLP_MIN_486();
7093 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7094}
7095
7096
7097/** Opcode 0x0f 0xce. */
7098FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7099{
7100 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7101 IEMOP_HLP_MIN_486();
7102 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7103}
7104
7105
7106/** Opcode 0x0f 0xcf. */
7107FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7108{
7109 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7110 IEMOP_HLP_MIN_486();
7111 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7112}
7113
7114
7115/* Opcode 0x0f 0xd0 - invalid */
7116/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7117FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7118/* Opcode 0xf3 0x0f 0xd0 - invalid */
7119/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7120FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7121
7122/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7123FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7124/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7125FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7126/* Opcode 0xf3 0x0f 0xd1 - invalid */
7127/* Opcode 0xf2 0x0f 0xd1 - invalid */
7128
7129/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7130FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7131/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7132FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7133/* Opcode 0xf3 0x0f 0xd2 - invalid */
7134/* Opcode 0xf2 0x0f 0xd2 - invalid */
7135
7136/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7137FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7138/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7139FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7140/* Opcode 0xf3 0x0f 0xd3 - invalid */
7141/* Opcode 0xf2 0x0f 0xd3 - invalid */
7142
7143/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7144FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7145/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7146FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7147/* Opcode 0xf3 0x0f 0xd4 - invalid */
7148/* Opcode 0xf2 0x0f 0xd4 - invalid */
7149
7150/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7151FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7152/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7153FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7154/* Opcode 0xf3 0x0f 0xd5 - invalid */
7155/* Opcode 0xf2 0x0f 0xd5 - invalid */
7156
7157/* Opcode 0x0f 0xd6 - invalid */
7158/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7159FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7160/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7161FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7162/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7163FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7164#if 0
7165FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7166{
7167    /* Docs say register only. */
7168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7169
7170 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7171 {
7172 case IEM_OP_PRF_SIZE_OP: /* SSE */
7173 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7174 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7175 IEM_MC_BEGIN(2, 0);
7176 IEM_MC_ARG(uint64_t *, pDst, 0);
7177 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7178 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7179 IEM_MC_PREPARE_SSE_USAGE();
7180 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7181 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7182 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7183 IEM_MC_ADVANCE_RIP();
7184 IEM_MC_END();
7185 return VINF_SUCCESS;
7186
7187 case 0: /* MMX */
7188            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7189 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7190 IEM_MC_BEGIN(2, 0);
7191 IEM_MC_ARG(uint64_t *, pDst, 0);
7192 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7193 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7194 IEM_MC_PREPARE_FPU_USAGE();
7195 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7196 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7197 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7198 IEM_MC_ADVANCE_RIP();
7199 IEM_MC_END();
7200 return VINF_SUCCESS;
7201
7202 default:
7203 return IEMOP_RAISE_INVALID_OPCODE();
7204 }
7205}
7206#endif
7207
7208
7209/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7210FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7211{
7212    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7213    /** @todo testcase: Check that the instruction implicitly clears the high
7214     * bits in 64-bit mode.  REX.W only becomes necessary once VLMAX > 256
7215     * and opcode modifications are made to work with the whole width (not
7216     * just 128). */
7217    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7218    /* Docs say register only. */
7219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7220 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7221 {
7222 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7223 IEM_MC_BEGIN(2, 0);
7224 IEM_MC_ARG(uint64_t *, pDst, 0);
7225 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7226 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7227 IEM_MC_PREPARE_FPU_USAGE();
7228 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7229 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7230 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7231 IEM_MC_ADVANCE_RIP();
7232 IEM_MC_END();
7233 return VINF_SUCCESS;
7234 }
7235 return IEMOP_RAISE_INVALID_OPCODE();
7236}
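
/* For reference: PMOVMSKB gathers the most significant bit of each source
   byte into the low bits of the destination GPR, zeroing the rest.  A
   plain-C sketch of the 64-bit (MMX) form, illustrative only: */
#if 0
static uint64_t refPMovMskBU64(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif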
7237
7238/** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7239FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7240{
7241    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7242    /** @todo testcase: Check that the instruction implicitly clears the high
7243     * bits in 64-bit mode.  REX.W only becomes necessary once VLMAX > 256
7244     * and opcode modifications are made to work with the whole width (not
7245     * just 128). */
7246    IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7247    /* Docs say register only. */
7248 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7249 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7250 {
7251 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7252 IEM_MC_BEGIN(2, 0);
7253 IEM_MC_ARG(uint64_t *, pDst, 0);
7254 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7255 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7256 IEM_MC_PREPARE_SSE_USAGE();
7257 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7258 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7259 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7260 IEM_MC_ADVANCE_RIP();
7261 IEM_MC_END();
7262 return VINF_SUCCESS;
7263 }
7264 return IEMOP_RAISE_INVALID_OPCODE();
7265}
7266
7267/* Opcode 0xf3 0x0f 0xd7 - invalid */
7268/* Opcode 0xf2 0x0f 0xd7 - invalid */
7269
7270
7271/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7272FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7273/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7274FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7275/* Opcode 0xf3 0x0f 0xd8 - invalid */
7276/* Opcode 0xf2 0x0f 0xd8 - invalid */
7277
7278/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7279FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7280/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7281FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7282/* Opcode 0xf3 0x0f 0xd9 - invalid */
7283/* Opcode 0xf2 0x0f 0xd9 - invalid */
7284
7285/** Opcode 0x0f 0xda - pminub Pq, Qq */
7286FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7287/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7288FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7289/* Opcode 0xf3 0x0f 0xda - invalid */
7290/* Opcode 0xf2 0x0f 0xda - invalid */
7291
7292/** Opcode 0x0f 0xdb - pand Pq, Qq */
7293FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7294/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7295FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7296/* Opcode 0xf3 0x0f 0xdb - invalid */
7297/* Opcode 0xf2 0x0f 0xdb - invalid */
7298
7299/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7300FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7301/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7302FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7303/* Opcode 0xf3 0x0f 0xdc - invalid */
7304/* Opcode 0xf2 0x0f 0xdc - invalid */
7305
7306/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7307FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7308/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7309FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7310/* Opcode 0xf3 0x0f 0xdd - invalid */
7311/* Opcode 0xf2 0x0f 0xdd - invalid */
7312
7313/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7314FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7315/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7316FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7317/* Opcode 0xf3 0x0f 0xde - invalid */
7318/* Opcode 0xf2 0x0f 0xde - invalid */
7319
7320/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7321FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7322/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7323FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7324/* Opcode 0xf3 0x0f 0xdf - invalid */
7325/* Opcode 0xf2 0x0f 0xdf - invalid */
7326
7327/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7328FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7329/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7330FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7331/* Opcode 0xf3 0x0f 0xe0 - invalid */
7332/* Opcode 0xf2 0x0f 0xe0 - invalid */
7333
7334/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7335FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7336/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7337FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7338/* Opcode 0xf3 0x0f 0xe1 - invalid */
7339/* Opcode 0xf2 0x0f 0xe1 - invalid */
7340
7341/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7342FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7343/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7344FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7345/* Opcode 0xf3 0x0f 0xe2 - invalid */
7346/* Opcode 0xf2 0x0f 0xe2 - invalid */
7347
7348/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7349FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7350/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7351FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7352/* Opcode 0xf3 0x0f 0xe3 - invalid */
7353/* Opcode 0xf2 0x0f 0xe3 - invalid */
7354
7355/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7356FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7357/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7358FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7359/* Opcode 0xf3 0x0f 0xe4 - invalid */
7360/* Opcode 0xf2 0x0f 0xe4 - invalid */
7361
7362/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7363FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7364/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7365FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7366/* Opcode 0xf3 0x0f 0xe5 - invalid */
7367/* Opcode 0xf2 0x0f 0xe5 - invalid */
7368
7369/* Opcode 0x0f 0xe6 - invalid */
7370/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7371FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7372/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7373FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7374/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7375FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7376
7377
7378/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7379FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7380{
7381 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7383 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7384 {
7385 /* Register, memory. */
7386 IEM_MC_BEGIN(0, 2);
7387 IEM_MC_LOCAL(uint64_t, uSrc);
7388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7389
7390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7392 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7393 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7394
7395 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7396 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7397
7398 IEM_MC_ADVANCE_RIP();
7399 IEM_MC_END();
7400 return VINF_SUCCESS;
7401 }
7402 /* The register, register encoding is invalid. */
7403 return IEMOP_RAISE_INVALID_OPCODE();
7404}
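
/* Note: MOVNTQ writes the 64-bit MMX register to memory using a non-temporal
   hint to minimize cache pollution.  The hint has no architectural effect on
   the result, so a plain 64-bit store is sufficient for emulation. */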
7405
7406/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7407FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7408{
7409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7410 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7411 {
7412 /* Register, memory. */
7413 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7414 IEM_MC_BEGIN(0, 2);
7415 IEM_MC_LOCAL(uint128_t, uSrc);
7416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7417
7418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7420 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7421 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7422
7423 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7424 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7425
7426 IEM_MC_ADVANCE_RIP();
7427 IEM_MC_END();
7428 return VINF_SUCCESS;
7429 }
7430
7431 /* The register, register encoding is invalid. */
7432 return IEMOP_RAISE_INVALID_OPCODE();
7433}
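
/* Note: unlike MOVNTQ above, MOVNTDQ requires its 16-byte memory operand to
   be naturally aligned, hence the IEM_MC_STORE_MEM_U128_ALIGN_SSE store,
   which (by its name) is assumed to raise #GP(0) on misaligned accesses. */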
7434
7435/* Opcode 0xf3 0x0f 0xe7 - invalid */
7436/* Opcode 0xf2 0x0f 0xe7 - invalid */
7437
7438
7439/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7440FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7441/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7442FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7443/* Opcode 0xf3 0x0f 0xe8 - invalid */
7444/* Opcode 0xf2 0x0f 0xe8 - invalid */
7445
7446/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7447FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7448/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7449FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7450/* Opcode 0xf3 0x0f 0xe9 - invalid */
7451/* Opcode 0xf2 0x0f 0xe9 - invalid */
7452
7453/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7454FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7455/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7456FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7457/* Opcode 0xf3 0x0f 0xea - invalid */
7458/* Opcode 0xf2 0x0f 0xea - invalid */
7459
7460/** Opcode 0x0f 0xeb - por Pq, Qq */
7461FNIEMOP_STUB(iemOp_por_Pq_Qq);
7462/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7463FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7464/* Opcode 0xf3 0x0f 0xeb - invalid */
7465/* Opcode 0xf2 0x0f 0xeb - invalid */
7466
7467/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7468FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7469/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7470FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7471/* Opcode 0xf3 0x0f 0xec - invalid */
7472/* Opcode 0xf2 0x0f 0xec - invalid */
7473
7474/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7475FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7476/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7477FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7478/* Opcode 0xf3 0x0f 0xed - invalid */
7479/* Opcode 0xf2 0x0f 0xed - invalid */
7480
7481/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7482FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7483/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7484FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7485/* Opcode 0xf3 0x0f 0xee - invalid */
7486/* Opcode 0xf2 0x0f 0xee - invalid */
7487
7488
7489/** Opcode 0x0f 0xef - pxor Pq, Qq */
7490FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7491{
7492 IEMOP_MNEMONIC(pxor, "pxor");
7493 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7494}
7495
7496/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7497FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7498{
7499 IEMOP_MNEMONIC(vpxor, "vpxor");
7500 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7501}
7502
7503/* Opcode 0xf3 0x0f 0xef - invalid */
7504/* Opcode 0xf2 0x0f 0xef - invalid */
7505
7506/* Opcode 0x0f 0xf0 - invalid */
7507/* Opcode 0x66 0x0f 0xf0 - invalid */
7508/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7509FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7510
7511/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7512FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7513/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7514FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7515/* Opcode 0xf2 0x0f 0xf1 - invalid */
7516
7517/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7518FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7519/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7520FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7521/* Opcode 0xf2 0x0f 0xf2 - invalid */
7522
7523/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7524FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7525/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7526FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7527/* Opcode 0xf2 0x0f 0xf3 - invalid */
7528
7529/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7530FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7531/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7532FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7533/* Opcode 0xf2 0x0f 0xf4 - invalid */
7534
7535/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7536FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7537/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7538FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7539/* Opcode 0xf2 0x0f 0xf5 - invalid */
7540
7541/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7542FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7543/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7544FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7545/* Opcode 0xf2 0x0f 0xf6 - invalid */
7546
7547/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7548FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7549/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7550FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7551/* Opcode 0xf2 0x0f 0xf7 - invalid */
7552
7553/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7554FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7555/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7556FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7557/* Opcode 0xf2 0x0f 0xf8 - invalid */
7558
7559/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7560FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7561/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7562FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7563/* Opcode 0xf2 0x0f 0xf9 - invalid */
7564
7565/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7566FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7567/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7568FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7569/* Opcode 0xf2 0x0f 0xfa - invalid */
7570
7571/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7572FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7573/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7574FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7575/* Opcode 0xf2 0x0f 0xfb - invalid */
7576
7577/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7578FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7579/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7580FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7581/* Opcode 0xf2 0x0f 0xfc - invalid */
7582
7583/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7584FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7585/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7586FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7587/* Opcode 0xf2 0x0f 0xfd - invalid */
7588
7589/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7590FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7591/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7592FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7593/* Opcode 0xf2 0x0f 0xfe - invalid */
7594
7595
7596/** Opcode **** 0x0f 0xff - UD0 */
7597FNIEMOP_DEF(iemOp_ud0)
7598{
7599 IEMOP_MNEMONIC(ud0, "ud0");
7600 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7601 {
7602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7603#ifndef TST_IEM_CHECK_MC
7604 RTGCPTR GCPtrEff;
7605 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7606 if (rcStrict != VINF_SUCCESS)
7607 return rcStrict;
7608#endif
7609 IEMOP_HLP_DONE_DECODING();
7610 }
7611 return IEMOP_RAISE_INVALID_OPCODE();
7612}
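
/* Note: On Intel CPUs UD0 consumes a ModR/M byte, so the effective address
   is decoded above before raising #UD; presumably this keeps the reported
   instruction length in sync with what real hardware does. */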
7613
7614
7615
7616/**
7617 * Two byte opcode map, first byte 0x0f.
7618 *
7619 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7620 * check if it needs updating as well when making changes.
7621 */
7622IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7623{
7624 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7625 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7626 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7627 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7628 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7629 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7630 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7631 /* 0x06 */ IEMOP_X4(iemOp_clts),
7632 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7633 /* 0x08 */ IEMOP_X4(iemOp_invd),
7634 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7635 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7636 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7637 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7638 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7639 /* 0x0e */ IEMOP_X4(iemOp_femms),
7640 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7641
7642 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7643 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7644 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7645 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7646 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7647 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7648 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7649 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7650 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7651 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7652 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7653 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7654 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7655 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7656 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7657 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7658
7659 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7660 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7661 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7662 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7663 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7664 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7665 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7666 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7667 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7668 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7669 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7670 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7671 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7672 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7673 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7674 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7675
7676 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7677 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7678 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7679 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7680 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7681 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7682 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7683 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7684 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7685 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7686 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7687 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7688 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7689 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7690 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7691 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7692
7693 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7694 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7695 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7696 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7697 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7698 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7699 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7700 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7701 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7702 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7703 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7704 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7705 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7706 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7707 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7708 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7709
7710 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7711 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7712 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7713 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7714 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7715 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7716 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7717 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7718 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7719 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7720 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7721 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7722 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7723 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7724 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7725 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7726
7727 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7728 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7729 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7730 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7731 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7732 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7733 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7734 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7735 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7736 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7737 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7738 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7739 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7740 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7741 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7742 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7743
7744 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7745 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7746 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7747 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7748 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7749 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7750 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7751 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7752
7753 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7754 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7755 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7756 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7757 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7758 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7759 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7760 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7761
7762 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7763 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7764 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7765 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7766 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7767 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7768 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7769 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7770 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7771 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7772 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7773 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7774 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7775 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7776 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7777 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7778
7779 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7780 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7781 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7782 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7783 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7784 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7785 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7786 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7787 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7788 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7789 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7790 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7791 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7792 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7793 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7794 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7795
7796 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7797 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7798 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7799 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7800 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7801 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7802 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7803 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7804 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7805 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7806 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7807 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7808 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7809 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7810 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7811 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7812
7813 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7814 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7815 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7816 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7817 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7818 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7819 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7820 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7821 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7822 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7823 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7824 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7825 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7826 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7827 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7828 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7829
7830 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7831 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7832 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7833 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7834 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7835 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7836 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7837 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7838 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7839 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7840 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7841 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7842 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7843 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7844 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7845 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7846
7847 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7848 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7849 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7850 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7851 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7852 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7853 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7854 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7855 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7856 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7857 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7858 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7859 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7860 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7861 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7862 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7863
7864 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7865 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7866 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7867 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7868 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7869 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7870 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7871 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7872 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7873 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7874 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7875 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7876 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7877 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7878 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7879 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7880
7881 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7882 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7883 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7884 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7885 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7886 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7887 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7888 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7889 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7890 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7891 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7892 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7893 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7894 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7895 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7896 /* 0xff */ IEMOP_X4(iemOp_ud0),
7897};
7898AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
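
/* The map above holds four entries per opcode byte - one each for the
   no-prefix, 066h, 0f3h and 0f2h columns - giving 256 * 4 = 1024 entries.
   The decoder presumably indexes it as bOpcode * 4 + prefix-column. */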
7899
7900
7901/**
7902 * VEX opcode map \#1.
7903 *
7904 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
7905 * it needs updating too when making changes.
7906 */
7907IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
7908{
7909 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7910 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7911 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7912 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7913 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7914 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7915 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7916 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7917 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7918 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7919 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7920 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7921 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
7922 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7923 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7924 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7925 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7926
7927 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7928 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7929 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7930 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7931 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7932 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7933 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7934 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7935 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7936 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7937 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7938 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7939 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7940 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7941 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7942 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7943
7944 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7945 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7946 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7947 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7948 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7949 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7950 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7951 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7952 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7953 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7954 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7955 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7956 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7957 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7958 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7959 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7960
7961 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7962 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7963 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7964 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7965 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7966 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7967 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7968 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7969 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7970 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7971 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7972 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7973 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7974 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7975 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7976 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7977
7978 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7979 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7980 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7981 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7982 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
7983 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
7984 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
7985 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
7986 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
7987 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
7988 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
7989 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
7990 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
7991 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
7992 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
7993 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
7994
7995 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7996 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7997 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7998 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7999 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8000 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8001 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8002 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8003 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8004 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8005 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8006 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8007 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8008 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8009 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8010 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8011
8012 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

/* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
/* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
/* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
/* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
/* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

/* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

/* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

/* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
/* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

/* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

/* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
/* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

/* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
/* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

/* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
/* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

/* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
/* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
/** @} */
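

/*
 * A minimal dispatch sketch, assuming the map is indexed as
 * opcode * 4 + prefix column, with the columns being [0] = no SIMD
 * prefix, [1] = 0x66, [2] = 0xF3 and [3] = 0xF2 (hence the 256 * 4 = 1024
 * entries asserted above).  The handler name and the idxPrefix member are
 * illustrative assumptions, not taken from this file.
 */
FNIEMOP_DEF(iemOp_2byteEscapeSketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b); /* the opcode byte following 0x0f */
    /* idxPrefix is assumed to track the last SIMD prefix seen:
       0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}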