VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@65769

Last change on this file since 65769 was 65769, checked in by vboxsync, 8 years ago

IEM: 0x0f 0x29 split up.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 307.4 KB
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 65769 2017-02-13 15:07:12Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/** @name ..... opcodes.
26 *
27 * @{
28 */
29
30/** @} */
31
32
33/** @name Two byte opcodes (first byte 0x0f).
34 *
35 * @{
36 */
37
38/** Opcode 0x0f 0x00 /0. */
39FNIEMOPRM_DEF(iemOp_Grp6_sldt)
40{
41 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
42 IEMOP_HLP_MIN_286();
43 IEMOP_HLP_NO_REAL_OR_V86_MODE();
44
45 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
46 {
47 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
48 switch (pVCpu->iem.s.enmEffOpSize)
49 {
50 case IEMMODE_16BIT:
51 IEM_MC_BEGIN(0, 1);
52 IEM_MC_LOCAL(uint16_t, u16Ldtr);
53 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
54 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 break;
58
59 case IEMMODE_32BIT:
60 IEM_MC_BEGIN(0, 1);
61 IEM_MC_LOCAL(uint32_t, u32Ldtr);
62 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
63 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
64 IEM_MC_ADVANCE_RIP();
65 IEM_MC_END();
66 break;
67
68 case IEMMODE_64BIT:
69 IEM_MC_BEGIN(0, 1);
70 IEM_MC_LOCAL(uint64_t, u64Ldtr);
71 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
72 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
73 IEM_MC_ADVANCE_RIP();
74 IEM_MC_END();
75 break;
76
77 IEM_NOT_REACHED_DEFAULT_CASE_RET();
78 }
79 }
80 else
81 {
82 IEM_MC_BEGIN(0, 2);
83 IEM_MC_LOCAL(uint16_t, u16Ldtr);
84 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
85 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
86 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
87 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
88 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
89 IEM_MC_ADVANCE_RIP();
90 IEM_MC_END();
91 }
92 return VINF_SUCCESS;
93}
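
/**
 * Reader's note (added summary, hedged): the IEM_MC_* statements used
 * throughout this file are macros forming a small "microcode" DSL; every
 * handler body follows the same shape as iemOp_Grp6_sldt above.
 *
 * @code
 *     IEM_MC_BEGIN(0, 1);             // open a block: 0 args, 1 local
 *     IEM_MC_LOCAL(uint16_t, u16Val); // declare a local variable
 *     // ... fetch / compute / store statements ...
 *     IEM_MC_ADVANCE_RIP();           // commit the instruction length
 *     IEM_MC_END();                   // close the block
 * @endcode
 */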
94
95
96/** Opcode 0x0f 0x00 /1. */
97FNIEMOPRM_DEF(iemOp_Grp6_str)
98{
99 IEMOP_MNEMONIC(str, "str Rv/Mw");
100 IEMOP_HLP_MIN_286();
101 IEMOP_HLP_NO_REAL_OR_V86_MODE();
102
103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
104 {
105 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
106 switch (pVCpu->iem.s.enmEffOpSize)
107 {
108 case IEMMODE_16BIT:
109 IEM_MC_BEGIN(0, 1);
110 IEM_MC_LOCAL(uint16_t, u16Tr);
111 IEM_MC_FETCH_TR_U16(u16Tr);
112 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 break;
116
117 case IEMMODE_32BIT:
118 IEM_MC_BEGIN(0, 1);
119 IEM_MC_LOCAL(uint32_t, u32Tr);
120 IEM_MC_FETCH_TR_U32(u32Tr);
121 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
122 IEM_MC_ADVANCE_RIP();
123 IEM_MC_END();
124 break;
125
126 case IEMMODE_64BIT:
127 IEM_MC_BEGIN(0, 1);
128 IEM_MC_LOCAL(uint64_t, u64Tr);
129 IEM_MC_FETCH_TR_U64(u64Tr);
130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 break;
134
135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
136 }
137 }
138 else
139 {
140 IEM_MC_BEGIN(0, 2);
141 IEM_MC_LOCAL(uint16_t, u16Tr);
142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
144 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
145 IEM_MC_FETCH_TR_U16(u16Tr);
146 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 }
150 return VINF_SUCCESS;
151}
152
153
154/** Opcode 0x0f 0x00 /2. */
155FNIEMOPRM_DEF(iemOp_Grp6_lldt)
156{
157 IEMOP_MNEMONIC(lldt, "lldt Ew");
158 IEMOP_HLP_MIN_286();
159 IEMOP_HLP_NO_REAL_OR_V86_MODE();
160
161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
162 {
163 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
164 IEM_MC_BEGIN(1, 0);
165 IEM_MC_ARG(uint16_t, u16Sel, 0);
166 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
167 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
168 IEM_MC_END();
169 }
170 else
171 {
172 IEM_MC_BEGIN(1, 1);
173 IEM_MC_ARG(uint16_t, u16Sel, 0);
174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
176 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
177 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
178 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
179 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
180 IEM_MC_END();
181 }
182 return VINF_SUCCESS;
183}
184
185
186/** Opcode 0x0f 0x00 /3. */
187FNIEMOPRM_DEF(iemOp_Grp6_ltr)
188{
189 IEMOP_MNEMONIC(ltr, "ltr Ew");
190 IEMOP_HLP_MIN_286();
191 IEMOP_HLP_NO_REAL_OR_V86_MODE();
192
193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
194 {
195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
196 IEM_MC_BEGIN(1, 0);
197 IEM_MC_ARG(uint16_t, u16Sel, 0);
198 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
199 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
200 IEM_MC_END();
201 }
202 else
203 {
204 IEM_MC_BEGIN(1, 1);
205 IEM_MC_ARG(uint16_t, u16Sel, 0);
206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
209 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
210 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
211 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
212 IEM_MC_END();
213 }
214 return VINF_SUCCESS;
215}
216
217
218/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
219FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
220{
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_NO_REAL_OR_V86_MODE();
223
224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
225 {
226 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
227 IEM_MC_BEGIN(2, 0);
228 IEM_MC_ARG(uint16_t, u16Sel, 0);
229 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
230 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
231 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(2, 1);
237 IEM_MC_ARG(uint16_t, u16Sel, 0);
238 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
241 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
242 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
243 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
244 IEM_MC_END();
245 }
246 return VINF_SUCCESS;
247}
248
249
250/** Opcode 0x0f 0x00 /4. */
251FNIEMOPRM_DEF(iemOp_Grp6_verr)
252{
253 IEMOP_MNEMONIC(verr, "verr Ew");
254 IEMOP_HLP_MIN_286();
255 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
256}
257
258
259/** Opcode 0x0f 0x00 /5. */
260FNIEMOPRM_DEF(iemOp_Grp6_verw)
261{
262 IEMOP_MNEMONIC(verw, "verw Ew");
263 IEMOP_HLP_MIN_286();
264 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
265}
266
267
268/**
269 * Group 6 jump table.
270 */
271IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
272{
273 iemOp_Grp6_sldt,
274 iemOp_Grp6_str,
275 iemOp_Grp6_lldt,
276 iemOp_Grp6_ltr,
277 iemOp_Grp6_verr,
278 iemOp_Grp6_verw,
279 iemOp_InvalidWithRM,
280 iemOp_InvalidWithRM
281};
282
283/** Opcode 0x0f 0x00. */
284FNIEMOP_DEF(iemOp_Grp6)
285{
286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
287 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
288}
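
/**
 * Decoding sketch (illustrative, not part of the original file): the ModRM
 * reg field alone selects the group 6 member; mod and rm then pick the
 * operand form inside the handler.
 *
 * @code
 *     uint8_t const  bRm  = 0xd8; // mod=3, reg=3, rm=0
 *     unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // 3 -> iemOp_Grp6_ltr
 *     unsigned const iRm  = bRm & X86_MODRM_RM_MASK;                            // 0 -> AX
 * @endcode
 */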
289
290
291/** Opcode 0x0f 0x01 /0. */
292FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
293{
294 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
295 IEMOP_HLP_MIN_286();
296 IEMOP_HLP_64BIT_OP_SIZE();
297 IEM_MC_BEGIN(2, 1);
298 IEM_MC_ARG(uint8_t, iEffSeg, 0);
299 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
303 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
304 IEM_MC_END();
305 return VINF_SUCCESS;
306}
307
308
309/** Opcode 0x0f 0x01 /0. */
310FNIEMOP_DEF(iemOp_Grp7_vmcall)
311{
312 IEMOP_BITCH_ABOUT_STUB();
313 return IEMOP_RAISE_INVALID_OPCODE();
314}
315
316
317/** Opcode 0x0f 0x01 /0. */
318FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
319{
320 IEMOP_BITCH_ABOUT_STUB();
321 return IEMOP_RAISE_INVALID_OPCODE();
322}
323
324
325/** Opcode 0x0f 0x01 /0. */
326FNIEMOP_DEF(iemOp_Grp7_vmresume)
327{
328 IEMOP_BITCH_ABOUT_STUB();
329 return IEMOP_RAISE_INVALID_OPCODE();
330}
331
332
333/** Opcode 0x0f 0x01 /0. */
334FNIEMOP_DEF(iemOp_Grp7_vmxoff)
335{
336 IEMOP_BITCH_ABOUT_STUB();
337 return IEMOP_RAISE_INVALID_OPCODE();
338}
339
340
341/** Opcode 0x0f 0x01 /1. */
342FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
343{
344 IEMOP_MNEMONIC(sidt, "sidt Ms");
345 IEMOP_HLP_MIN_286();
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(2, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
352 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
353 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
354 IEM_MC_END();
355 return VINF_SUCCESS;
356}
357
358
359/** Opcode 0x0f 0x01 /1. */
360FNIEMOP_DEF(iemOp_Grp7_monitor)
361{
362 IEMOP_MNEMONIC(monitor, "monitor");
363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
364 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
365}
366
367
368/** Opcode 0x0f 0x01 /1. */
369FNIEMOP_DEF(iemOp_Grp7_mwait)
370{
371 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
373 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
374}
375
376
377/** Opcode 0x0f 0x01 /2. */
378FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
379{
380 IEMOP_MNEMONIC(lgdt, "lgdt");
381 IEMOP_HLP_64BIT_OP_SIZE();
382 IEM_MC_BEGIN(3, 1);
383 IEM_MC_ARG(uint8_t, iEffSeg, 0);
384 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
389 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
390 IEM_MC_END();
391 return VINF_SUCCESS;
392}
393
394
395/** Opcode 0x0f 0x01 0xd0. */
396FNIEMOP_DEF(iemOp_Grp7_xgetbv)
397{
398 IEMOP_MNEMONIC(xgetbv, "xgetbv");
399 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
400 {
401 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
403 }
404 return IEMOP_RAISE_INVALID_OPCODE();
405}
406
407
408/** Opcode 0x0f 0x01 0xd1. */
409FNIEMOP_DEF(iemOp_Grp7_xsetbv)
410{
411 IEMOP_MNEMONIC(xsetbv, "xsetbv");
412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
413 {
414 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
416 }
417 return IEMOP_RAISE_INVALID_OPCODE();
418}
419
420
421/** Opcode 0x0f 0x01 /3. */
422FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
423{
424 IEMOP_MNEMONIC(lidt, "lidt");
425 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
426 ? IEMMODE_64BIT
427 : pVCpu->iem.s.enmEffOpSize;
428 IEM_MC_BEGIN(3, 1);
429 IEM_MC_ARG(uint8_t, iEffSeg, 0);
430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
431 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
434 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
435 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
436 IEM_MC_END();
437 return VINF_SUCCESS;
438}
439
440
441/** Opcode 0x0f 0x01 0xd8. */
442FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
443
444/** Opcode 0x0f 0x01 0xd9. */
445FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
446
447/** Opcode 0x0f 0x01 0xda. */
448FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
449
450/** Opcode 0x0f 0x01 0xdb. */
451FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
452
453/** Opcode 0x0f 0x01 0xdc. */
454FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
455
456/** Opcode 0x0f 0x01 0xdd. */
457FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
458
459/** Opcode 0x0f 0x01 0xde. */
460FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
461
462/** Opcode 0x0f 0x01 0xdf. */
463FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
464
465/** Opcode 0x0f 0x01 /4. */
466FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
467{
468 IEMOP_MNEMONIC(smsw, "smsw");
469 IEMOP_HLP_MIN_286();
470 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
471 {
472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
473 switch (pVCpu->iem.s.enmEffOpSize)
474 {
475 case IEMMODE_16BIT:
476 IEM_MC_BEGIN(0, 1);
477 IEM_MC_LOCAL(uint16_t, u16Tmp);
478 IEM_MC_FETCH_CR0_U16(u16Tmp);
479 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
480 { /* likely */ }
481 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
482 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
483 else
484 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
485 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
486 IEM_MC_ADVANCE_RIP();
487 IEM_MC_END();
488 return VINF_SUCCESS;
489
490 case IEMMODE_32BIT:
491 IEM_MC_BEGIN(0, 1);
492 IEM_MC_LOCAL(uint32_t, u32Tmp);
493 IEM_MC_FETCH_CR0_U32(u32Tmp);
494 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
495 IEM_MC_ADVANCE_RIP();
496 IEM_MC_END();
497 return VINF_SUCCESS;
498
499 case IEMMODE_64BIT:
500 IEM_MC_BEGIN(0, 1);
501 IEM_MC_LOCAL(uint64_t, u64Tmp);
502 IEM_MC_FETCH_CR0_U64(u64Tmp);
503 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
504 IEM_MC_ADVANCE_RIP();
505 IEM_MC_END();
506 return VINF_SUCCESS;
507
508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
509 }
510 }
511 else
512 {
513 /* Ignore operand size here, memory refs are always 16-bit. */
514 IEM_MC_BEGIN(0, 2);
515 IEM_MC_LOCAL(uint16_t, u16Tmp);
516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
519 IEM_MC_FETCH_CR0_U16(u16Tmp);
520 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
521 { /* likely */ }
522 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
523 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
524 else
525 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
526 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
527 IEM_MC_ADVANCE_RIP();
528 IEM_MC_END();
529 return VINF_SUCCESS;
530 }
531}
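
/**
 * Mask sketch (inferred from the code above, an assumption rather than SDM
 * wording): the 286 implements only MSW bits 0..3 and reads the rest as
 * ones, the 386 additionally implements ET (bit 4), and 486+ returns the
 * CR0 bits unmodified.
 *
 * @code
 *     // 286:  smsw -> (CR0 & 0x000f) | 0xfff0
 *     // 386:  smsw -> (CR0 & 0x001f) | 0xffe0
 *     // 486+: smsw -> CR0 (truncated to the operand size)
 * @endcode
 */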
532
533
534/** Opcode 0x0f 0x01 /6. */
535FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
536{
537 /* The operand size is effectively ignored, all is 16-bit and only the
538 low four bits (PE, MP, EM and TS) are used. */
539 IEMOP_MNEMONIC(lmsw, "lmsw");
540 IEMOP_HLP_MIN_286();
541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
542 {
543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
544 IEM_MC_BEGIN(1, 0);
545 IEM_MC_ARG(uint16_t, u16Tmp, 0);
546 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
547 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
548 IEM_MC_END();
549 }
550 else
551 {
552 IEM_MC_BEGIN(1, 1);
553 IEM_MC_ARG(uint16_t, u16Tmp, 0);
554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
557 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
558 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
559 IEM_MC_END();
560 }
561 return VINF_SUCCESS;
562}
563
564
565/** Opcode 0x0f 0x01 /7. */
566FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
567{
568 IEMOP_MNEMONIC(invlpg, "invlpg");
569 IEMOP_HLP_MIN_486();
570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
571 IEM_MC_BEGIN(1, 1);
572 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
574 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
575 IEM_MC_END();
576 return VINF_SUCCESS;
577}
578
579
580/** Opcode 0x0f 0x01 /7. */
581FNIEMOP_DEF(iemOp_Grp7_swapgs)
582{
583 IEMOP_MNEMONIC(swapgs, "swapgs");
584 IEMOP_HLP_ONLY_64BIT();
585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
586 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
587}
588
589
590/** Opcode 0x0f 0x01 /7. */
591FNIEMOP_DEF(iemOp_Grp7_rdtscp)
592{
593 NOREF(pVCpu);
594 IEMOP_BITCH_ABOUT_STUB();
595 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
596}
597
598
599/** Opcode 0x0f 0x01. */
600FNIEMOP_DEF(iemOp_Grp7)
601{
602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
603 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
604 {
605 case 0:
606 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
607 return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
608 switch (bRm & X86_MODRM_RM_MASK)
609 {
610 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
611 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
612 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
613 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
614 }
615 return IEMOP_RAISE_INVALID_OPCODE();
616
617 case 1:
618 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
619 return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
620 switch (bRm & X86_MODRM_RM_MASK)
621 {
622 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
623 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
624 }
625 return IEMOP_RAISE_INVALID_OPCODE();
626
627 case 2:
628 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
629 return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
630 switch (bRm & X86_MODRM_RM_MASK)
631 {
632 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
633 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
634 }
635 return IEMOP_RAISE_INVALID_OPCODE();
636
637 case 3:
638 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
639 return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
640 switch (bRm & X86_MODRM_RM_MASK)
641 {
642 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
643 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
644 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
645 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
646 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
647 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
648 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
649 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
651 }
652
653 case 4:
654 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
655
656 case 5:
657 return IEMOP_RAISE_INVALID_OPCODE();
658
659 case 6:
660 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
661
662 case 7:
663 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
664 return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
665 switch (bRm & X86_MODRM_RM_MASK)
666 {
667 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
668 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
669 }
670 return IEMOP_RAISE_INVALID_OPCODE();
671
672 IEM_NOT_REACHED_DEFAULT_CASE_RET();
673 }
674}
675
676/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
677FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
678{
679 IEMOP_HLP_NO_REAL_OR_V86_MODE();
680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
681
682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
683 {
684 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
685 switch (pVCpu->iem.s.enmEffOpSize)
686 {
687 case IEMMODE_16BIT:
688 {
689 IEM_MC_BEGIN(3, 0);
690 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
691 IEM_MC_ARG(uint16_t, u16Sel, 1);
692 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
693
694 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
695 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
696 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
697
698 IEM_MC_END();
699 return VINF_SUCCESS;
700 }
701
702 case IEMMODE_32BIT:
703 case IEMMODE_64BIT:
704 {
705 IEM_MC_BEGIN(3, 0);
706 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
707 IEM_MC_ARG(uint16_t, u16Sel, 1);
708 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
709
710 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
711 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
712 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
713
714 IEM_MC_END();
715 return VINF_SUCCESS;
716 }
717
718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
719 }
720 }
721 else
722 {
723 switch (pVCpu->iem.s.enmEffOpSize)
724 {
725 case IEMMODE_16BIT:
726 {
727 IEM_MC_BEGIN(3, 1);
728 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 1);
730 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
732
733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
734 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
735
736 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
737 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
738 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
739
740 IEM_MC_END();
741 return VINF_SUCCESS;
742 }
743
744 case IEMMODE_32BIT:
745 case IEMMODE_64BIT:
746 {
747 IEM_MC_BEGIN(3, 1);
748 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
749 IEM_MC_ARG(uint16_t, u16Sel, 1);
750 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
752
753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
754 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
755/** @todo testcase: make sure it's a 16-bit read. */
756
757 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
758 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
759 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
760
761 IEM_MC_END();
762 return VINF_SUCCESS;
763 }
764
765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
766 }
767 }
768}
769
770
771
772/** Opcode 0x0f 0x02. */
773FNIEMOP_DEF(iemOp_lar_Gv_Ew)
774{
775 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
776 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
777}
778
779
780/** Opcode 0x0f 0x03. */
781FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
782{
783 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
784 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
785}
786
787
788/** Opcode 0x0f 0x05. */
789FNIEMOP_DEF(iemOp_syscall)
790{
791 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
793 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
794}
795
796
797/** Opcode 0x0f 0x06. */
798FNIEMOP_DEF(iemOp_clts)
799{
800 IEMOP_MNEMONIC(clts, "clts");
801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
802 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
803}
804
805
806/** Opcode 0x0f 0x07. */
807FNIEMOP_DEF(iemOp_sysret)
808{
809 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
812}
813
814
815/** Opcode 0x0f 0x08. */
816FNIEMOP_STUB(iemOp_invd);
817// IEMOP_HLP_MIN_486();
818
819
820/** Opcode 0x0f 0x09. */
821FNIEMOP_DEF(iemOp_wbinvd)
822{
823 IEMOP_MNEMONIC(wbinvd, "wbinvd");
824 IEMOP_HLP_MIN_486();
825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
826 IEM_MC_BEGIN(0, 0);
827 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
828 IEM_MC_ADVANCE_RIP();
829 IEM_MC_END();
830 return VINF_SUCCESS; /* ignore for now */
831}
832
833
834/** Opcode 0x0f 0x0b. */
835FNIEMOP_DEF(iemOp_ud2)
836{
837 IEMOP_MNEMONIC(ud2, "ud2");
838 return IEMOP_RAISE_INVALID_OPCODE();
839}
840
841/** Opcode 0x0f 0x0d. */
842FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
843{
844 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
845 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
846 {
847 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
848 return IEMOP_RAISE_INVALID_OPCODE();
849 }
850
851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
852 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
853 {
854 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
855 return IEMOP_RAISE_INVALID_OPCODE();
856 }
857
858 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
859 {
860 case 2: /* Aliased to /0 for the time being. */
861 case 4: /* Aliased to /0 for the time being. */
862 case 5: /* Aliased to /0 for the time being. */
863 case 6: /* Aliased to /0 for the time being. */
864 case 7: /* Aliased to /0 for the time being. */
865 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
866 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
867 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
869 }
870
871 IEM_MC_BEGIN(0, 1);
872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
875 /* Currently a NOP. */
876 NOREF(GCPtrEffSrc);
877 IEM_MC_ADVANCE_RIP();
878 IEM_MC_END();
879 return VINF_SUCCESS;
880}
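
/**
 * Encoding sketch (illustrative bytes, 32-bit addressing assumed): group P
 * lives at 0x0f 0x0d with the hint in the ModRM reg field.
 *
 * @code
 *     // 0F 0D 00  ->  prefetch  [eax]   (/0, and aliases /2, /4../7)
 *     // 0F 0D 08  ->  prefetchw [eax]   (/1, also /3)
 * @endcode
 */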
881
882
883/** Opcode 0x0f 0x0e. */
884FNIEMOP_STUB(iemOp_femms);
885
886
887/** Opcode 0x0f 0x0f 0x0c. */
888FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
889
890/** Opcode 0x0f 0x0f 0x0d. */
891FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
892
893/** Opcode 0x0f 0x0f 0x1c. */
894FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
895
896/** Opcode 0x0f 0x0f 0x1d. */
897FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
898
899/** Opcode 0x0f 0x0f 0x8a. */
900FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
901
902/** Opcode 0x0f 0x0f 0x8e. */
903FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
904
905/** Opcode 0x0f 0x0f 0x90. */
906FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
907
908/** Opcode 0x0f 0x0f 0x94. */
909FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
910
911/** Opcode 0x0f 0x0f 0x96. */
912FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
913
914/** Opcode 0x0f 0x0f 0x97. */
915FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
916
917/** Opcode 0x0f 0x0f 0x9a. */
918FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
919
920/** Opcode 0x0f 0x0f 0x9e. */
921FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
922
923/** Opcode 0x0f 0x0f 0xa0. */
924FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
925
926/** Opcode 0x0f 0x0f 0xa4. */
927FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
928
929/** Opcode 0x0f 0x0f 0xa6. */
930FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
931
932/** Opcode 0x0f 0x0f 0xa7. */
933FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
934
935/** Opcode 0x0f 0x0f 0xaa. */
936FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
937
938/** Opcode 0x0f 0x0f 0xae. */
939FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
940
941/** Opcode 0x0f 0x0f 0xb0. */
942FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
943
944/** Opcode 0x0f 0x0f 0xb4. */
945FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
946
947/** Opcode 0x0f 0x0f 0xb6. */
948FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
949
950/** Opcode 0x0f 0x0f 0xb7. */
951FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
952
953/** Opcode 0x0f 0x0f 0xbb. */
954FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
955
956/** Opcode 0x0f 0x0f 0xbf. */
957FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
958
959
960/** Opcode 0x0f 0x0f. */
961FNIEMOP_DEF(iemOp_3Dnow)
962{
963 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
964 {
965 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
966 return IEMOP_RAISE_INVALID_OPCODE();
967 }
968
969 /* This is pretty sparse, use switch instead of table. */
970 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
971 switch (b)
972 {
973 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
974 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
975 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
976 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
977 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
978 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
979 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
980 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
981 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
982 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
983 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
984 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
985 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
986 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
987 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
988 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
989 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
990 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
991 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
992 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
993 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
994 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
995 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
996 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
997 default:
998 return IEMOP_RAISE_INVALID_OPCODE();
999 }
1000}
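
/**
 * Encoding note (general 3DNow! background, hedged): the operation is
 * selected by an imm8-style suffix byte that trails the ModRM operands,
 * i.e. the format is 0F 0F /r ib.
 *
 * @code
 *     // 0F 0F C1 B4  ->  pfmul mm0, mm1   (suffix 0xb4 = PFMUL)
 *     // 0F 0F C1 9E  ->  pfadd mm0, mm1   (suffix 0x9e = PFADD)
 * @endcode
 */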
1001
1002
1003/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1004FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1005/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1006FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1007/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1008FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1009/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1010FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1011
1012
1013/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
1014FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1015{
1016 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1018 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1019 {
1020 /*
1021 * Register, register.
1022 */
1023 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1024 IEM_MC_BEGIN(0, 0);
1025 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1026 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1027 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1028 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1029 IEM_MC_ADVANCE_RIP();
1030 IEM_MC_END();
1031 }
1032 else
1033 {
1034 /*
1035 * Memory, register.
1036 */
1037 IEM_MC_BEGIN(0, 2);
1038 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1040
1041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1042 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1043 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1044 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1045
1046 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1047 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1048
1049 IEM_MC_ADVANCE_RIP();
1050 IEM_MC_END();
1051 }
1052 return VINF_SUCCESS;
1053}
1054
1055
1056/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1057FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1058
1059/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1060FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1061
1062/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1063FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1064{
1065 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1067 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1068 {
1069 /*
1070 * Register, register.
1071 */
1072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1073 IEM_MC_BEGIN(0, 1);
1074 IEM_MC_LOCAL(uint64_t, uSrc);
1075
1076 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1077 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1078 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1079 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1080
1081 IEM_MC_ADVANCE_RIP();
1082 IEM_MC_END();
1083 }
1084 else
1085 {
1086 /*
1087 * Memory, register.
1088 */
1089 IEM_MC_BEGIN(0, 2);
1090 IEM_MC_LOCAL(uint64_t, uSrc);
1091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1092
1093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1095 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1096 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1097
1098 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1099 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1100
1101 IEM_MC_ADVANCE_RIP();
1102 IEM_MC_END();
1103 }
1104 return VINF_SUCCESS;
1105}
1106
1107
1108/** Opcode 0x0f 0x12. */
1109FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1110
1111/** Opcode 0x66 0x0f 0x12. */
1112FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1113
1114/** Opcode 0xf3 0x0f 0x12. */
1115FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1116
1117/** Opcode 0xf2 0x0f 0x12. */
1118FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1119
1120/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1121FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1122
1123/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1124FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1125{
1126 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1128 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1129 {
1130#if 0
1131 /*
1132 * Register, register.
1133 */
1134 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1135 IEM_MC_BEGIN(0, 1);
1136 IEM_MC_LOCAL(uint64_t, uSrc);
1137 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1138 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1139 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1140 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1141 IEM_MC_ADVANCE_RIP();
1142 IEM_MC_END();
1143#else
1144 return IEMOP_RAISE_INVALID_OPCODE();
1145#endif
1146 }
1147 else
1148 {
1149 /*
1150 * Memory, register.
1151 */
1152 IEM_MC_BEGIN(0, 2);
1153 IEM_MC_LOCAL(uint64_t, uSrc);
1154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1155
1156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1157 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1158 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1159 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1160
1161 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1162 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1163
1164 IEM_MC_ADVANCE_RIP();
1165 IEM_MC_END();
1166 }
1167 return VINF_SUCCESS;
1168}
1169
1170/* Opcode 0xf3 0x0f 0x13 - invalid */
1171/* Opcode 0xf2 0x0f 0x13 - invalid */
1172
1173/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1174FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1175/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1176FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1177/* Opcode 0xf3 0x0f 0x14 - invalid */
1178/* Opcode 0xf2 0x0f 0x14 - invalid */
1179/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1180FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1181/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1182FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1183/* Opcode 0xf3 0x0f 0x15 - invalid */
1184/* Opcode 0xf2 0x0f 0x15 - invalid */
1185/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1186FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1187/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1188FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1189/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1190FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1191/* Opcode 0xf2 0x0f 0x16 - invalid */
1192/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1193FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1194/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1195FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1196/* Opcode 0xf3 0x0f 0x17 - invalid */
1197/* Opcode 0xf2 0x0f 0x17 - invalid */
1198
1199
1200/** Opcode 0x0f 0x18. */
1201FNIEMOP_DEF(iemOp_prefetch_Grp16)
1202{
1203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1204 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1205 {
1206 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1207 {
1208 case 4: /* Aliased to /0 for the time being according to AMD. */
1209 case 5: /* Aliased to /0 for the time being according to AMD. */
1210 case 6: /* Aliased to /0 for the time being according to AMD. */
1211 case 7: /* Aliased to /0 for the time being according to AMD. */
1212 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1213 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1214 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1215 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1217 }
1218
1219 IEM_MC_BEGIN(0, 1);
1220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1223 /* Currently a NOP. */
1224 NOREF(GCPtrEffSrc);
1225 IEM_MC_ADVANCE_RIP();
1226 IEM_MC_END();
1227 return VINF_SUCCESS;
1228 }
1229
1230 return IEMOP_RAISE_INVALID_OPCODE();
1231}
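
/**
 * Hint mapping sketch (illustrative bytes, 32-bit addressing assumed):
 *
 * @code
 *     // 0F 18 00  ->  prefetchnta [eax]   (/0, and aliases /4../7)
 *     // 0F 18 08  ->  prefetcht0  [eax]   (/1)
 *     // 0F 18 10  ->  prefetcht1  [eax]   (/2)
 *     // 0F 18 18  ->  prefetcht2  [eax]   (/3)
 * @endcode
 */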
1232
1233
1234/** Opcode 0x0f 0x19..0x1f. */
1235FNIEMOP_DEF(iemOp_nop_Ev)
1236{
1237 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1239 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1240 {
1241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1242 IEM_MC_BEGIN(0, 0);
1243 IEM_MC_ADVANCE_RIP();
1244 IEM_MC_END();
1245 }
1246 else
1247 {
1248 IEM_MC_BEGIN(0, 1);
1249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1252 /* Currently a NOP. */
1253 NOREF(GCPtrEffSrc);
1254 IEM_MC_ADVANCE_RIP();
1255 IEM_MC_END();
1256 }
1257 return VINF_SUCCESS;
1258}
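
/**
 * Usage note (well-known encodings, added for illustration): 0x0f 0x1f /0
 * is the recommended multi-byte NOP, which is why this range decodes as
 * "nop Ev" rather than as an invalid opcode.
 *
 * @code
 *     // 0F 1F 00           ->  3-byte nop dword [eax]
 *     // 66 0F 1F 44 00 00  ->  6-byte nop word  [eax+eax+0]
 * @endcode
 */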
1259
1260
1261/** Opcode 0x0f 0x20. */
1262FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1263{
1264 /* mod is ignored, as are operand size overrides. */
1265 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1266 IEMOP_HLP_MIN_386();
1267 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1268 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1269 else
1270 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1271
1272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1273 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1274 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1275 {
1276 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1277 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1278 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1279 iCrReg |= 8;
1280 }
1281 switch (iCrReg)
1282 {
1283 case 0: case 2: case 3: case 4: case 8:
1284 break;
1285 default:
1286 return IEMOP_RAISE_INVALID_OPCODE();
1287 }
1288 IEMOP_HLP_DONE_DECODING();
1289
1290 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1291}
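
/**
 * Encoding sketch (illustrative): without REX.R only CR0..CR7 are
 * encodable, so CPUs reporting fMovCr8In32Bit accept a LOCK prefix as an
 * alternative CR8 selector; that is what the iCrReg |= 8 above models.
 *
 * @code
 *     // 0F 20 C0     ->  mov eax, cr0
 *     // F0 0F 20 C0  ->  mov eax, cr8   (LOCK alternative encoding)
 *     // 44 0F 20 C0  ->  mov rax, cr8   (REX.R, 64-bit mode only)
 * @endcode
 */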
1292
1293
1294/** Opcode 0x0f 0x21. */
1295FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1296{
1297 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1298 IEMOP_HLP_MIN_386();
1299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1302 return IEMOP_RAISE_INVALID_OPCODE();
1303 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1304 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1305 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1306}
1307
1308
1309/** Opcode 0x0f 0x22. */
1310FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1311{
1312 /* mod is ignored, as are operand size overrides. */
1313 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1314 IEMOP_HLP_MIN_386();
1315 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1316 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1317 else
1318 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1319
1320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1321 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1322 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1323 {
1324 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1325 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1326 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1327 iCrReg |= 8;
1328 }
1329 switch (iCrReg)
1330 {
1331 case 0: case 2: case 3: case 4: case 8:
1332 break;
1333 default:
1334 return IEMOP_RAISE_INVALID_OPCODE();
1335 }
1336 IEMOP_HLP_DONE_DECODING();
1337
1338 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1339}
1340
1341
1342/** Opcode 0x0f 0x23. */
1343FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1344{
1345 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1346 IEMOP_HLP_MIN_386();
1347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1349 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1350 return IEMOP_RAISE_INVALID_OPCODE();
1351 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1352 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1353 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1354}
1355
1356
1357/** Opcode 0x0f 0x24. */
1358FNIEMOP_DEF(iemOp_mov_Rd_Td)
1359{
1360 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1361 /** @todo works on 386 and 486. */
1362 /* The RM byte is not considered, see testcase. */
1363 return IEMOP_RAISE_INVALID_OPCODE();
1364}
1365
1366
1367/** Opcode 0x0f 0x26. */
1368FNIEMOP_DEF(iemOp_mov_Td_Rd)
1369{
1370 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1371 /** @todo works on 386 and 486. */
1372 /* The RM byte is not considered, see testcase. */
1373 return IEMOP_RAISE_INVALID_OPCODE();
1374}
1375
1376
1377/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1378FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1379{
1380 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1382 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1383 {
1384 /*
1385 * Register, register.
1386 */
1387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1388 IEM_MC_BEGIN(0, 0);
1389 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1390 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1391 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1392 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1393 IEM_MC_ADVANCE_RIP();
1394 IEM_MC_END();
1395 }
1396 else
1397 {
1398 /*
1399 * Register, memory.
1400 */
1401 IEM_MC_BEGIN(0, 2);
1402 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1404
1405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1407 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1408 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1409
1410 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1411 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1412
1413 IEM_MC_ADVANCE_RIP();
1414 IEM_MC_END();
1415 }
1416 return VINF_SUCCESS;
1417}
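
/**
 * Alignment sketch (illustrative): the _ALIGN_SSE accessors model the
 * movaps/movapd requirement that 128-bit memory operands be 16-byte
 * aligned; a misaligned access raises #GP(0) instead of being split up.
 *
 * @code
 *     // movaps xmm0, [addr]  ->  #GP(0) unless (addr & 15) == 0
 * @endcode
 */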
1418
1419/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1420FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1421{
1422 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1424 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1425 {
1426 /*
1427 * Register, register.
1428 */
1429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1430 IEM_MC_BEGIN(0, 0);
1431 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1433 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1434 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1435 IEM_MC_ADVANCE_RIP();
1436 IEM_MC_END();
1437 }
1438 else
1439 {
1440 /*
1441 * Register, memory.
1442 */
1443 IEM_MC_BEGIN(0, 2);
1444 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1446
1447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1449 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1450 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1451
1452 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1453 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1454
1455 IEM_MC_ADVANCE_RIP();
1456 IEM_MC_END();
1457 }
1458 return VINF_SUCCESS;
1459}
1460
1461/* Opcode 0xf3 0x0f 0x28 - invalid */
1462/* Opcode 0xf2 0x0f 0x28 - invalid */
1463
1464/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
1465FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1466{
1467 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1469 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1470 {
1471 /*
1472 * Register, register.
1473 */
1474 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1475 IEM_MC_BEGIN(0, 0);
1476 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1478 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1479 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1480 IEM_MC_ADVANCE_RIP();
1481 IEM_MC_END();
1482 }
1483 else
1484 {
1485 /*
1486 * Memory, register.
1487 */
1488 IEM_MC_BEGIN(0, 2);
1489 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1491
1492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1493 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1494 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1495 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1496
1497 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1498 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1499
1500 IEM_MC_ADVANCE_RIP();
1501 IEM_MC_END();
1502 }
1503 return VINF_SUCCESS;
1504}
1505
1506/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
1507FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1508{
1509 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1512 {
1513 /*
1514 * Register, register.
1515 */
1516 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1517 IEM_MC_BEGIN(0, 0);
1518 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1519 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1520 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1521 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1522 IEM_MC_ADVANCE_RIP();
1523 IEM_MC_END();
1524 }
1525 else
1526 {
1527 /*
1528 * Memory, register.
1529 */
1530 IEM_MC_BEGIN(0, 2);
1531 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1533
1534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1535 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1536 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1538
1539 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1540 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1541
1542 IEM_MC_ADVANCE_RIP();
1543 IEM_MC_END();
1544 }
1545 return VINF_SUCCESS;
1546}
1547
1548/* Opcode 0xf3 0x0f 0x29 - invalid */
1549/* Opcode 0xf2 0x0f 0x29 - invalid */
1550
1551
1552/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1553FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1554/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1555FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1556/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1557FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
1558/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1559FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
1560
1561
1562/** Opcode 0x0f 0x2b. */
1563FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd) /** @todo split me */
1564{
1565 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1566 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1567 else
1568 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
1569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1570 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1571 {
1572 /*
1573 * memory, register.
1574 */
1575 IEM_MC_BEGIN(0, 2);
1576 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1578
1579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1580 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1581 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1582 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1583 else
1584 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1585 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1586
1587 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1588 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1589
1590 IEM_MC_ADVANCE_RIP();
1591 IEM_MC_END();
1592 }
1593 /* The register, register encoding is invalid. */
1594 else
1595 return IEMOP_RAISE_INVALID_OPCODE();
1596 return VINF_SUCCESS;
1597}
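
/**
 * Non-temporal note (illustrative bytes): movntps/movntpd are streaming
 * stores that bypass the cache hierarchy; only the memory form exists,
 * matching the register-form #UD above.
 *
 * @code
 *     // 0F 2B 00  ->  movntps [eax], xmm0
 *     // 0F 2B C0  ->  #UD (no register form)
 * @endcode
 */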
1598
1599
1600/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1601FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1602/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1603FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1604/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1605FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1606/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1607FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1608
1609/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1610FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1611/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1612FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1613/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1614FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1615/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1616FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1617
1618/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1619FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1620/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1621FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1622/* Opcode 0xf3 0x0f 0x2e - invalid */
1623/* Opcode 0xf2 0x0f 0x2e - invalid */
1624
1625/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
1626FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1627/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
1628FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1629/* Opcode 0xf3 0x0f 0x2f - invalid */
1630/* Opcode 0xf2 0x0f 0x2f - invalid */
1631
1632/** Opcode 0x0f 0x30. */
1633FNIEMOP_DEF(iemOp_wrmsr)
1634{
1635 IEMOP_MNEMONIC(wrmsr, "wrmsr");
1636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1637 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
1638}
1639
1640
1641/** Opcode 0x0f 0x31. */
1642FNIEMOP_DEF(iemOp_rdtsc)
1643{
1644 IEMOP_MNEMONIC(rdtsc, "rdtsc");
1645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1646 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
1647}
1648
1649
1650/** Opcode 0x0f 0x32. */
1651FNIEMOP_DEF(iemOp_rdmsr)
1652{
1653 IEMOP_MNEMONIC(rdmsr, "rdmsr");
1654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1655 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
1656}
1657
1658
1659/** Opcode 0x0f 0x33. */
1660FNIEMOP_STUB(iemOp_rdpmc);
1661/** Opcode 0x0f 0x34. */
1662FNIEMOP_STUB(iemOp_sysenter);
1663/** Opcode 0x0f 0x35. */
1664FNIEMOP_STUB(iemOp_sysexit);
1665/** Opcode 0x0f 0x37. */
1666FNIEMOP_STUB(iemOp_getsec);
1667/** Opcode 0x0f 0x38. */
1668FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1669/** Opcode 0x0f 0x3a. */
1670FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1671
1672
1673/**
1674 * Implements a conditional move.
1675 *
1676 * Wish there was an obvious way to do this where we could share and reduce
1677 * code bloat.
1678 *
1679 * @param a_Cnd The conditional "microcode" operation.
1680 */
1681#define CMOV_X(a_Cnd) \
1682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1683 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1684 { \
1685 switch (pVCpu->iem.s.enmEffOpSize) \
1686 { \
1687 case IEMMODE_16BIT: \
1688 IEM_MC_BEGIN(0, 1); \
1689 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1690 a_Cnd { \
1691 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1692 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1693 } IEM_MC_ENDIF(); \
1694 IEM_MC_ADVANCE_RIP(); \
1695 IEM_MC_END(); \
1696 return VINF_SUCCESS; \
1697 \
1698 case IEMMODE_32BIT: \
1699 IEM_MC_BEGIN(0, 1); \
1700 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1701 a_Cnd { \
1702 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1703 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1704 } IEM_MC_ELSE() { \
1705 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1706 } IEM_MC_ENDIF(); \
1707 IEM_MC_ADVANCE_RIP(); \
1708 IEM_MC_END(); \
1709 return VINF_SUCCESS; \
1710 \
1711 case IEMMODE_64BIT: \
1712 IEM_MC_BEGIN(0, 1); \
1713 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1714 a_Cnd { \
1715 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1716 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1717 } IEM_MC_ENDIF(); \
1718 IEM_MC_ADVANCE_RIP(); \
1719 IEM_MC_END(); \
1720 return VINF_SUCCESS; \
1721 \
1722 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1723 } \
1724 } \
1725 else \
1726 { \
1727 switch (pVCpu->iem.s.enmEffOpSize) \
1728 { \
1729 case IEMMODE_16BIT: \
1730 IEM_MC_BEGIN(0, 2); \
1731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1732 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1734 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1735 a_Cnd { \
1736 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1737 } IEM_MC_ENDIF(); \
1738 IEM_MC_ADVANCE_RIP(); \
1739 IEM_MC_END(); \
1740 return VINF_SUCCESS; \
1741 \
1742 case IEMMODE_32BIT: \
1743 IEM_MC_BEGIN(0, 2); \
1744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1745 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1747 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1748 a_Cnd { \
1749 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1750 } IEM_MC_ELSE() { \
1751 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1752 } IEM_MC_ENDIF(); \
1753 IEM_MC_ADVANCE_RIP(); \
1754 IEM_MC_END(); \
1755 return VINF_SUCCESS; \
1756 \
1757 case IEMMODE_64BIT: \
1758 IEM_MC_BEGIN(0, 2); \
1759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1760 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1762 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1763 a_Cnd { \
1764 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1765 } IEM_MC_ENDIF(); \
1766 IEM_MC_ADVANCE_RIP(); \
1767 IEM_MC_END(); \
1768 return VINF_SUCCESS; \
1769 \
1770 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1771 } \
1772 } do {} while (0)
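
/**
 * Usage sketch (illustrative): each CMOVcc decoder below passes its EFLAGS
 * test as a_Cnd, e.g. CMOVO effectively expands from
 *
 * @code
 *     CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
 * @endcode
 *
 * Note that the 32-bit forms clear the destination's high qword even when
 * the condition is false, matching 64-bit mode zero-extension rules.
 */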
1773
1774
1775
1776/** Opcode 0x0f 0x40. */
1777FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1778{
1779 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
1780 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1781}
1782
1783
1784/** Opcode 0x0f 0x41. */
1785FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1786{
1787 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
1788 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1789}
1790
1791
1792/** Opcode 0x0f 0x42. */
1793FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
1794{
1795 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
1796 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
1797}
1798
1799
1800/** Opcode 0x0f 0x43. */
1801FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
1802{
1803 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
1804 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
1805}
1806
1807
1808/** Opcode 0x0f 0x44. */
1809FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
1810{
1811 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
1812 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
1813}
1814
1815
1816/** Opcode 0x0f 0x45. */
1817FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
1818{
1819 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
1820 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
1821}
1822
1823
1824/** Opcode 0x0f 0x46. */
1825FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
1826{
1827 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
1828 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1829}
1830
1831
1832/** Opcode 0x0f 0x47. */
1833FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
1834{
1835 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
1836 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1837}
1838
1839
1840/** Opcode 0x0f 0x48. */
1841FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
1842{
1843 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
1844 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
1845}
1846
1847
1848/** Opcode 0x0f 0x49. */
1849FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
1850{
1851 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
1852 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
1853}
1854
1855
1856/** Opcode 0x0f 0x4a. */
1857FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
1858{
1859 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
1860 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
1861}
1862
1863
1864/** Opcode 0x0f 0x4b. */
1865FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
1866{
1867 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
1868 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
1869}
1870
1871
1872/** Opcode 0x0f 0x4c. */
1873FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
1874{
1875 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
1876 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
1877}
1878
1879
1880/** Opcode 0x0f 0x4d. */
1881FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
1882{
1883 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
1884 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
1885}
1886
1887
1888/** Opcode 0x0f 0x4e. */
1889FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
1890{
1891 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
1892 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1893}
1894
1895
1896/** Opcode 0x0f 0x4f. */
1897FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
1898{
1899 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
1900 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1901}
1902
1903#undef CMOV_X
1904
1905/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
1906FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1907/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
1908FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1909/* Opcode 0xf3 0x0f 0x50 - invalid */
1910/* Opcode 0xf2 0x0f 0x50 - invalid */
1911
1912/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
1913FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1914/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
1915FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1916/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
1917FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1918/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1919FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1920
1921/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
1922FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1923/* Opcode 0x66 0x0f 0x52 - invalid */
1924/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
1925FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1926/* Opcode 0xf2 0x0f 0x52 - invalid */
1927
1928/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
1929FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1930/* Opcode 0x66 0x0f 0x53 - invalid */
1931/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
1932FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1933/* Opcode 0xf2 0x0f 0x53 - invalid */
1934
1935/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
1936FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1937/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
1938FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1939/* Opcode 0xf3 0x0f 0x54 - invalid */
1940/* Opcode 0xf2 0x0f 0x54 - invalid */
1941
1942/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
1943FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1944/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
1945FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1946/* Opcode 0xf3 0x0f 0x55 - invalid */
1947/* Opcode 0xf2 0x0f 0x55 - invalid */
1948
1949/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
1950FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1951/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
1952FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1953/* Opcode 0xf3 0x0f 0x56 - invalid */
1954/* Opcode 0xf2 0x0f 0x56 - invalid */
1955
1956/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
1957FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
1958/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
1959FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
1960/* Opcode 0xf3 0x0f 0x57 - invalid */
1961/* Opcode 0xf2 0x0f 0x57 - invalid */
1962
1963/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
1964FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
1965/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
1966FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
1967/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
1968FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
1969/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
1970FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
1971
1972/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
1973FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
1974/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
1975FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
1976/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
1977FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
1978/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
1979FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
1980
1981/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
1982FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
1983/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
1984FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
1985/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
1986FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
1987/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
1988FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
1989
1990/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
1991FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
1992/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
1993FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
1994/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
1995FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
1996/* Opcode 0xf2 0x0f 0x5b - invalid */
1997
1998/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
1999FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2000/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2001FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2002/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2003FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2004/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2005FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2006
2007/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2008FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2009/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2010FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2011/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2012FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2013/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2014FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2015
2016/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2017FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2018/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2019FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2020/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2021FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2022/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2023FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2024
2025/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2026FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2027/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2028FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2029/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2030FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2031/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2032FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2033
2034/**
2035 * Common worker for SSE2 instructions on the forms:
2036 *      pxxxx xmm1, xmm2/mem128
2037 *
2038 * The 2nd operand is the low half of a register, which in the memory case
2039 * means a 128-bit aligned, 64-bit or 128-bit wide memory access for SSE,
2040 * in contrast to the 32-bit access of the MMX form.
2041 *
2042 * Exceptions type 4.
2043 */
2044FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2045{
2046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2047 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2048 {
2049 /*
2050 * Register, register.
2051 */
2052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2053 IEM_MC_BEGIN(2, 0);
2054 IEM_MC_ARG(uint128_t *, pDst, 0);
2055 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2056 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2057 IEM_MC_PREPARE_SSE_USAGE();
2058 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2059 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2060 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2061 IEM_MC_ADVANCE_RIP();
2062 IEM_MC_END();
2063 }
2064 else
2065 {
2066 /*
2067 * Register, memory.
2068 */
2069 IEM_MC_BEGIN(2, 2);
2070 IEM_MC_ARG(uint128_t *, pDst, 0);
2071 IEM_MC_LOCAL(uint64_t, uSrc);
2072 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2074
2075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2077 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2078 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2079
2080 IEM_MC_PREPARE_SSE_USAGE();
2081 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2082 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2083
2084 IEM_MC_ADVANCE_RIP();
2085 IEM_MC_END();
2086 }
2087 return VINF_SUCCESS;
2088}
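/*
 * Illustration (editor's note, not from the original sources): for
 * punpcklbw the low halves of the two operands are interleaved byte by
 * byte, lowest lanes first:
 *      dst low bytes: a3 a2 a1 a0,  src low bytes: b3 b2 b1 b0
 *      result (low to high): a0 b0 a1 b1 a2 b2 a3 b3
 * punpcklwd and punpckldq work the same way at word and dword granularity.
 */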
2089
2090
2091/**
2092 * Common worker for MMX instructions on the forms:
2093 *      pxxxx mm1, mm2/mem32
2094 *
2095 * The 2nd operand is the low half of a register, which in the memory case
2096 * means a 32-bit memory access for MMX, in contrast to the 128-bit aligned,
2097 * 64-bit or 128-bit wide access of the SSE2 form.
2098 *
2099 * Exceptions type 4.
2100 */
2101FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2102{
2103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2104 if (!pImpl->pfnU64)
2105 return IEMOP_RAISE_INVALID_OPCODE();
2106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2107 {
2108 /*
2109 * Register, register.
2110 */
2111 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2112 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2114 IEM_MC_BEGIN(2, 0);
2115 IEM_MC_ARG(uint64_t *, pDst, 0);
2116 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2117 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2118 IEM_MC_PREPARE_FPU_USAGE();
2119 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2120 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2121 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2122 IEM_MC_ADVANCE_RIP();
2123 IEM_MC_END();
2124 }
2125 else
2126 {
2127 /*
2128 * Register, memory.
2129 */
2130 IEM_MC_BEGIN(2, 2);
2131 IEM_MC_ARG(uint64_t *, pDst, 0);
2132 IEM_MC_LOCAL(uint32_t, uSrc);
2133 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2135
2136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2138 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2139 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2140
2141 IEM_MC_PREPARE_FPU_USAGE();
2142 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2143 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2144
2145 IEM_MC_ADVANCE_RIP();
2146 IEM_MC_END();
2147 }
2148 return VINF_SUCCESS;
2149}
2150
2151
2152/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2153FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2154{
2155 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2156 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2157}
2158
2159/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2160FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2161{
2162 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2163    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2164}
2165
2166/* Opcode 0xf3 0x0f 0x60 - invalid */
2167
2168
2169/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2170FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2171{
2172    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
2173 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2174}
2175
2176/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2177FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2178{
2179 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2180 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2181}
2182
2183/* Opcode 0xf3 0x0f 0x61 - invalid */
2184
2185
2186/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2187FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2188{
2189 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2190 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2191}
2192
2193/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2194FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2195{
2196 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2197 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2198}
2199
2200/* Opcode 0xf3 0x0f 0x62 - invalid */
2201
2202
2203
2204/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2205FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2206/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2207FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2208/* Opcode 0xf3 0x0f 0x63 - invalid */
2209
2210/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2211FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2212/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2213FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2214/* Opcode 0xf3 0x0f 0x64 - invalid */
2215
2216/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2217FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2218/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2219FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2220/* Opcode 0xf3 0x0f 0x65 - invalid */
2221
2222/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2223FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2224/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2225FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2226/* Opcode 0xf3 0x0f 0x66 - invalid */
2227
2228/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2229FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2230/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2231FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2232/* Opcode 0xf3 0x0f 0x67 - invalid */
2233
2234
2235/**
2236 * Common worker for MMX instructions on the form:
2237 * pxxxx mm1, mm2/mem64
2238 *
2239 * The 2nd operand is the second half of a register, which in the memory case
2240 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2241 * where it may read the full 128 bits or only the upper 64 bits.
2242 *
2243 * Exceptions type 4.
2244 */
2245FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2246{
2247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2248 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2249 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2250 {
2251 /*
2252 * Register, register.
2253 */
2254 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2255 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2257 IEM_MC_BEGIN(2, 0);
2258 IEM_MC_ARG(uint64_t *, pDst, 0);
2259 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2260 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2261 IEM_MC_PREPARE_FPU_USAGE();
2262 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2263 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2264 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2265 IEM_MC_ADVANCE_RIP();
2266 IEM_MC_END();
2267 }
2268 else
2269 {
2270 /*
2271 * Register, memory.
2272 */
2273 IEM_MC_BEGIN(2, 2);
2274 IEM_MC_ARG(uint64_t *, pDst, 0);
2275 IEM_MC_LOCAL(uint64_t, uSrc);
2276 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2278
2279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2281 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2282 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2283
2284 IEM_MC_PREPARE_FPU_USAGE();
2285 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2286 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2287
2288 IEM_MC_ADVANCE_RIP();
2289 IEM_MC_END();
2290 }
2291 return VINF_SUCCESS;
2292}
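/*
 * Illustration (editor's note, not from the original sources): the HighHigh
 * workers back instructions like punpckhbw, which interleave the high
 * halves instead:
 *      dst high bytes: a7 a6 a5 a4,  src high bytes: b7 b6 b5 b4
 *      result (low to high): a4 b4 a5 b5 a6 b6 a7 b7
 * This is why the MMX memory case above fetches the full 64 bits even
 * though only the upper half of the source ends up being used.
 */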
2293
2294
2295/**
2296 * Common worker for SSE2 instructions on the form:
2297 * pxxxx xmm1, xmm2/mem128
2298 *
2299 * The 2nd operand is the second half of a register, which in the memory case
2300 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2301 * where it may read the full 128 bits or only the upper 64 bits.
2302 *
2303 * Exceptions type 4.
2304 */
2305FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2306{
2307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2308 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2309 {
2310 /*
2311 * Register, register.
2312 */
2313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2314 IEM_MC_BEGIN(2, 0);
2315 IEM_MC_ARG(uint128_t *, pDst, 0);
2316 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2317 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2318 IEM_MC_PREPARE_SSE_USAGE();
2319 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2320 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2321 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2322 IEM_MC_ADVANCE_RIP();
2323 IEM_MC_END();
2324 }
2325 else
2326 {
2327 /*
2328 * Register, memory.
2329 */
2330 IEM_MC_BEGIN(2, 2);
2331 IEM_MC_ARG(uint128_t *, pDst, 0);
2332 IEM_MC_LOCAL(uint128_t, uSrc);
2333 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2335
2336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2338 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2339        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2340
2341 IEM_MC_PREPARE_SSE_USAGE();
2342 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2343 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2344
2345 IEM_MC_ADVANCE_RIP();
2346 IEM_MC_END();
2347 }
2348 return VINF_SUCCESS;
2349}
2350
2351
2352/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2353FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2354{
2355 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2356 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2357}
2358
2359/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2360FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2361{
2362 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2363 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2364}
2365/* Opcode 0xf3 0x0f 0x68 - invalid */
2366
2367
2368/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2369FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2370{
2371 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2372 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2373}
2374
2375/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2376FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2377{
2378 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2379 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2380
2381}
2382/* Opcode 0xf3 0x0f 0x69 - invalid */
2383
2384
2385/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2386FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2387{
2388 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2389 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2390}
2391
2392/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2393FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2394{
2395 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2396 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2397}
2398/* Opcode 0xf3 0x0f 0x6a - invalid */
2399
2400
2401/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2402FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2403/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2404FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2405/* Opcode 0xf3 0x0f 0x6b - invalid */
2406
2407
2408/* Opcode 0x0f 0x6c - invalid */
2409
2410/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2411FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2412{
2413 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2414 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2415}
2416
2417/* Opcode 0xf3 0x0f 0x6c - invalid */
2418/* Opcode 0xf2 0x0f 0x6c - invalid */
2419
2420
2421/* Opcode 0x0f 0x6d - invalid */
2422
2423/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2424FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2425{
2426    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2427 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2428}
2429
2430/* Opcode 0xf3 0x0f 0x6d - invalid */
2431
2432
2433/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2434FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2435{
2436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2437 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2438 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2439 else
2440 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2441 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2442 {
2443 /* MMX, greg */
2444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2445 IEM_MC_BEGIN(0, 1);
2446 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2447 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2448 IEM_MC_LOCAL(uint64_t, u64Tmp);
2449 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2450 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2451 else
2452 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2453 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2454 IEM_MC_ADVANCE_RIP();
2455 IEM_MC_END();
2456 }
2457 else
2458 {
2459 /* MMX, [mem] */
2460 IEM_MC_BEGIN(0, 2);
2461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2462 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2463        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2465 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2466 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2467 {
2468 IEM_MC_LOCAL(uint64_t, u64Tmp);
2469 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2470 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2471 }
2472 else
2473 {
2474 IEM_MC_LOCAL(uint32_t, u32Tmp);
2475 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2476 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2477 }
2478 IEM_MC_ADVANCE_RIP();
2479 IEM_MC_END();
2480 }
2481 return VINF_SUCCESS;
2482}
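/*
 * For reference (editor's note based on the architecture manuals): without
 * REX.W this is movd mm,r/m32 with the 32-bit source zero extended to 64
 * bits, and with REX.W it is movq mm,r/m64, matching the U32_ZX_U64 and
 * U64 fetch paths above.
 */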
2483
2484/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2485FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2486{
2487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2488 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2489        IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
2490    else
2491        IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2492 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2493 {
2494 /* XMM, greg*/
2495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2496 IEM_MC_BEGIN(0, 1);
2497 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2499 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2500 {
2501 IEM_MC_LOCAL(uint64_t, u64Tmp);
2502 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2503 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2504 }
2505 else
2506 {
2507 IEM_MC_LOCAL(uint32_t, u32Tmp);
2508 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2509 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2510 }
2511 IEM_MC_ADVANCE_RIP();
2512 IEM_MC_END();
2513 }
2514 else
2515 {
2516 /* XMM, [mem] */
2517 IEM_MC_BEGIN(0, 2);
2518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2519 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2520        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2522 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2523 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2524 {
2525 IEM_MC_LOCAL(uint64_t, u64Tmp);
2526 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2527 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2528 }
2529 else
2530 {
2531 IEM_MC_LOCAL(uint32_t, u32Tmp);
2532 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2533 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2534 }
2535 IEM_MC_ADVANCE_RIP();
2536 IEM_MC_END();
2537 }
2538 return VINF_SUCCESS;
2539}
2540
2541/* Opcode 0xf3 0x0f 0x6e - invalid */
2542
2543
2544/** Opcode 0x0f 0x6f - movq Pq, Qq */
2545FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2546{
2547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2548 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2550 {
2551 /*
2552 * Register, register.
2553 */
2554 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2555 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2557 IEM_MC_BEGIN(0, 1);
2558 IEM_MC_LOCAL(uint64_t, u64Tmp);
2559 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2560 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2561 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2562 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2563 IEM_MC_ADVANCE_RIP();
2564 IEM_MC_END();
2565 }
2566 else
2567 {
2568 /*
2569 * Register, memory.
2570 */
2571 IEM_MC_BEGIN(0, 2);
2572 IEM_MC_LOCAL(uint64_t, u64Tmp);
2573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2574
2575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2577 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2578 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2579 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2580 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2581
2582 IEM_MC_ADVANCE_RIP();
2583 IEM_MC_END();
2584 }
2585 return VINF_SUCCESS;
2586}
2587
2588/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2589FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2590{
2591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2592 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2593 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2594 {
2595 /*
2596 * Register, register.
2597 */
2598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2599 IEM_MC_BEGIN(0, 0);
2600 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2601 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2602 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2603 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2604 IEM_MC_ADVANCE_RIP();
2605 IEM_MC_END();
2606 }
2607 else
2608 {
2609 /*
2610 * Register, memory.
2611 */
2612 IEM_MC_BEGIN(0, 2);
2613 IEM_MC_LOCAL(uint128_t, u128Tmp);
2614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2615
2616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2618 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2619 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2620 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2621 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2622
2623 IEM_MC_ADVANCE_RIP();
2624 IEM_MC_END();
2625 }
2626 return VINF_SUCCESS;
2627}
2628
2629/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2630FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2634 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2635 {
2636 /*
2637 * Register, register.
2638 */
2639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2640 IEM_MC_BEGIN(0, 0);
2641 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2642 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2643 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2644 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2645 IEM_MC_ADVANCE_RIP();
2646 IEM_MC_END();
2647 }
2648 else
2649 {
2650 /*
2651 * Register, memory.
2652 */
2653 IEM_MC_BEGIN(0, 2);
2654 IEM_MC_LOCAL(uint128_t, u128Tmp);
2655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2656
2657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2659 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2660 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2661 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2662 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2663
2664 IEM_MC_ADVANCE_RIP();
2665 IEM_MC_END();
2666 }
2667 return VINF_SUCCESS;
2668}
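/*
 * Note: the only difference from the movdqa handler above is the memory
 * path, which uses IEM_MC_FETCH_MEM_U128 rather than the alignment checking
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE, since movdqu tolerates unaligned
 * operands while movdqa raises #GP(0) on a misaligned 16-byte access.
 */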
2669
2670
2671/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2672FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2673{
2674 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2676 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2677 {
2678 /*
2679 * Register, register.
2680 */
2681 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2683
2684 IEM_MC_BEGIN(3, 0);
2685 IEM_MC_ARG(uint64_t *, pDst, 0);
2686 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2687 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2688 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2689 IEM_MC_PREPARE_FPU_USAGE();
2690 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2691 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2692 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2693 IEM_MC_ADVANCE_RIP();
2694 IEM_MC_END();
2695 }
2696 else
2697 {
2698 /*
2699 * Register, memory.
2700 */
2701 IEM_MC_BEGIN(3, 2);
2702 IEM_MC_ARG(uint64_t *, pDst, 0);
2703 IEM_MC_LOCAL(uint64_t, uSrc);
2704 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2706
2707        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2708 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2709 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2711 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2712
2713 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2714 IEM_MC_PREPARE_FPU_USAGE();
2715 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2716 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2717
2718 IEM_MC_ADVANCE_RIP();
2719 IEM_MC_END();
2720 }
2721 return VINF_SUCCESS;
2722}
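/*
 * Illustration (editor's note, not from the original sources): pshufw picks
 * each destination word from the source using two immediate bits per lane:
 *      dst.w[i] = src.w[(bEvil >> (i * 2)) & 3]   for i = 0..3
 * so pshufw mm0, mm1, 0x1b, for instance, reverses the four words.
 */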
2723
2724/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2725FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2726{
2727 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2729 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2730 {
2731 /*
2732 * Register, register.
2733 */
2734 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2736
2737 IEM_MC_BEGIN(3, 0);
2738 IEM_MC_ARG(uint128_t *, pDst, 0);
2739 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2740 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2741 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2742 IEM_MC_PREPARE_SSE_USAGE();
2743 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2744 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2745 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2746 IEM_MC_ADVANCE_RIP();
2747 IEM_MC_END();
2748 }
2749 else
2750 {
2751 /*
2752 * Register, memory.
2753 */
2754 IEM_MC_BEGIN(3, 2);
2755 IEM_MC_ARG(uint128_t *, pDst, 0);
2756 IEM_MC_LOCAL(uint128_t, uSrc);
2757 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2759
2760        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2761 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2762 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2764 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2765
2766 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2767 IEM_MC_PREPARE_SSE_USAGE();
2768 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2769 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2770
2771 IEM_MC_ADVANCE_RIP();
2772 IEM_MC_END();
2773 }
2774 return VINF_SUCCESS;
2775}
2776
2777/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
2778FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2779{
2780 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2781 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2782 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2783 {
2784 /*
2785 * Register, register.
2786 */
2787 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2789
2790 IEM_MC_BEGIN(3, 0);
2791 IEM_MC_ARG(uint128_t *, pDst, 0);
2792 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2793 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2794 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2795 IEM_MC_PREPARE_SSE_USAGE();
2796 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2797 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2798 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2799 IEM_MC_ADVANCE_RIP();
2800 IEM_MC_END();
2801 }
2802 else
2803 {
2804 /*
2805 * Register, memory.
2806 */
2807 IEM_MC_BEGIN(3, 2);
2808 IEM_MC_ARG(uint128_t *, pDst, 0);
2809 IEM_MC_LOCAL(uint128_t, uSrc);
2810 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2812
2813        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2814 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2815 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2817 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2818
2819 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2820 IEM_MC_PREPARE_SSE_USAGE();
2821 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2822 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2823
2824 IEM_MC_ADVANCE_RIP();
2825 IEM_MC_END();
2826 }
2827 return VINF_SUCCESS;
2828}
2829
2830/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
2831FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2832{
2833 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2834 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2836 {
2837 /*
2838 * Register, register.
2839 */
2840 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2842
2843 IEM_MC_BEGIN(3, 0);
2844 IEM_MC_ARG(uint128_t *, pDst, 0);
2845 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2846 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2847 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2848 IEM_MC_PREPARE_SSE_USAGE();
2849 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2850 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2851 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2852 IEM_MC_ADVANCE_RIP();
2853 IEM_MC_END();
2854 }
2855 else
2856 {
2857 /*
2858 * Register, memory.
2859 */
2860 IEM_MC_BEGIN(3, 2);
2861 IEM_MC_ARG(uint128_t *, pDst, 0);
2862 IEM_MC_LOCAL(uint128_t, uSrc);
2863 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2865
2866        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2867 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2868 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2870 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2871
2872 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2873 IEM_MC_PREPARE_SSE_USAGE();
2874 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2875 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2876
2877 IEM_MC_ADVANCE_RIP();
2878 IEM_MC_END();
2879 }
2880 return VINF_SUCCESS;
2881}
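/*
 * The three 0x66/0xf3/0xf2 0x0f 0x70 forms above differ only in which lanes
 * they move: pshufd shuffles all four dwords, pshufhw shuffles the high
 * four words and copies the low quadword unchanged, and pshuflw shuffles
 * the low four words and copies the high quadword unchanged.
 */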
2882
2883
2884/** Opcode 0x0f 0x71 11/2. */
2885FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2886
2887/** Opcode 0x66 0x0f 0x71 11/2. */
2888FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2889
2890/** Opcode 0x0f 0x71 11/4. */
2891FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2892
2893/** Opcode 0x66 0x0f 0x71 11/4. */
2894FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2895
2896/** Opcode 0x0f 0x71 11/6. */
2897FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2898
2899/** Opcode 0x66 0x0f 0x71 11/6. */
2900FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2901
2902
2903/** Opcode 0x0f 0x71. */
2904FNIEMOP_DEF(iemOp_Grp12)
2905{
2906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2907 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2908 return IEMOP_RAISE_INVALID_OPCODE();
2909 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2910 {
2911 case 0: case 1: case 3: case 5: case 7:
2912 return IEMOP_RAISE_INVALID_OPCODE();
2913 case 2:
2914 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2915 {
2916 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2917 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2918 default: return IEMOP_RAISE_INVALID_OPCODE();
2919 }
2920 case 4:
2921 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2922 {
2923 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2924 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2925 default: return IEMOP_RAISE_INVALID_OPCODE();
2926 }
2927 case 6:
2928 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2929 {
2930 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2931 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2932 default: return IEMOP_RAISE_INVALID_OPCODE();
2933 }
2934 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2935 }
2936}
2937
2938
2939/** Opcode 0x0f 0x72 11/2. */
2940FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
2941
2942/** Opcode 0x66 0x0f 0x72 11/2. */
2943FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
2944
2945/** Opcode 0x0f 0x72 11/4. */
2946FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
2947
2948/** Opcode 0x66 0x0f 0x72 11/4. */
2949FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
2950
2951/** Opcode 0x0f 0x72 11/6. */
2952FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
2953
2954/** Opcode 0x66 0x0f 0x72 11/6. */
2955FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2956
2957
2958/** Opcode 0x0f 0x72. */
2959FNIEMOP_DEF(iemOp_Grp13)
2960{
2961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2962 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2963 return IEMOP_RAISE_INVALID_OPCODE();
2964 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2965 {
2966 case 0: case 1: case 3: case 5: case 7:
2967 return IEMOP_RAISE_INVALID_OPCODE();
2968 case 2:
2969 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2970 {
2971 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2972 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2973 default: return IEMOP_RAISE_INVALID_OPCODE();
2974 }
2975 case 4:
2976 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2977 {
2978 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2979 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2980 default: return IEMOP_RAISE_INVALID_OPCODE();
2981 }
2982 case 6:
2983 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2984 {
2985 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2986 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2987 default: return IEMOP_RAISE_INVALID_OPCODE();
2988 }
2989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2990 }
2991}
2992
2993
2994/** Opcode 0x0f 0x73 11/2. */
2995FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
2996
2997/** Opcode 0x66 0x0f 0x73 11/2. */
2998FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
2999
3000/** Opcode 0x66 0x0f 0x73 11/3. */
3001FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3002
3003/** Opcode 0x0f 0x73 11/6. */
3004FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3005
3006/** Opcode 0x66 0x0f 0x73 11/6. */
3007FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3008
3009/** Opcode 0x66 0x0f 0x73 11/7. */
3010FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3011
3012
3013/** Opcode 0x0f 0x73. */
3014FNIEMOP_DEF(iemOp_Grp14)
3015{
3016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3017 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3018 return IEMOP_RAISE_INVALID_OPCODE();
3019 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3020 {
3021 case 0: case 1: case 4: case 5:
3022 return IEMOP_RAISE_INVALID_OPCODE();
3023 case 2:
3024 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3025 {
3026 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3027 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3028 default: return IEMOP_RAISE_INVALID_OPCODE();
3029 }
3030 case 3:
3031 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3032 {
3033 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3034 default: return IEMOP_RAISE_INVALID_OPCODE();
3035 }
3036 case 6:
3037 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3038 {
3039 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3040 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3041 default: return IEMOP_RAISE_INVALID_OPCODE();
3042 }
3043 case 7:
3044 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3045 {
3046 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3047 default: return IEMOP_RAISE_INVALID_OPCODE();
3048 }
3049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3050 }
3051}
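/*
 * Summary of the three shift-by-immediate groups dispatched above:
 *      0x0f 0x71 (Grp12): /2 psrlw, /4 psraw, /6 psllw
 *      0x0f 0x72 (Grp13): /2 psrld, /4 psrad, /6 pslld
 *      0x0f 0x73 (Grp14): /2 psrlq, /6 psllq, plus the SSE2-only byte
 *                         shifts /3 psrldq and /7 pslldq
 * In each group the bare opcode works on MMX registers and the 0x66 prefix
 * selects the XMM form; a register-direct ModR/M encoding is mandatory.
 */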
3052
3053
3054/**
3055 * Common worker for MMX instructions on the form:
3056 * pxxx mm1, mm2/mem64
3057 */
3058FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3059{
3060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3062 {
3063 /*
3064 * Register, register.
3065 */
3066 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3067 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3069 IEM_MC_BEGIN(2, 0);
3070 IEM_MC_ARG(uint64_t *, pDst, 0);
3071 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3072 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3073 IEM_MC_PREPARE_FPU_USAGE();
3074 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3075 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3076 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3077 IEM_MC_ADVANCE_RIP();
3078 IEM_MC_END();
3079 }
3080 else
3081 {
3082 /*
3083 * Register, memory.
3084 */
3085 IEM_MC_BEGIN(2, 2);
3086 IEM_MC_ARG(uint64_t *, pDst, 0);
3087 IEM_MC_LOCAL(uint64_t, uSrc);
3088 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3090
3091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3093 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3094 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3095
3096 IEM_MC_PREPARE_FPU_USAGE();
3097 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3098 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3099
3100 IEM_MC_ADVANCE_RIP();
3101 IEM_MC_END();
3102 }
3103 return VINF_SUCCESS;
3104}
3105
3106
3107/**
3108 * Common worker for SSE2 instructions on the forms:
3109 * pxxx xmm1, xmm2/mem128
3110 *
3111 * Proper alignment of the 128-bit operand is enforced.
3112 * Exceptions type 4. SSE2 cpuid checks.
3113 */
3114FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3115{
3116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3118 {
3119 /*
3120 * Register, register.
3121 */
3122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3123 IEM_MC_BEGIN(2, 0);
3124 IEM_MC_ARG(uint128_t *, pDst, 0);
3125 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3126 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3127 IEM_MC_PREPARE_SSE_USAGE();
3128 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3129 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3130 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3131 IEM_MC_ADVANCE_RIP();
3132 IEM_MC_END();
3133 }
3134 else
3135 {
3136 /*
3137 * Register, memory.
3138 */
3139 IEM_MC_BEGIN(2, 2);
3140 IEM_MC_ARG(uint128_t *, pDst, 0);
3141 IEM_MC_LOCAL(uint128_t, uSrc);
3142 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3144
3145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3147 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3148 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3149
3150 IEM_MC_PREPARE_SSE_USAGE();
3151 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3152 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3153
3154 IEM_MC_ADVANCE_RIP();
3155 IEM_MC_END();
3156 }
3157 return VINF_SUCCESS;
3158}
3159
3160
3161/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3162FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3163{
3164 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3165 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3166}
3167
3168/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3169FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3170{
3171 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3172 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3173}
3174
3175/* Opcode 0xf3 0x0f 0x74 - invalid */
3176/* Opcode 0xf2 0x0f 0x74 - invalid */
3177
3178
3179/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3180FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3181{
3182 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3183 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3184}
3185
3186/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3187FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3188{
3189 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3190 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3191}
3192
3193/* Opcode 0xf3 0x0f 0x75 - invalid */
3194/* Opcode 0xf2 0x0f 0x75 - invalid */
3195
3196
3197/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3198FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3199{
3200 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3201 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3202}
3203
3204/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3205FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3206{
3207 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3208 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3209}
3210
3211/* Opcode 0xf3 0x0f 0x76 - invalid */
3212/* Opcode 0xf2 0x0f 0x76 - invalid */
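/*
 * Illustration (editor's note, not from the original sources): the pcmpeq*
 * family produces an all-ones or all-zeros mask per element, e.g. for
 * pcmpeqb:
 *      dst.b[i] = (dst.b[i] == src.b[i]) ? 0xff : 0x00
 * which guest code typically combines with pand/pandn style masking.
 */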
3213
3214
3215/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3216FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3217/* Opcode 0x66 0x0f 0x77 - invalid */
3218/* Opcode 0xf3 0x0f 0x77 - invalid */
3219/* Opcode 0xf2 0x0f 0x77 - invalid */
3220
3221/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3222FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3223/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3224FNIEMOP_STUB(iemOp_AmdGrp17);
3225/* Opcode 0xf3 0x0f 0x78 - invalid */
3226/* Opcode 0xf2 0x0f 0x78 - invalid */
3227
3228/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3229FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3230/* Opcode 0x66 0x0f 0x79 - invalid */
3231/* Opcode 0xf3 0x0f 0x79 - invalid */
3232/* Opcode 0xf2 0x0f 0x79 - invalid */
3233
3234/* Opcode 0x0f 0x7a - invalid */
3235/* Opcode 0x66 0x0f 0x7a - invalid */
3236/* Opcode 0xf3 0x0f 0x7a - invalid */
3237/* Opcode 0xf2 0x0f 0x7a - invalid */
3238
3239/* Opcode 0x0f 0x7b - invalid */
3240/* Opcode 0x66 0x0f 0x7b - invalid */
3241/* Opcode 0xf3 0x0f 0x7b - invalid */
3242/* Opcode 0xf2 0x0f 0x7b - invalid */
3243
3244/* Opcode 0x0f 0x7c - invalid */
3245/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3246FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3247/* Opcode 0xf3 0x0f 0x7c - invalid */
3248/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3249FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3250
3251/* Opcode 0x0f 0x7d - invalid */
3252/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3253FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3254/* Opcode 0xf3 0x0f 0x7d - invalid */
3255/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3256FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3257
3258
3259/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3260FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3261{
3262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3263 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3264 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3265 else
3266 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3267 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3268 {
3269 /* greg, MMX */
3270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3271 IEM_MC_BEGIN(0, 1);
3272 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3273 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3274 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3275 {
3276 IEM_MC_LOCAL(uint64_t, u64Tmp);
3277 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3278 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3279 }
3280 else
3281 {
3282 IEM_MC_LOCAL(uint32_t, u32Tmp);
3283 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3284 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3285 }
3286 IEM_MC_ADVANCE_RIP();
3287 IEM_MC_END();
3288 }
3289 else
3290 {
3291 /* [mem], MMX */
3292 IEM_MC_BEGIN(0, 2);
3293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3294 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3295        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3297 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3298 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3299 {
3300 IEM_MC_LOCAL(uint64_t, u64Tmp);
3301 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3302 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3303 }
3304 else
3305 {
3306 IEM_MC_LOCAL(uint32_t, u32Tmp);
3307 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3308 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3309 }
3310 IEM_MC_ADVANCE_RIP();
3311 IEM_MC_END();
3312 }
3313 return VINF_SUCCESS;
3314}
3315
3316/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3317FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3318{
3319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3320 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3321        IEMOP_MNEMONIC(vmovq_Eq_Vq, "vmovq Eq,Vq");
3322    else
3323        IEMOP_MNEMONIC(vmovd_Ed_Vd, "vmovd Ed,Vd");
3324 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3325 {
3326 /* greg, XMM */
3327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3328 IEM_MC_BEGIN(0, 1);
3329 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3330 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3331 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3332 {
3333 IEM_MC_LOCAL(uint64_t, u64Tmp);
3334 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3335 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3336 }
3337 else
3338 {
3339 IEM_MC_LOCAL(uint32_t, u32Tmp);
3340 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3341 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3342 }
3343 IEM_MC_ADVANCE_RIP();
3344 IEM_MC_END();
3345 }
3346 else
3347 {
3348 /* [mem], XMM */
3349 IEM_MC_BEGIN(0, 2);
3350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3351 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3352        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3354 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3355 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3356 {
3357 IEM_MC_LOCAL(uint64_t, u64Tmp);
3358 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3359 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3360 }
3361 else
3362 {
3363 IEM_MC_LOCAL(uint32_t, u32Tmp);
3364 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3365 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3366 }
3367 IEM_MC_ADVANCE_RIP();
3368 IEM_MC_END();
3369 }
3370 return VINF_SUCCESS;
3371}
3372
3373/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3374FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3375/* Opcode 0xf2 0x0f 0x7e - invalid */
3376
3377
3378/** Opcode 0x0f 0x7f - movq Qq, Pq */
3379FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3380{
3381 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3383 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3384 {
3385 /*
3386 * Register, register.
3387 */
3388 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3389 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3391 IEM_MC_BEGIN(0, 1);
3392 IEM_MC_LOCAL(uint64_t, u64Tmp);
3393 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3394 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3395 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3396 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3397 IEM_MC_ADVANCE_RIP();
3398 IEM_MC_END();
3399 }
3400 else
3401 {
3402 /*
3403 * Register, memory.
3404 */
3405 IEM_MC_BEGIN(0, 2);
3406 IEM_MC_LOCAL(uint64_t, u64Tmp);
3407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3408
3409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3411 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3412 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3413
3414 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3415 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3416
3417 IEM_MC_ADVANCE_RIP();
3418 IEM_MC_END();
3419 }
3420 return VINF_SUCCESS;
3421}
3422
/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
{
    IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

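/*
 * movdqa insists on a 16-byte aligned memory operand and raises #GP(0)
 * otherwise, hence IEM_MC_STORE_MEM_U128_ALIGN_SSE above; the movdqu
 * variant below uses the plain unaligned U128 store. For example:
 *
 *      movdqa [rsp+8], xmm1    ; faults unless rsp+8 is 16-byte aligned
 *      movdqu [rsp+8], xmm1    ; works for any address
 */
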
/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
{
    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf2 0x0f 0x7f - invalid */



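/*
 * Jcc Jv (0x0f 0x80..0x8f): near conditional jumps with a 16- or 32-bit
 * signed displacement, taken when the condition below holds (a quick
 * reference, not from the IEM sources):
 *
 *      0x80 jo   OF=1           0x88 js   SF=1
 *      0x81 jno  OF=0           0x89 jns  SF=0
 *      0x82 jc   CF=1           0x8a jp   PF=1
 *      0x83 jnc  CF=0           0x8b jnp  PF=0
 *      0x84 je   ZF=1           0x8c jl   SF!=OF
 *      0x85 jne  ZF=0           0x8d jnl  SF==OF
 *      0x86 jbe  CF=1 or ZF=1   0x8e jle  ZF=1 or SF!=OF
 *      0x87 jnbe CF=0 and ZF=0  0x8f jnle ZF=0 and SF==OF
 *
 * In 64-bit mode the operand size defaults to 64 bits while the
 * displacement stays 32-bit and is sign-extended, which is why each
 * decoder starts with IEMOP_HLP_DEFAULT_64BIT_OP_SIZE().
 */
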
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


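/*
 * SETcc Eb (0x0f 0x90..0x9f): stores 1 in the byte operand when the
 * condition holds, 0 otherwise, using the same flag tests as the Jcc
 * table above ('b'/'a' forms are unsigned, 'l'/'g' forms signed).
 * The ModR/M reg field selects nothing here (see the encoding-test
 * todos below). Typical use:
 *
 *      cmp  eax, ebx
 *      sete cl              ; cl = (eax == ebx) ? 1 : 0
 */
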
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common 'push segment-register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}

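/*
 * Note: pushing ES/CS/SS/DS is invalid in 64-bit mode (the one-byte
 * opcodes), which is what the iReg < X86_SREG_FS guard covers; the FS/GS
 * callers in this file never trigger it. The 32-bit path uses
 * IEM_MC_PUSH_U32_SREG because current CPUs only write the low word of
 * the 32-bit stack slot when pushing a segment register.
 */
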

/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}


/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}

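/*
 * cpuid touches more VM state than the IEM_MC_* blocks can express
 * (all of eax/ebx/ecx/edx plus the CPUID leaf tables), so it is
 * deferred to a C implementation via IEM_MC_DEFER_TO_CIMPL_0 above,
 * just like the segment register pops.
 */
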

/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int16_t, i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int64_t, i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

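/*
 * The memory forms above treat the Gv bit offset as signed and possibly
 * reaching beyond the addressed operand, so the worker folds the offset
 * into the effective address first. Worked example for the 16-bit case
 * with a bit offset of 35 in the source register:
 *
 *      i16AddrAdj = 35 >> 4 = 2 words  ->  2 << 1 = 4 bytes added
 *      u16Src     = 35 & 0x0f = 3      ->  bit 3 of the word at mem+4
 *
 * CF receives the selected bit. BT only needs read access
 * (IEM_ACCESS_DATA_R); BTS/BTR/BTC map the operand read-write and
 * dispatch to pfnLockedUxx when a LOCK prefix is present.
 */
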

/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

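/*
 * shld/shrd shift bits from the source register into the destination,
 * e.g. shld eax, ebx, 8 shifts eax left by eight and fills the vacated
 * low bits from the top of ebx. Note that the memory form passes cbImm=1
 * to IEM_MC_CALC_RM_EFF_ADDR because the Ib byte still follows the
 * ModR/M bytes; RIP-relative addressing must account for it.
 */
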
5172
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}


/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();


/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}


/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);


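/*
 * The three fence workers below share one pattern: the guest must report SSE2
 * for the encoding to be valid, and when the *host* CPU lacks the matching
 * fence instruction the emulation falls back on iemAImpl_alt_mem_fence, an
 * alternative serialising memory operation, so the ordering guarantee is
 * still provided.
 */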
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);


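/*
 * Dispatch sketch for group 15 (informative): the memory forms (mod != 3) are
 * selected by the ModR/M reg field alone (fxsave .. clflush), while the
 * register forms (mod == 3) additionally depend on the repeat prefix - no
 * prefix gives the fences, F3 gives the RDFSBASE/RDGSBASE/WRFSBASE/WRGSBASE
 * family, and anything else decodes as an invalid opcode.
 */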
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}


/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}


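/*
 * Reader's note on CMPXCHG (informative only): the accumulator
 * (AL/AX/EAX/RAX) is compared with the destination; on a match ZF is set and
 * the source operand is written to the destination, otherwise ZF is cleared
 * and the destination value is loaded into the accumulator.  The memory forms
 * below map the destination read/write unconditionally, mirroring the real
 * instruction's documented behaviour of always performing a write cycle.
 */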
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


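/*
 * Worker note: LSS/LFS/LGS load a far pointer from memory.  The memory layout
 * is the offset first (2/4/8 bytes according to the effective operand size)
 * with the 16-bit selector immediately after it, which is what the two
 * fetches with displacement below implement before deferring to
 * iemCImpl_load_SReg_Greg for the actual segment register load.
 */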
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


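/*
 * The MOVZX decoders below (and the MOVSX ones at 0x0f 0xbe/0xbf) pick the
 * destination width from the effective operand size and simply zero- or
 * sign-extend the narrower source, hence the _ZX_/_SX_ fetch variants.
 */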
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here;
     *        assuming that it will be ignored.  Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);


/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}


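/*
 * Group 8 is the immediate bit-test family (BT/BTS/BTR/BTC Ev,Ib).  Only the
 * masked low bits of the Ib operand are used (0x0f/0x1f/0x3f for 16/32/64-bit
 * operands), so unlike the Ev,Gv register-offset forms the access never
 * reaches outside the addressed operand unit.
 */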
/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt Ev,Ib");  break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}


/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}


/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}


/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);


/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}


/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);


/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here;
     *        assuming that it will be ignored.  Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


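/*
 * Reader's note on XADD (informative only): the instruction exchanges the two
 * operands and stores their sum in the destination, roughly
 *
 *      uTmp = uDst; uDst += uSrc; uSrc = uTmp;
 *
 * The memory forms honour the LOCK prefix, which is why the locked worker
 * variants are selected below when the prefix is present.
 */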
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);


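/*
 * MOVNTI is a non-temporal (write-combining hint) integer store; only the
 * register -> memory encoding exists.  The decoder below checks the guest
 * SSE2 feature after calculating the effective address, and treats both the
 * register form and the 16-bit operand size as invalid encodings.
 */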
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */


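/*
 * Reader's note on CMPXCHG8B (informative only): EDX:EAX is compared with the
 * 64-bit memory operand; on a match ZF is set and ECX:EBX is stored to
 * memory, otherwise ZF is cleared and the memory value is loaded into
 * EDX:EAX.  The worker below therefore gathers both register pairs up front
 * and only writes the accumulator pair back when ZF ends up clear.
 */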
6811/** Opcode 0x0f 0xc7 !11/1. */
6812FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6813{
6814 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6815
6816 IEM_MC_BEGIN(4, 3);
6817 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6818 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6819 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6820 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6821 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6822 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6824
6825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6826 IEMOP_HLP_DONE_DECODING();
6827 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6828
6829 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6830 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6831 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6832
6833 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6834 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6835 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6836
6837 IEM_MC_FETCH_EFLAGS(EFlags);
6838 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6839 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6840 else
6841 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6842
6843 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6844 IEM_MC_COMMIT_EFLAGS(EFlags);
6845 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6846 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6847 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6848 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6849 IEM_MC_ENDIF();
6850 IEM_MC_ADVANCE_RIP();
6851
6852 IEM_MC_END();
6853 return VINF_SUCCESS;
6854}
6855
6856
6857/** Opcode REX.W 0x0f 0xc7 !11/1. */
6858FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6859{
6860 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6861 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6862 {
6863#if 0
6864 RT_NOREF(bRm);
6865 IEMOP_BITCH_ABOUT_STUB();
6866 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6867#else
6868 IEM_MC_BEGIN(4, 3);
6869 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6870 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6871 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6872 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6873 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6874 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6876
6877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6878 IEMOP_HLP_DONE_DECODING();
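        /* Unlike most SSE-era memory operands, a misaligned cmpxchg16b operand
           raises #GP(0) regardless of alignment-check state, hence the explicit
           16-byte check here. */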
6879 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6880 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6881
6882 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6883 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6884 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6885
6886 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6887 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6888 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6889
6890 IEM_MC_FETCH_EFLAGS(EFlags);
6891# ifdef RT_ARCH_AMD64
6892 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6893 {
6894 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6895 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6896 else
6897 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6898 }
6899 else
6900# endif
6901 {
6902 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
6903 accesses and is not atomic, which works fine in a uni-CPU guest
6904 configuration (ignoring DMA). If guest SMP is active we have no choice
6905 but to use a rendezvous callback here. Sigh. */
6906 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6907 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6908 else
6909 {
6910 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6911 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6912 }
6913 }
6914
6915 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6916 IEM_MC_COMMIT_EFLAGS(EFlags);
6917 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6918 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6919 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6920 IEM_MC_ENDIF();
6921 IEM_MC_ADVANCE_RIP();
6922
6923 IEM_MC_END();
6924 return VINF_SUCCESS;
6925#endif
6926 }
6927 Log(("cmpxchg16b -> #UD\n"));
6928 return IEMOP_RAISE_INVALID_OPCODE();
6929}
6930
6931
6932/** Opcode 0x0f 0xc7 11/6. */
6933FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6934
6935/** Opcode 0x0f 0xc7 !11/6. */
6936FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6937
6938/** Opcode 0x66 0x0f 0xc7 !11/6. */
6939FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6940
6941/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6942FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6943
6944/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6945FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6946
6947
6948/** Opcode 0x0f 0xc7. */
6949FNIEMOP_DEF(iemOp_Grp9)
6950{
6951 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
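    /* Quick summary of the /reg dispatch below: /1 = cmpxchg8b (or cmpxchg16b
       with REX.W), /6 = rdrand for the register form and vmptrld/vmclear/vmxon
       for the memory forms (selected by prefix), /7 = vmptrst; everything else
       is #UD. */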
6953 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6954 {
6955 case 0: case 2: case 3: case 4: case 5:
6956 return IEMOP_RAISE_INVALID_OPCODE();
6957 case 1:
6958 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6959 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6960 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6961 return IEMOP_RAISE_INVALID_OPCODE();
6962 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6963 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6964 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6965 case 6:
6966 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6967 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6968 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6969 {
6970 case 0:
6971 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6972 case IEM_OP_PRF_SIZE_OP:
6973 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6974 case IEM_OP_PRF_REPZ:
6975 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6976 default:
6977 return IEMOP_RAISE_INVALID_OPCODE();
6978 }
6979 case 7:
6980 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6981 {
6982 case 0:
6983 case IEM_OP_PRF_REPZ:
6984 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6985 default:
6986 return IEMOP_RAISE_INVALID_OPCODE();
6987 }
6988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6989 }
6990}
6991
6992
6993/**
6994 * Common 'bswap register' helper.
6995 */
6996FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6997{
6998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6999 switch (pVCpu->iem.s.enmEffOpSize)
7000 {
7001 case IEMMODE_16BIT:
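            /* Note: the 16-bit (operand-size prefixed) form of bswap is
               documented as producing an undefined result, so whatever
               iemAImpl_bswap_u16 produces here is a best-effort choice
               rather than an architectural guarantee. */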
7002 IEM_MC_BEGIN(1, 0);
7003 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7004 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7005 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7006 IEM_MC_ADVANCE_RIP();
7007 IEM_MC_END();
7008 return VINF_SUCCESS;
7009
7010 case IEMMODE_32BIT:
7011 IEM_MC_BEGIN(1, 0);
7012 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7013 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7014 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7015 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7016 IEM_MC_ADVANCE_RIP();
7017 IEM_MC_END();
7018 return VINF_SUCCESS;
7019
7020 case IEMMODE_64BIT:
7021 IEM_MC_BEGIN(1, 0);
7022 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7023 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7024 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7025 IEM_MC_ADVANCE_RIP();
7026 IEM_MC_END();
7027 return VINF_SUCCESS;
7028
7029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7030 }
7031}
7032
7033
7034/** Opcode 0x0f 0xc8. */
7035FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7036{
7037 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7038 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7039 prefix, but it appears REX.B is the correct one. For a parallel
7040 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
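    /* Encoding example (assumed from the REX rules): 0f c8 is 'bswap eax',
       while 41 0f c8 sets REX.B and selects r8d instead. */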
7041 IEMOP_HLP_MIN_486();
7042 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7043}
7044
7045
7046/** Opcode 0x0f 0xc9. */
7047FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7048{
7049 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7050 IEMOP_HLP_MIN_486();
7051 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7052}
7053
7054
7055/** Opcode 0x0f 0xca. */
7056FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7057{
7058 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7059 IEMOP_HLP_MIN_486();
7060 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7061}
7062
7063
7064/** Opcode 0x0f 0xcb. */
7065FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7066{
7067 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7068 IEMOP_HLP_MIN_486();
7069 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7070}
7071
7072
7073/** Opcode 0x0f 0xcc. */
7074FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7075{
7076 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7077 IEMOP_HLP_MIN_486();
7078 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7079}
7080
7081
7082/** Opcode 0x0f 0xcd. */
7083FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7084{
7085 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7086 IEMOP_HLP_MIN_486();
7087 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7088}
7089
7090
7091/** Opcode 0x0f 0xce. */
7092FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7093{
7094 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7095 IEMOP_HLP_MIN_486();
7096 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7097}
7098
7099
7100/** Opcode 0x0f 0xcf. */
7101FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7102{
7103 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7104 IEMOP_HLP_MIN_486();
7105 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7106}
7107
7108
7109/* Opcode 0x0f 0xd0 - invalid */
7110/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7111FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7112/* Opcode 0xf3 0x0f 0xd0 - invalid */
7113/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7114FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7115
7116/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7117FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7118/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7119FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7120/* Opcode 0xf3 0x0f 0xd1 - invalid */
7121/* Opcode 0xf2 0x0f 0xd1 - invalid */
7122
7123/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7124FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7125/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7126FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7127/* Opcode 0xf3 0x0f 0xd2 - invalid */
7128/* Opcode 0xf2 0x0f 0xd2 - invalid */
7129
7130/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7131FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7132/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7133FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7134/* Opcode 0xf3 0x0f 0xd3 - invalid */
7135/* Opcode 0xf2 0x0f 0xd3 - invalid */
7136
7137/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7138FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7139/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7140FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7141/* Opcode 0xf3 0x0f 0xd4 - invalid */
7142/* Opcode 0xf2 0x0f 0xd4 - invalid */
7143
7144/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7145FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7146/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7147FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7148/* Opcode 0xf3 0x0f 0xd5 - invalid */
7149/* Opcode 0xf2 0x0f 0xd5 - invalid */
7150
7151/* Opcode 0x0f 0xd6 - invalid */
7152/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7153FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7154/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7155FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7156/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7157FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7158#if 0
7159FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7160{
7161 /* Docs say register only. */
7162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7163
7164 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7165 {
7166 case IEM_OP_PRF_SIZE_OP: /* SSE */
7167 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7168 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7169 IEM_MC_BEGIN(2, 0);
7170 IEM_MC_ARG(uint64_t *, pDst, 0);
7171 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7172 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7173 IEM_MC_PREPARE_SSE_USAGE();
7174 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7175 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7176 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7177 IEM_MC_ADVANCE_RIP();
7178 IEM_MC_END();
7179 return VINF_SUCCESS;
7180
7181 case 0: /* MMX */
7182 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7183 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7184 IEM_MC_BEGIN(2, 0);
7185 IEM_MC_ARG(uint64_t *, pDst, 0);
7186 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7187 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7188 IEM_MC_PREPARE_FPU_USAGE();
7189 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7190 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7191 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7192 IEM_MC_ADVANCE_RIP();
7193 IEM_MC_END();
7194 return VINF_SUCCESS;
7195
7196 default:
7197 return IEMOP_RAISE_INVALID_OPCODE();
7198 }
7199}
7200#endif
7201
7202
7203/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7204FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7205{
7206 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7207 /** @todo testcase: Check that the instruction implicitly clears the high
7208 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
7209 * and opcode modifications are made to work with the whole width (not
7210 * just 128). */
7211 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7212 /* Docs say register only. */
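    /* Operation sketch: bit n of the result is the most significant bit of
       source byte n, i.e. Gd = MSB(b7)<<7 | ... | MSB(b0)<<0 for the 8-byte
       MMX source (a 16-bit mask for the SSE form below), with at least bits
       31:8 of the destination cleared; see the todo above for bits 63:32. */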
7213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7214 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7215 {
7216 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7217 IEM_MC_BEGIN(2, 0);
7218 IEM_MC_ARG(uint64_t *, pDst, 0);
7219 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7220 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7221 IEM_MC_PREPARE_FPU_USAGE();
7222 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7223 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7224 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7225 IEM_MC_ADVANCE_RIP();
7226 IEM_MC_END();
7227 return VINF_SUCCESS;
7228 }
7229 return IEMOP_RAISE_INVALID_OPCODE();
7230}
7231
7232/** Opcode 0x66 0x0f 0xd7 - */
7233FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7234{
7235 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7236 /** @todo testcase: Check that the instruction implicitly clears the high
7237 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
7238 * and opcode modifications are made to work with the whole width (not
7239 * just 128). */
7240 IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7241 /* Docs say register only. */
7242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7243 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7244 {
7245 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7246 IEM_MC_BEGIN(2, 0);
7247 IEM_MC_ARG(uint64_t *, pDst, 0);
7248 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7249 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7250 IEM_MC_PREPARE_SSE_USAGE();
7251 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7252 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7253 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7254 IEM_MC_ADVANCE_RIP();
7255 IEM_MC_END();
7256 return VINF_SUCCESS;
7257 }
7258 return IEMOP_RAISE_INVALID_OPCODE();
7259}
7260
7261/* Opcode 0xf3 0x0f 0xd7 - invalid */
7262/* Opcode 0xf2 0x0f 0xd7 - invalid */
7263
7264
7265/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7266FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7267/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7268FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7269/* Opcode 0xf3 0x0f 0xd8 - invalid */
7270/* Opcode 0xf2 0x0f 0xd8 - invalid */
7271
7272/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7273FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7274/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7275FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7276/* Opcode 0xf3 0x0f 0xd9 - invalid */
7277/* Opcode 0xf2 0x0f 0xd9 - invalid */
7278
7279/** Opcode 0x0f 0xda - pminub Pq, Qq */
7280FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7281/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7282FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7283/* Opcode 0xf3 0x0f 0xda - invalid */
7284/* Opcode 0xf2 0x0f 0xda - invalid */
7285
7286/** Opcode 0x0f 0xdb - pand Pq, Qq */
7287FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7288/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7289FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7290/* Opcode 0xf3 0x0f 0xdb - invalid */
7291/* Opcode 0xf2 0x0f 0xdb - invalid */
7292
7293/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7294FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7295/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7296FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7297/* Opcode 0xf3 0x0f 0xdc - invalid */
7298/* Opcode 0xf2 0x0f 0xdc - invalid */
7299
7300/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7301FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7302/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7303FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7304/* Opcode 0xf3 0x0f 0xdd - invalid */
7305/* Opcode 0xf2 0x0f 0xdd - invalid */
7306
7307/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7308FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7309/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7310FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7311/* Opcode 0xf3 0x0f 0xde - invalid */
7312/* Opcode 0xf2 0x0f 0xde - invalid */
7313
7314/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7315FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7316/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7317FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7318/* Opcode 0xf3 0x0f 0xdf - invalid */
7319/* Opcode 0xf2 0x0f 0xdf - invalid */
7320
7321/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7322FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7323/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7324FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7325/* Opcode 0xf3 0x0f 0xe0 - invalid */
7326/* Opcode 0xf2 0x0f 0xe0 - invalid */
7327
7328/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7329FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7330/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7331FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7332/* Opcode 0xf3 0x0f 0xe1 - invalid */
7333/* Opcode 0xf2 0x0f 0xe1 - invalid */
7334
7335/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7336FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7337/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7338FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7339/* Opcode 0xf3 0x0f 0xe2 - invalid */
7340/* Opcode 0xf2 0x0f 0xe2 - invalid */
7341
7342/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7343FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7344/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7345FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7346/* Opcode 0xf3 0x0f 0xe3 - invalid */
7347/* Opcode 0xf2 0x0f 0xe3 - invalid */
7348
7349/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7350FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7351/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7352FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7353/* Opcode 0xf3 0x0f 0xe4 - invalid */
7354/* Opcode 0xf2 0x0f 0xe4 - invalid */
7355
7356/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7357FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7358/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7359FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7360/* Opcode 0xf3 0x0f 0xe5 - invalid */
7361/* Opcode 0xf2 0x0f 0xe5 - invalid */
7362
7363/* Opcode 0x0f 0xe6 - invalid */
7364/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7365FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7366/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7367FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7368/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7369FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7370
7371
7372/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7373FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7374{
7375 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
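    /* The non-temporal hint only affects cacheability on real hardware, so it
       is safe for the emulation to implement movntq as a plain 64-bit store,
       as done below. */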
7376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7377 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7378 {
7379 /* Register, memory. */
7380 IEM_MC_BEGIN(0, 2);
7381 IEM_MC_LOCAL(uint64_t, uSrc);
7382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7383
7384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7386 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7387 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7388
7389 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7390 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7391
7392 IEM_MC_ADVANCE_RIP();
7393 IEM_MC_END();
7394 return VINF_SUCCESS;
7395 }
7396 /* The register, register encoding is invalid. */
7397 return IEMOP_RAISE_INVALID_OPCODE();
7398}
7399
7400/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7401FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7402{
7403 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7404 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7405 {
7406 /* Register, memory. */
7407 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7408 IEM_MC_BEGIN(0, 2);
7409 IEM_MC_LOCAL(uint128_t, uSrc);
7410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7411
7412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7414 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7415 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7416
7417 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7418 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7419
7420 IEM_MC_ADVANCE_RIP();
7421 IEM_MC_END();
7422 return VINF_SUCCESS;
7423 }
7424
7425 /* The register, register encoding is invalid. */
7426 return IEMOP_RAISE_INVALID_OPCODE();
7427}
7428
7429/* Opcode 0xf3 0x0f 0xe7 - invalid */
7430/* Opcode 0xf2 0x0f 0xe7 - invalid */
7431
7432
7433/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7434FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7435/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7436FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7437/* Opcode 0xf3 0x0f 0xe8 - invalid */
7438/* Opcode 0xf2 0x0f 0xe8 - invalid */
7439
7440/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7441FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7442/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7443FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7444/* Opcode 0xf3 0x0f 0xe9 - invalid */
7445/* Opcode 0xf2 0x0f 0xe9 - invalid */
7446
7447/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7448FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7449/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7450FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7451/* Opcode 0xf3 0x0f 0xea - invalid */
7452/* Opcode 0xf2 0x0f 0xea - invalid */
7453
7454/** Opcode 0x0f 0xeb - por Pq, Qq */
7455FNIEMOP_STUB(iemOp_por_Pq_Qq);
7456/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7457FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7458/* Opcode 0xf3 0x0f 0xeb - invalid */
7459/* Opcode 0xf2 0x0f 0xeb - invalid */
7460
7461/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7462FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7463/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7464FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7465/* Opcode 0xf3 0x0f 0xec - invalid */
7466/* Opcode 0xf2 0x0f 0xec - invalid */
7467
7468/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7469FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7470/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7471FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7472/* Opcode 0xf3 0x0f 0xed - invalid */
7473/* Opcode 0xf2 0x0f 0xed - invalid */
7474
7475/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7476FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7477/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7478FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7479/* Opcode 0xf3 0x0f 0xee - invalid */
7480/* Opcode 0xf2 0x0f 0xee - invalid */
7481
7482
7483/** Opcode 0x0f 0xef - pxor Pq, Qq */
7484FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7485{
7486 IEMOP_MNEMONIC(pxor, "pxor");
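    /* Both the MMX form here and the SSE2 form below funnel into the same
       g_iemAImpl_pxor worker; only the register-file plumbing differs. */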
7487 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7488}
7489
7490/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7491FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7492{
7493 IEMOP_MNEMONIC(vpxor, "vpxor");
7494 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7495}
7496
7497/* Opcode 0xf3 0x0f 0xef - invalid */
7498/* Opcode 0xf2 0x0f 0xef - invalid */
7499
7500/* Opcode 0x0f 0xf0 - invalid */
7501/* Opcode 0x66 0x0f 0xf0 - invalid */
7502/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7503FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7504
7505/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7506FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7507/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7508FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7509/* Opcode 0xf2 0x0f 0xf1 - invalid */
7510
7511/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7512FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7513/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7514FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7515/* Opcode 0xf2 0x0f 0xf2 - invalid */
7516
7517/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7518FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7519/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7520FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7521/* Opcode 0xf2 0x0f 0xf3 - invalid */
7522
7523/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7524FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7525/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7526FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7527/* Opcode 0xf2 0x0f 0xf4 - invalid */
7528
7529/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7530FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7531/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7532FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7533/* Opcode 0xf2 0x0f 0xf5 - invalid */
7534
7535/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7536FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7537/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7538FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7539/* Opcode 0xf2 0x0f 0xf6 - invalid */
7540
7541/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7542FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7543/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7544FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7545/* Opcode 0xf2 0x0f 0xf7 - invalid */
7546
7547/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7548FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7549/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7550FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7551/* Opcode 0xf2 0x0f 0xf8 - invalid */
7552
7553/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7554FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7555/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7556FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7557/* Opcode 0xf2 0x0f 0xf9 - invalid */
7558
7559/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7560FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7561/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7562FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7563/* Opcode 0xf2 0x0f 0xfa - invalid */
7564
7565/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7566FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7567/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7568FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7569/* Opcode 0xf2 0x0f 0xfb - invalid */
7570
7571/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7572FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7573/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7574FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7575/* Opcode 0xf2 0x0f 0xfc - invalid */
7576
7577/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7578FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7579/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7580FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7581/* Opcode 0xf2 0x0f 0xfd - invalid */
7582
7583/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7584FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7585/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7586FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7587/* Opcode 0xf2 0x0f 0xfe - invalid */
7588
7589
7590/** Opcode **** 0x0f 0xff - UD0 */
7591FNIEMOP_DEF(iemOp_ud0)
7592{
7593 IEMOP_MNEMONIC(ud0, "ud0");
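    /* Intel CPUs consume a ModR/M byte (and any SIB/displacement it implies)
       before raising #UD for ud0, so the decoding below does the same when
       emulating an Intel CPU; for other vendors the opcode faults on its own. */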
7594 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7595 {
7596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7597#ifndef TST_IEM_CHECK_MC
7598 RTGCPTR GCPtrEff;
7599 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7600 if (rcStrict != VINF_SUCCESS)
7601 return rcStrict;
7602#endif
7603 IEMOP_HLP_DONE_DECODING();
7604 }
7605 return IEMOP_RAISE_INVALID_OPCODE();
7606}
7607
7608
7609
7610/** Repeats a_fn four times. For decoding tables. */
7611#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
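/* For instance, IEMOP_X4(iemOp_ud2) expands to four identical entries:
   iemOp_ud2, iemOp_ud2, iemOp_ud2, iemOp_ud2 -- one for each prefix column
   (none, 066h, 0f3h, 0f2h) of the tables below. */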
7612
7613/**
7614 * Two byte opcode map, first byte 0x0f.
7615 *
7616 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7617 * check if it needs updating as well when making changes.
7618 */
7619IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7620{
7621 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7622 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7623 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7624 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7625 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7626 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7627 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7628 /* 0x06 */ IEMOP_X4(iemOp_clts),
7629 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7630 /* 0x08 */ IEMOP_X4(iemOp_invd),
7631 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7632 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7633 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7634 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7635 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7636 /* 0x0e */ IEMOP_X4(iemOp_femms),
7637 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7638
7639 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7640 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7641 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7642 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7643 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7644 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7645 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7646 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7647 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7648 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7649 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7650 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7651 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7652 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7653 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7654 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7655
7656 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7657 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7658 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7659 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7660 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7661 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7662 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7663 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7664 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7665 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7666 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7667 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, /** @todo split me */
7668 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7669 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7670 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7671 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7672
7673 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7674 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7675 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7676 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7677 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7678 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7679 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7680 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7681 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7682 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7683 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7684 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7685 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7686 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7687 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7688 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7689
7690 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7691 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7692 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7693 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7694 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7695 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7696 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7697 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7698 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7699 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7700 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7701 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7702 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7703 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7704 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7705 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7706
7707 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7708 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7709 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7710 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7711 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7712 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7713 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7714 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7715 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7716 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7717 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7718 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7719 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7720 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7721 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7722 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7723
7724 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7725 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7726 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7727 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7728 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7729 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7730 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7731 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7732 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7733 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7734 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7735 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7736 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7737 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7738 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7739 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7740
7741 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7742 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7743 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7744 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7745 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7746 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7747 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7748 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7749
7750 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7751 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7752 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7753 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7754 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7755 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7756 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7757 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7758
7759 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7760 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7761 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7762 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7763 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7764 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7765 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7766 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7767 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7768 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7769 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7770 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7771 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7772 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7773 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7774 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7775
7776 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7777 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7778 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7779 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7780 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7781 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7782 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7783 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7784 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7785 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7786 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7787 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7788 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7789 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7790 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7791 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7792
7793 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7794 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7795 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7796 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7797 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7798 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7799 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7800 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7801 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7802 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7803 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7804 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7805 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7806 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7807 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7808 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7809
7810 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7811 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7812 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7813 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7814 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7815 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7816 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7817 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7818 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7819 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7820 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7821 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7822 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7823 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7824 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7825 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7826
7827 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7828 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7829 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7830 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7831 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7832 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7833 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7834 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7835 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7836 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7837 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7838 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7839 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7840 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7841 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7842 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7843
7844 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7845 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7846 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7847 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7848 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7849 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7850 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7851 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7852 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7853 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7854 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7855 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7856 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7857 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7858 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7859 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7860
7861 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7862 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7863 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7864 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7865 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7866 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7867 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7868 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7869 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7870 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7871 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7872 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7873 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7874 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7875 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7876 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7877
7878 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7879 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7880 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7881 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7882 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7883 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7884 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7885 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7886 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7887 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7888 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7889 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7890 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7891 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7892 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7893 /* 0xff */ IEMOP_X4(iemOp_ud0),
7894};
7895AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
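/* Sanity: 256 opcode bytes x 4 prefix columns = 1024 entries. A rough sketch
   of the intended lookup (identifier names illustrative, not the actual
   decoder):
       PFNIEMOP pfnOp = g_apfnTwoByteMap[((uintptr_t)bOpcode << 2) + idxPrefixColumn];
 */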
7896
7897
7898/**
7899 * VEX opcode map \#1.
7900 *
7901 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
7902 * it needs updating too when making changes.
7903 */
7904IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
7905{
7906 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7907 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7908 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7909 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7910 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7911 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7912 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7913 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7914 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7915 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7916 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7917 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7918 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
7919 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7920 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7921 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7922 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7923
7924 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7925 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7926 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7927 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7928 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7929 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7930 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7931 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7932 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7933 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7934 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7935 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7936 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7937 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7938 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7939 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7940
7941 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7942 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7943 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7944 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7945 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7946 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7947 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7948 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7949 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7950 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7951 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7952 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, /** @todo split me */
7953 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7954 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7955 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7956 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7957
7958 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7959 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7960 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7961 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7962 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7963 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7964 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7965 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7966 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7967 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7968 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7969 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7970 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7971 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7972 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7973 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7974
7975 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7976 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7977 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7978 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7979 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
7980 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
7981 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
7982 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
7983 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
7984 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
7985 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
7986 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
7987 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
7988 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
7989 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
7990 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
7991
7992 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7993 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7994 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7995 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7996 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7997 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7998 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7999 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8000 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8001 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8002 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8003 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8004 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8005 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8006 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8007 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8008
8009 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8010 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8011 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8012 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8013 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8014 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8015 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8016 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8017 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8018 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8019 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8020 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8021 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8022 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8023 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8024 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8025
8026 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8027 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8028 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8029 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8030 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8031 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8032 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8033 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8034 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
8035 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
8036 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
8037 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
8038 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8039 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8040 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8041 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8042
8043 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
8044 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
8045 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
8046 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
8047 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
8048 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
8049 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
8050 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
8051 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
8052 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
8053 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
8054 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
8055 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
8056 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
8057 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
8058 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
8060 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
8061 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
8062 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
8063 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
8064 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
8065 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
8066 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
8067 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
8068 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
8069 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
8070 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
8071 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
8072 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
8073 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
8074 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
8075 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
8076
8077 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8078 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8079 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8080 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8081 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8082 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8083 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8084 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8085 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8086 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8087 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
8088 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
8089 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
8090 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
8091 /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
8092 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
8093
8094 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8095 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8096 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8097 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8098 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8099 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8100 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8101 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8102 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8103 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8104 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
8105 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
8106 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
8107 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
8108 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
8109 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
8110
8111 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8112 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8113 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8114 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8115 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8116 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8117 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8118 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8119 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8120 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8121 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
8122 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
8123 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
8124 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
8125 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
8126 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
8127
8128 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8129 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8130 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8131 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8132 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8133 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8134 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8135 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8136 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8137 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8138 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8139 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8140 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8141 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8142 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8143 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8144
8145 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8146 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8147 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8148 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8149 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8150 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8151 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8152 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8153 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8154 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8155 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8156 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8157 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8158 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8159 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8160 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8161
8162 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8163 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8164 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8165 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8166 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8167 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8168 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8169 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8170 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8171 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8172 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8173 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8174 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8175 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8176 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8177 /* 0xff */ IEMOP_X4(iemOp_ud0),
8178};
8179AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8180/** @} */
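/*
 * Editor's sketch (illustration only, not part of the original file): the map
 * above keeps four consecutive entries per opcode byte -- one each for "no
 * mandatory prefix", 0x66, 0xF3 and 0xF2 -- which is what the AssertCompile
 * above pins down (256 opcodes x 4 columns = 1024 entries).  A dispatcher
 * could therefore index the table as shown below.  The handler name and the
 * idxPrefix decoder-state member are assumptions made for this example; the
 * real escape-byte dispatch lives elsewhere in IEM.
 */
#if 0 /* illustration only */
FNIEMOP_DEF(iemOp_TwoByteEscape_sketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Columns 0..3 are selected by the last mandatory prefix decoded. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif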
8181
8182