source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@65780

Last change on this file: r65780, checked in by vboxsync, 8 years ago:

IEM: Group 12, 13, and 14 jump table size checks.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 306.0 KB
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 65780 2017-02-13 17:40:26Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/** @name ..... opcodes.
26 *
27 * @{
28 */
29
30/** @} */
31
32
33/** @name Two byte opcodes (first byte 0x0f).
34 *
35 * @{
36 */
37
38/** Opcode 0x0f 0x00 /0. */
39FNIEMOPRM_DEF(iemOp_Grp6_sldt)
40{
41 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
42 IEMOP_HLP_MIN_286();
43 IEMOP_HLP_NO_REAL_OR_V86_MODE();
44
45 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
46 {
47 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
48 switch (pVCpu->iem.s.enmEffOpSize)
49 {
50 case IEMMODE_16BIT:
51 IEM_MC_BEGIN(0, 1);
52 IEM_MC_LOCAL(uint16_t, u16Ldtr);
53 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
54 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 break;
58
59 case IEMMODE_32BIT:
60 IEM_MC_BEGIN(0, 1);
61 IEM_MC_LOCAL(uint32_t, u32Ldtr);
62 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
63 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
64 IEM_MC_ADVANCE_RIP();
65 IEM_MC_END();
66 break;
67
68 case IEMMODE_64BIT:
69 IEM_MC_BEGIN(0, 1);
70 IEM_MC_LOCAL(uint64_t, u64Ldtr);
71 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
72 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
73 IEM_MC_ADVANCE_RIP();
74 IEM_MC_END();
75 break;
76
77 IEM_NOT_REACHED_DEFAULT_CASE_RET();
78 }
79 }
80 else
81 {
82 IEM_MC_BEGIN(0, 2);
83 IEM_MC_LOCAL(uint16_t, u16Ldtr);
84 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
85 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
86 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
87 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
88 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
89 IEM_MC_ADVANCE_RIP();
90 IEM_MC_END();
91 }
92 return VINF_SUCCESS;
93}
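
/*
 * A note on the IEM_MC_* blocks used above and throughout this file (a
 * minimal sketch of the recurring pattern; the macros themselves are
 * defined elsewhere in IEM):
 *
 *      IEM_MC_BEGIN(0, 1);                     // open block: 0 args, 1 local
 *      IEM_MC_LOCAL(uint16_t, u16Value);       // declare the local
 *      IEM_MC_FETCH_LDTR_U16(u16Value);        // read guest state
 *      IEM_MC_STORE_GREG_U16(iReg, u16Value);  // iReg: stand-in for the decoded register index
 *      IEM_MC_ADVANCE_RIP();                   // step past the instruction
 *      IEM_MC_END();                           // close the block
 *
 * Blocks that defer to a C worker instead end with IEM_MC_CALL_CIMPL_n()
 * and let the worker advance RIP (compare iemOp_Grp6_lldt below).
 */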
94
95
96/** Opcode 0x0f 0x00 /1. */
97FNIEMOPRM_DEF(iemOp_Grp6_str)
98{
99 IEMOP_MNEMONIC(str, "str Rv/Mw");
100 IEMOP_HLP_MIN_286();
101 IEMOP_HLP_NO_REAL_OR_V86_MODE();
102
103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
104 {
105 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
106 switch (pVCpu->iem.s.enmEffOpSize)
107 {
108 case IEMMODE_16BIT:
109 IEM_MC_BEGIN(0, 1);
110 IEM_MC_LOCAL(uint16_t, u16Tr);
111 IEM_MC_FETCH_TR_U16(u16Tr);
112 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 break;
116
117 case IEMMODE_32BIT:
118 IEM_MC_BEGIN(0, 1);
119 IEM_MC_LOCAL(uint32_t, u32Tr);
120 IEM_MC_FETCH_TR_U32(u32Tr);
121 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
122 IEM_MC_ADVANCE_RIP();
123 IEM_MC_END();
124 break;
125
126 case IEMMODE_64BIT:
127 IEM_MC_BEGIN(0, 1);
128 IEM_MC_LOCAL(uint64_t, u64Tr);
129 IEM_MC_FETCH_TR_U64(u64Tr);
130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 break;
134
135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
136 }
137 }
138 else
139 {
140 IEM_MC_BEGIN(0, 2);
141 IEM_MC_LOCAL(uint16_t, u16Tr);
142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
144 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
145 IEM_MC_FETCH_TR_U16(u16Tr);
146 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 }
150 return VINF_SUCCESS;
151}
152
153
154/** Opcode 0x0f 0x00 /2. */
155FNIEMOPRM_DEF(iemOp_Grp6_lldt)
156{
157 IEMOP_MNEMONIC(lldt, "lldt Ew");
158 IEMOP_HLP_MIN_286();
159 IEMOP_HLP_NO_REAL_OR_V86_MODE();
160
161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
162 {
163 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
164 IEM_MC_BEGIN(1, 0);
165 IEM_MC_ARG(uint16_t, u16Sel, 0);
166 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
167 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
168 IEM_MC_END();
169 }
170 else
171 {
172 IEM_MC_BEGIN(1, 1);
173 IEM_MC_ARG(uint16_t, u16Sel, 0);
174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
176 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
177 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
178 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
179 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
180 IEM_MC_END();
181 }
182 return VINF_SUCCESS;
183}
184
185
186/** Opcode 0x0f 0x00 /3. */
187FNIEMOPRM_DEF(iemOp_Grp6_ltr)
188{
189 IEMOP_MNEMONIC(ltr, "ltr Ew");
190 IEMOP_HLP_MIN_286();
191 IEMOP_HLP_NO_REAL_OR_V86_MODE();
192
193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
194 {
195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
196 IEM_MC_BEGIN(1, 0);
197 IEM_MC_ARG(uint16_t, u16Sel, 0);
198 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
199 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
200 IEM_MC_END();
201 }
202 else
203 {
204 IEM_MC_BEGIN(1, 1);
205 IEM_MC_ARG(uint16_t, u16Sel, 0);
206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
209 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
210 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
211 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
212 IEM_MC_END();
213 }
214 return VINF_SUCCESS;
215}
216
217
218/** Opcode 0x0f 0x00 /4 and /5. */
219FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
220{
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_NO_REAL_OR_V86_MODE();
223
224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
225 {
226 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
227 IEM_MC_BEGIN(2, 0);
228 IEM_MC_ARG(uint16_t, u16Sel, 0);
229 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
230 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
231 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(2, 1);
237 IEM_MC_ARG(uint16_t, u16Sel, 0);
238 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
241 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
242 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
243 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
244 IEM_MC_END();
245 }
246 return VINF_SUCCESS;
247}
248
249
250/** Opcode 0x0f 0x00 /4. */
251FNIEMOPRM_DEF(iemOp_Grp6_verr)
252{
253 IEMOP_MNEMONIC(verr, "verr Ew");
254 IEMOP_HLP_MIN_286();
255 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
256}
257
258
259/** Opcode 0x0f 0x00 /5. */
260FNIEMOPRM_DEF(iemOp_Grp6_verw)
261{
262 IEMOP_MNEMONIC(verw, "verw Ew");
263 IEMOP_HLP_MIN_286();
264 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
265}
266
267
268/**
269 * Group 6 jump table.
270 */
271IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
272{
273 iemOp_Grp6_sldt,
274 iemOp_Grp6_str,
275 iemOp_Grp6_lldt,
276 iemOp_Grp6_ltr,
277 iemOp_Grp6_verr,
278 iemOp_Grp6_verw,
279 iemOp_InvalidWithRM,
280 iemOp_InvalidWithRM
281};
282
283/** Opcode 0x0f 0x00. */
284FNIEMOP_DEF(iemOp_Grp6)
285{
286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
287 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
288}
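
/*
 * The group dispatch above indexes the jump table with the ModR/M reg field
 * alone. As a sketch, assuming the standard ModR/M layout mod(7:6) reg(5:3)
 * rm(2:0):
 *
 *      uint8_t const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
 *      return FNIEMOP_CALL_1(g_apfnGroup6[iReg], bRm);
 *
 * mod and rm are left to the per-/r handler, which is why each table entry
 * takes bRm as its argument.
 */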
289
290
291/** Opcode 0x0f 0x01 /0. */
292FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
293{
294 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
295 IEMOP_HLP_MIN_286();
296 IEMOP_HLP_64BIT_OP_SIZE();
297 IEM_MC_BEGIN(2, 1);
298 IEM_MC_ARG(uint8_t, iEffSeg, 0);
299 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
303 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
304 IEM_MC_END();
305 return VINF_SUCCESS;
306}
307
308
309/** Opcode 0x0f 0x01 /0. */
310FNIEMOP_DEF(iemOp_Grp7_vmcall)
311{
312 IEMOP_BITCH_ABOUT_STUB();
313 return IEMOP_RAISE_INVALID_OPCODE();
314}
315
316
317/** Opcode 0x0f 0x01 /0. */
318FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
319{
320 IEMOP_BITCH_ABOUT_STUB();
321 return IEMOP_RAISE_INVALID_OPCODE();
322}
323
324
325/** Opcode 0x0f 0x01 /0. */
326FNIEMOP_DEF(iemOp_Grp7_vmresume)
327{
328 IEMOP_BITCH_ABOUT_STUB();
329 return IEMOP_RAISE_INVALID_OPCODE();
330}
331
332
333/** Opcode 0x0f 0x01 /0. */
334FNIEMOP_DEF(iemOp_Grp7_vmxoff)
335{
336 IEMOP_BITCH_ABOUT_STUB();
337 return IEMOP_RAISE_INVALID_OPCODE();
338}
339
340
341/** Opcode 0x0f 0x01 /1. */
342FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
343{
344 IEMOP_MNEMONIC(sidt, "sidt Ms");
345 IEMOP_HLP_MIN_286();
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(2, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
352 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
353 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
354 IEM_MC_END();
355 return VINF_SUCCESS;
356}
357
358
359/** Opcode 0x0f 0x01 /1. */
360FNIEMOP_DEF(iemOp_Grp7_monitor)
361{
362 IEMOP_MNEMONIC(monitor, "monitor");
363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
364 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
365}
366
367
368/** Opcode 0x0f 0x01 /1. */
369FNIEMOP_DEF(iemOp_Grp7_mwait)
370{
371 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
373 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
374}
375
376
377/** Opcode 0x0f 0x01 /2. */
378FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
379{
380 IEMOP_MNEMONIC(lgdt, "lgdt");
381 IEMOP_HLP_64BIT_OP_SIZE();
382 IEM_MC_BEGIN(3, 1);
383 IEM_MC_ARG(uint8_t, iEffSeg, 0);
384 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
389 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
390 IEM_MC_END();
391 return VINF_SUCCESS;
392}
393
394
395/** Opcode 0x0f 0x01 0xd0. */
396FNIEMOP_DEF(iemOp_Grp7_xgetbv)
397{
398 IEMOP_MNEMONIC(xgetbv, "xgetbv");
399 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
400 {
401 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
403 }
404 return IEMOP_RAISE_INVALID_OPCODE();
405}
406
407
408/** Opcode 0x0f 0x01 0xd1. */
409FNIEMOP_DEF(iemOp_Grp7_xsetbv)
410{
411 IEMOP_MNEMONIC(xsetbv, "xsetbv");
412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
413 {
414 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
416 }
417 return IEMOP_RAISE_INVALID_OPCODE();
418}
419
420
421/** Opcode 0x0f 0x01 /3. */
422FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
423{
424 IEMOP_MNEMONIC(lidt, "lidt");
425 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
426 ? IEMMODE_64BIT
427 : pVCpu->iem.s.enmEffOpSize;
428 IEM_MC_BEGIN(3, 1);
429 IEM_MC_ARG(uint8_t, iEffSeg, 0);
430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
431 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
434 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
435 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
436 IEM_MC_END();
437 return VINF_SUCCESS;
438}
439
440
441/** Opcode 0x0f 0x01 0xd8. */
442FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
443
444/** Opcode 0x0f 0x01 0xd9. */
445FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
446
447/** Opcode 0x0f 0x01 0xda. */
448FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
449
450/** Opcode 0x0f 0x01 0xdb. */
451FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
452
453/** Opcode 0x0f 0x01 0xdc. */
454FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
455
456/** Opcode 0x0f 0x01 0xdd. */
457FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
458
459/** Opcode 0x0f 0x01 0xde. */
460FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
461
462/** Opcode 0x0f 0x01 0xdf. */
463FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
464
465/** Opcode 0x0f 0x01 /4. */
466FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
467{
468 IEMOP_MNEMONIC(smsw, "smsw");
469 IEMOP_HLP_MIN_286();
470 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
471 {
472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
473 switch (pVCpu->iem.s.enmEffOpSize)
474 {
475 case IEMMODE_16BIT:
476 IEM_MC_BEGIN(0, 1);
477 IEM_MC_LOCAL(uint16_t, u16Tmp);
478 IEM_MC_FETCH_CR0_U16(u16Tmp);
479 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
480 { /* likely */ }
481 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
482 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
483 else
484 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
485 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
486 IEM_MC_ADVANCE_RIP();
487 IEM_MC_END();
488 return VINF_SUCCESS;
489
490 case IEMMODE_32BIT:
491 IEM_MC_BEGIN(0, 1);
492 IEM_MC_LOCAL(uint32_t, u32Tmp);
493 IEM_MC_FETCH_CR0_U32(u32Tmp);
494 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
495 IEM_MC_ADVANCE_RIP();
496 IEM_MC_END();
497 return VINF_SUCCESS;
498
499 case IEMMODE_64BIT:
500 IEM_MC_BEGIN(0, 1);
501 IEM_MC_LOCAL(uint64_t, u64Tmp);
502 IEM_MC_FETCH_CR0_U64(u64Tmp);
503 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
504 IEM_MC_ADVANCE_RIP();
505 IEM_MC_END();
506 return VINF_SUCCESS;
507
508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
509 }
510 }
511 else
512 {
513 /* Ignore operand size here, memory refs are always 16-bit. */
514 IEM_MC_BEGIN(0, 2);
515 IEM_MC_LOCAL(uint16_t, u16Tmp);
516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
519 IEM_MC_FETCH_CR0_U16(u16Tmp);
520 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
521 { /* likely */ }
522 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
523 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
524 else
525 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
526 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
527 IEM_MC_ADVANCE_RIP();
528 IEM_MC_END();
529 return VINF_SUCCESS;
530 }
531}
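
/*
 * The target-CPU checks above pad the reserved MSW bits the way older CPUs
 * report them. A sketch of the intent:
 *
 *      if (uTargetCpu > IEMTARGETCPU_386)
 *          ;                           // 486+: CR0[15:0] stored as-is
 *      else if (uTargetCpu == IEMTARGETCPU_386)
 *          u16Tmp |= 0xffe0;           // 386: bits 15:5 read as ones, ET (bit 4) kept
 *      else
 *          u16Tmp |= 0xfff0;           // 286: bits 15:4 read as ones
 */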
532
533
534/** Opcode 0x0f 0x01 /6. */
535FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
536{
537 /* The operand size is effectively ignored, all is 16-bit and only the
538 lower 4 bits are used. */
539 IEMOP_MNEMONIC(lmsw, "lmsw");
540 IEMOP_HLP_MIN_286();
541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
542 {
543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
544 IEM_MC_BEGIN(1, 0);
545 IEM_MC_ARG(uint16_t, u16Tmp, 0);
546 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
547 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
548 IEM_MC_END();
549 }
550 else
551 {
552 IEM_MC_BEGIN(1, 1);
553 IEM_MC_ARG(uint16_t, u16Tmp, 0);
554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
557 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
558 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
559 IEM_MC_END();
560 }
561 return VINF_SUCCESS;
562}
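
/*
 * The architectural effect implemented by iemCImpl_lmsw (defined elsewhere),
 * as a minimal sketch assuming the SDM-described semantics: only CR0 bits
 * 3:0 are written, and PE can be set but never cleared by lmsw:
 *
 *      uint64_t uNewCr0 = (uOldCr0 & ~(uint64_t)(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS))
 *                       |  (u16Tmp &            (X86_CR0_MP | X86_CR0_EM | X86_CR0_TS));
 *      uNewCr0 |= u16Tmp & X86_CR0_PE;     // PE is sticky: OR, don't replace
 */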
563
564
565/** Opcode 0x0f 0x01 /7. */
566FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
567{
568 IEMOP_MNEMONIC(invlpg, "invlpg");
569 IEMOP_HLP_MIN_486();
570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
571 IEM_MC_BEGIN(1, 1);
572 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
574 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
575 IEM_MC_END();
576 return VINF_SUCCESS;
577}
578
579
580/** Opcode 0x0f 0x01 /7. */
581FNIEMOP_DEF(iemOp_Grp7_swapgs)
582{
583 IEMOP_MNEMONIC(swapgs, "swapgs");
584 IEMOP_HLP_ONLY_64BIT();
585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
586 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
587}
588
589
590/** Opcode 0x0f 0x01 /7. */
591FNIEMOP_DEF(iemOp_Grp7_rdtscp)
592{
593 NOREF(pVCpu);
594 IEMOP_BITCH_ABOUT_STUB();
595 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
596}
597
598
599/**
600 * Group 7 jump table, memory variant.
601 */
602IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
603{
604 iemOp_Grp7_sgdt,
605 iemOp_Grp7_sidt,
606 iemOp_Grp7_lgdt,
607 iemOp_Grp7_lidt,
608 iemOp_Grp7_smsw,
609 iemOp_InvalidWithRM,
610 iemOp_Grp7_lmsw,
611 iemOp_Grp7_invlpg
612};
613
614
615/** Opcode 0x0f 0x01. */
616FNIEMOP_DEF(iemOp_Grp7)
617{
618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
619 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
620 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
621
622 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
623 {
624 case 0:
625 switch (bRm & X86_MODRM_RM_MASK)
626 {
627 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
628 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
629 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
630 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
631 }
632 return IEMOP_RAISE_INVALID_OPCODE();
633
634 case 1:
635 switch (bRm & X86_MODRM_RM_MASK)
636 {
637 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
638 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
639 }
640 return IEMOP_RAISE_INVALID_OPCODE();
641
642 case 2:
643 switch (bRm & X86_MODRM_RM_MASK)
644 {
645 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
646 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
647 }
648 return IEMOP_RAISE_INVALID_OPCODE();
649
650 case 3:
651 switch (bRm & X86_MODRM_RM_MASK)
652 {
653 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
654 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
655 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
656 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
657 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
658 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
659 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
660 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
662 }
663
664 case 4:
665 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
666
667 case 5:
668 return IEMOP_RAISE_INVALID_OPCODE();
669
670 case 6:
671 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
672
673 case 7:
674 switch (bRm & X86_MODRM_RM_MASK)
675 {
676 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
677 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
678 }
679 return IEMOP_RAISE_INVALID_OPCODE();
680
681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
682 }
683}
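
/*
 * Sketch of the Grp7 decode split above: memory forms dispatch purely on the
 * reg field via g_apfnGroup7Mem, while register forms (mod == 3) dispatch on
 * reg and then on rm, because 0f 01 with mod=3 encodes individual
 * instructions (vmcall, monitor, xgetbv, swapgs, ...) in the rm slot:
 *
 *      if (((bRm >> X86_MODRM_MOD_SHIFT) & 3) != 3)
 *          return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
 *      // else: switch on the reg field, then on (bRm & X86_MODRM_RM_MASK)
 */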
684
685/** Opcode 0x0f 0x02 and 0x0f 0x03, common worker for lar/lsl. */
686FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
687{
688 IEMOP_HLP_NO_REAL_OR_V86_MODE();
689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
690
691 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
692 {
693 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
694 switch (pVCpu->iem.s.enmEffOpSize)
695 {
696 case IEMMODE_16BIT:
697 {
698 IEM_MC_BEGIN(3, 0);
699 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
700 IEM_MC_ARG(uint16_t, u16Sel, 1);
701 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
702
703 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
704 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
705 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
706
707 IEM_MC_END();
708 return VINF_SUCCESS;
709 }
710
711 case IEMMODE_32BIT:
712 case IEMMODE_64BIT:
713 {
714 IEM_MC_BEGIN(3, 0);
715 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
716 IEM_MC_ARG(uint16_t, u16Sel, 1);
717 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
718
719 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
720 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
721 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
722
723 IEM_MC_END();
724 return VINF_SUCCESS;
725 }
726
727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
728 }
729 }
730 else
731 {
732 switch (pVCpu->iem.s.enmEffOpSize)
733 {
734 case IEMMODE_16BIT:
735 {
736 IEM_MC_BEGIN(3, 1);
737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
738 IEM_MC_ARG(uint16_t, u16Sel, 1);
739 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741
742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
743 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
744
745 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
746 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
747 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
748
749 IEM_MC_END();
750 return VINF_SUCCESS;
751 }
752
753 case IEMMODE_32BIT:
754 case IEMMODE_64BIT:
755 {
756 IEM_MC_BEGIN(3, 1);
757 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
758 IEM_MC_ARG(uint16_t, u16Sel, 1);
759 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
761
762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
763 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
764/** @todo testcase: make sure it's a 16-bit read. */
765
766 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
767 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
768 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
769
770 IEM_MC_END();
771 return VINF_SUCCESS;
772 }
773
774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
775 }
776 }
777}
778
779
780
781/** Opcode 0x0f 0x02. */
782FNIEMOP_DEF(iemOp_lar_Gv_Ew)
783{
784 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
785 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
786}
787
788
789/** Opcode 0x0f 0x03. */
790FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
791{
792 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
793 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
794}
795
796
797/** Opcode 0x0f 0x05. */
798FNIEMOP_DEF(iemOp_syscall)
799{
800 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
802 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
803}
804
805
806/** Opcode 0x0f 0x06. */
807FNIEMOP_DEF(iemOp_clts)
808{
809 IEMOP_MNEMONIC(clts, "clts");
810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
812}
813
814
815/** Opcode 0x0f 0x07. */
816FNIEMOP_DEF(iemOp_sysret)
817{
818 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
820 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
821}
822
823
824/** Opcode 0x0f 0x08. */
825FNIEMOP_STUB(iemOp_invd);
826// IEMOP_HLP_MIN_486();
827
828
829/** Opcode 0x0f 0x09. */
830FNIEMOP_DEF(iemOp_wbinvd)
831{
832 IEMOP_MNEMONIC(wbinvd, "wbinvd");
833 IEMOP_HLP_MIN_486();
834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
835 IEM_MC_BEGIN(0, 0);
836 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
837 IEM_MC_ADVANCE_RIP();
838 IEM_MC_END();
839 return VINF_SUCCESS; /* ignore for now */
840}
841
842
843/** Opcode 0x0f 0x0b. */
844FNIEMOP_DEF(iemOp_ud2)
845{
846 IEMOP_MNEMONIC(ud2, "ud2");
847 return IEMOP_RAISE_INVALID_OPCODE();
848}
849
850/** Opcode 0x0f 0x0d. */
851FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
852{
853 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
854 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
855 {
856 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
857 return IEMOP_RAISE_INVALID_OPCODE();
858 }
859
860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
862 {
863 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
864 return IEMOP_RAISE_INVALID_OPCODE();
865 }
866
867 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
868 {
869 case 2: /* Aliased to /0 for the time being. */
870 case 4: /* Aliased to /0 for the time being. */
871 case 5: /* Aliased to /0 for the time being. */
872 case 6: /* Aliased to /0 for the time being. */
873 case 7: /* Aliased to /0 for the time being. */
874 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
875 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
876 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
878 }
879
880 IEM_MC_BEGIN(0, 1);
881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
884 /* Currently a NOP. */
885 NOREF(GCPtrEffSrc);
886 IEM_MC_ADVANCE_RIP();
887 IEM_MC_END();
888 return VINF_SUCCESS;
889}
890
891
892/** Opcode 0x0f 0x0e. */
893FNIEMOP_STUB(iemOp_femms);
894
895
896/** Opcode 0x0f 0x0f 0x0c. */
897FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
898
899/** Opcode 0x0f 0x0f 0x0d. */
900FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
901
902/** Opcode 0x0f 0x0f 0x1c. */
903FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
904
905/** Opcode 0x0f 0x0f 0x1d. */
906FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
907
908/** Opcode 0x0f 0x0f 0x8a. */
909FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
910
911/** Opcode 0x0f 0x0f 0x8e. */
912FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
913
914/** Opcode 0x0f 0x0f 0x90. */
915FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
916
917/** Opcode 0x0f 0x0f 0x94. */
918FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
919
920/** Opcode 0x0f 0x0f 0x96. */
921FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
922
923/** Opcode 0x0f 0x0f 0x97. */
924FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
925
926/** Opcode 0x0f 0x0f 0x9a. */
927FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
928
929/** Opcode 0x0f 0x0f 0x9e. */
930FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
931
932/** Opcode 0x0f 0x0f 0xa0. */
933FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
934
935/** Opcode 0x0f 0x0f 0xa4. */
936FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
937
938/** Opcode 0x0f 0x0f 0xa6. */
939FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
940
941/** Opcode 0x0f 0x0f 0xa7. */
942FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
943
944/** Opcode 0x0f 0x0f 0xaa. */
945FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
946
947/** Opcode 0x0f 0x0f 0xae. */
948FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
949
950/** Opcode 0x0f 0x0f 0xb0. */
951FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
952
953/** Opcode 0x0f 0x0f 0xb4. */
954FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
955
956/** Opcode 0x0f 0x0f 0xb6. */
957FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
958
959/** Opcode 0x0f 0x0f 0xb7. */
960FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
961
962/** Opcode 0x0f 0x0f 0xbb. */
963FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
964
965/** Opcode 0x0f 0x0f 0xbf. */
966FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
967
968
969/** Opcode 0x0f 0x0f. */
970FNIEMOP_DEF(iemOp_3Dnow)
971{
972 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
973 {
974 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
975 return IEMOP_RAISE_INVALID_OPCODE();
976 }
977
978 /* This is pretty sparse, use switch instead of table. */
979 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
980 switch (b)
981 {
982 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
983 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
984 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
985 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
986 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
987 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
988 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
989 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
990 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
991 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
992 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
993 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
994 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
995 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
996 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
997 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
998 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
999 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1000 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1001 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1002 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1003 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1004 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1005 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1006 default:
1007 return IEMOP_RAISE_INVALID_OPCODE();
1008 }
1009}
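
/*
 * 3DNow! selects the operation with an extra opcode byte (0f 0f ... ib), so
 * the dispatcher above switches on that byte; with only 24 defined values a
 * 256-entry jump table would be mostly empty, hence the switch.
 */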
1010
1011
1012/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1013FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1014/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1015FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1016/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1017FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1018/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1019FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1020
1021
1022/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
1023FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1024{
1025 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1028 {
1029 /*
1030 * Register, register.
1031 */
1032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1033 IEM_MC_BEGIN(0, 0);
1034 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1035 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1036 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1037 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1038 IEM_MC_ADVANCE_RIP();
1039 IEM_MC_END();
1040 }
1041 else
1042 {
1043 /*
1044 * Memory, register.
1045 */
1046 IEM_MC_BEGIN(0, 2);
1047 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1049
1050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1052 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1053 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1054
1055 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1056 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1057
1058 IEM_MC_ADVANCE_RIP();
1059 IEM_MC_END();
1060 }
1061 return VINF_SUCCESS;
1062}
1063
1064
1065/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1066FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1067
1068/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1069FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1070
1071/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1072FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1073{
1074 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1077 {
1078 /*
1079 * Register, register.
1080 */
1081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1082 IEM_MC_BEGIN(0, 1);
1083 IEM_MC_LOCAL(uint64_t, uSrc);
1084
1085 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1086 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1087 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1088 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1089
1090 IEM_MC_ADVANCE_RIP();
1091 IEM_MC_END();
1092 }
1093 else
1094 {
1095 /*
1096 * Memory, register.
1097 */
1098 IEM_MC_BEGIN(0, 2);
1099 IEM_MC_LOCAL(uint64_t, uSrc);
1100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1101
1102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1104 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1105 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1106
1107 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1108 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1109
1110 IEM_MC_ADVANCE_RIP();
1111 IEM_MC_END();
1112 }
1113 return VINF_SUCCESS;
1114}
1115
1116
1117/** Opcode 0x0f 0x12. */
1118FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1119
1120/** Opcode 0x66 0x0f 0x12. */
1121FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1122
1123/** Opcode 0xf3 0x0f 0x12. */
1124FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1125
1126/** Opcode 0xf2 0x0f 0x12. */
1127FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1128
1129/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1130FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1131
1132/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1133FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1134{
1135 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1137 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1138 {
1139#if 0
1140 /*
1141 * Register, register.
1142 */
1143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1144 IEM_MC_BEGIN(0, 1);
1145 IEM_MC_LOCAL(uint64_t, uSrc);
1146 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1147 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1148 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1149 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1150 IEM_MC_ADVANCE_RIP();
1151 IEM_MC_END();
1152#else
1153 return IEMOP_RAISE_INVALID_OPCODE();
1154#endif
1155 }
1156 else
1157 {
1158 /*
1159 * Memory, register.
1160 */
1161 IEM_MC_BEGIN(0, 2);
1162 IEM_MC_LOCAL(uint64_t, uSrc);
1163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1164
1165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1167 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1168 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1169
1170 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1171 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1172
1173 IEM_MC_ADVANCE_RIP();
1174 IEM_MC_END();
1175 }
1176 return VINF_SUCCESS;
1177}
1178
1179/* Opcode 0xf3 0x0f 0x13 - invalid */
1180/* Opcode 0xf2 0x0f 0x13 - invalid */
1181
1182/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1183FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1184/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1185FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1186/* Opcode 0xf3 0x0f 0x14 - invalid */
1187/* Opcode 0xf2 0x0f 0x14 - invalid */
1188/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1189FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1190/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1191FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1192/* Opcode 0xf3 0x0f 0x15 - invalid */
1193/* Opcode 0xf2 0x0f 0x15 - invalid */
1194/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1195FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1196/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1197FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1198/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1199FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1200/* Opcode 0xf2 0x0f 0x16 - invalid */
1201/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1202FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1203/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1204FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1205/* Opcode 0xf3 0x0f 0x17 - invalid */
1206/* Opcode 0xf2 0x0f 0x17 - invalid */
1207
1208
1209/** Opcode 0x0f 0x18. */
1210FNIEMOP_DEF(iemOp_prefetch_Grp16)
1211{
1212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1213 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1214 {
1215 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1216 {
1217 case 4: /* Aliased to /0 for the time being according to AMD. */
1218 case 5: /* Aliased to /0 for the time being according to AMD. */
1219 case 6: /* Aliased to /0 for the time being according to AMD. */
1220 case 7: /* Aliased to /0 for the time being according to AMD. */
1221 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1222 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1223 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1224 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1226 }
1227
1228 IEM_MC_BEGIN(0, 1);
1229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1232 /* Currently a NOP. */
1233 NOREF(GCPtrEffSrc);
1234 IEM_MC_ADVANCE_RIP();
1235 IEM_MC_END();
1236 return VINF_SUCCESS;
1237 }
1238
1239 return IEMOP_RAISE_INVALID_OPCODE();
1240}
1241
1242
1243/** Opcode 0x0f 0x19..0x1f. */
1244FNIEMOP_DEF(iemOp_nop_Ev)
1245{
1246 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1248 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1249 {
1250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1251 IEM_MC_BEGIN(0, 0);
1252 IEM_MC_ADVANCE_RIP();
1253 IEM_MC_END();
1254 }
1255 else
1256 {
1257 IEM_MC_BEGIN(0, 1);
1258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1261 /* Currently a NOP. */
1262 NOREF(GCPtrEffSrc);
1263 IEM_MC_ADVANCE_RIP();
1264 IEM_MC_END();
1265 }
1266 return VINF_SUCCESS;
1267}
1268
1269
1270/** Opcode 0x0f 0x20. */
1271FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1272{
1273 /* mod is ignored, as are operand size overrides. */
1274 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1275 IEMOP_HLP_MIN_386();
1276 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1277 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1278 else
1279 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1280
1281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1282 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1283 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1284 {
1285 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1286 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1287 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1288 iCrReg |= 8;
1289 }
1290 switch (iCrReg)
1291 {
1292 case 0: case 2: case 3: case 4: case 8:
1293 break;
1294 default:
1295 return IEMOP_RAISE_INVALID_OPCODE();
1296 }
1297 IEMOP_HLP_DONE_DECODING();
1298
1299 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1300}
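
/*
 * The lock-prefixed CR8 alias handled above is AMD's 32-bit encoding for
 * reaching CR8 without a REX prefix. Assuming a CPU that reports the
 * feature (fMovCr8In32Bit), the bytes decode as, e.g.:
 *
 *      f0 0f 20 c0     lock mov eax, cr0   ; actually mov eax, cr8
 */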
1301
1302
1303/** Opcode 0x0f 0x21. */
1304FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1305{
1306 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1307 IEMOP_HLP_MIN_386();
1308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1310 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1311 return IEMOP_RAISE_INVALID_OPCODE();
1312 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1313 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1314 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1315}
1316
1317
1318/** Opcode 0x0f 0x22. */
1319FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1320{
1321 /* mod is ignored, as are operand size overrides. */
1322 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1323 IEMOP_HLP_MIN_386();
1324 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1325 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1326 else
1327 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1328
1329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1330 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1331 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1332 {
1333 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1334 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1335 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1336 iCrReg |= 8;
1337 }
1338 switch (iCrReg)
1339 {
1340 case 0: case 2: case 3: case 4: case 8:
1341 break;
1342 default:
1343 return IEMOP_RAISE_INVALID_OPCODE();
1344 }
1345 IEMOP_HLP_DONE_DECODING();
1346
1347 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1348}
1349
1350
1351/** Opcode 0x0f 0x23. */
1352FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1353{
1354 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1355 IEMOP_HLP_MIN_386();
1356 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1358 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1359 return IEMOP_RAISE_INVALID_OPCODE();
1360 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1361 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1362 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1363}
1364
1365
1366/** Opcode 0x0f 0x24. */
1367FNIEMOP_DEF(iemOp_mov_Rd_Td)
1368{
1369 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1370 /** @todo works on 386 and 486. */
1371 /* The RM byte is not considered, see testcase. */
1372 return IEMOP_RAISE_INVALID_OPCODE();
1373}
1374
1375
1376/** Opcode 0x0f 0x26. */
1377FNIEMOP_DEF(iemOp_mov_Td_Rd)
1378{
1379 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1380 /** @todo works on 386 and 486. */
1381 /* The RM byte is not considered, see testcase. */
1382 return IEMOP_RAISE_INVALID_OPCODE();
1383}
1384
1385
1386/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1387FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1388{
1389 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1391 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1392 {
1393 /*
1394 * Register, register.
1395 */
1396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1397 IEM_MC_BEGIN(0, 0);
1398 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1399 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1400 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1401 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1402 IEM_MC_ADVANCE_RIP();
1403 IEM_MC_END();
1404 }
1405 else
1406 {
1407 /*
1408 * Register, memory.
1409 */
1410 IEM_MC_BEGIN(0, 2);
1411 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1413
1414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1416 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1417 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1418
1419 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1420 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1421
1422 IEM_MC_ADVANCE_RIP();
1423 IEM_MC_END();
1424 }
1425 return VINF_SUCCESS;
1426}
1427
1428/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1429FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1430{
1431 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1433 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1434 {
1435 /*
1436 * Register, register.
1437 */
1438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1439 IEM_MC_BEGIN(0, 0);
1440 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1441 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1442 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1443 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1444 IEM_MC_ADVANCE_RIP();
1445 IEM_MC_END();
1446 }
1447 else
1448 {
1449 /*
1450 * Register, memory.
1451 */
1452 IEM_MC_BEGIN(0, 2);
1453 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1455
1456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1458 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1459 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1460
1461 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1462 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1463
1464 IEM_MC_ADVANCE_RIP();
1465 IEM_MC_END();
1466 }
1467 return VINF_SUCCESS;
1468}
1469
1470/* Opcode 0xf3 0x0f 0x28 - invalid */
1471/* Opcode 0xf2 0x0f 0x28 - invalid */
1472
1473/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
1474FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1475{
1476 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1479 {
1480 /*
1481 * Register, register.
1482 */
1483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1484 IEM_MC_BEGIN(0, 0);
1485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1487 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1488 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1489 IEM_MC_ADVANCE_RIP();
1490 IEM_MC_END();
1491 }
1492 else
1493 {
1494 /*
1495 * Memory, register.
1496 */
1497 IEM_MC_BEGIN(0, 2);
1498 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1500
1501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1503 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1504 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1505
1506 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1507 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1508
1509 IEM_MC_ADVANCE_RIP();
1510 IEM_MC_END();
1511 }
1512 return VINF_SUCCESS;
1513}
1514
1515/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
1516FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1517{
1518 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1521 {
1522 /*
1523 * Register, register.
1524 */
1525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1526 IEM_MC_BEGIN(0, 0);
1527 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1528 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1529 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1530 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1531 IEM_MC_ADVANCE_RIP();
1532 IEM_MC_END();
1533 }
1534 else
1535 {
1536 /*
1537 * Memory, register.
1538 */
1539 IEM_MC_BEGIN(0, 2);
1540 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1542
1543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1545 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1546 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1547
1548 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1549 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1550
1551 IEM_MC_ADVANCE_RIP();
1552 IEM_MC_END();
1553 }
1554 return VINF_SUCCESS;
1555}
1556
1557/* Opcode 0xf3 0x0f 0x29 - invalid */
1558/* Opcode 0xf2 0x0f 0x29 - invalid */
1559
1560
1561/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1562FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1563/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1564FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1565/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1566FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
1567/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1568FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
1569
1570
1571/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
1572FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1573{
1574 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1576 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1577 {
1578 /*
1579 * memory, register.
1580 */
1581 IEM_MC_BEGIN(0, 2);
1582 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1584
1585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1587 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1588 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1589
1590 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1591 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1592
1593 IEM_MC_ADVANCE_RIP();
1594 IEM_MC_END();
1595 }
1596 /* The register, register encoding is invalid. */
1597 else
1598 return IEMOP_RAISE_INVALID_OPCODE();
1599 return VINF_SUCCESS;
1600}
1601
1602/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
1603FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1604{
1605 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
1606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1607 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1608 {
1609 /*
1610 * memory, register.
1611 */
1612 IEM_MC_BEGIN(0, 2);
1613 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1615
1616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1618 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1619 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1620
1621 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1622 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1623
1624 IEM_MC_ADVANCE_RIP();
1625 IEM_MC_END();
1626 }
1627 /* The register, register encoding is invalid. */
1628 else
1629 return IEMOP_RAISE_INVALID_OPCODE();
1630 return VINF_SUCCESS;
1631}
1632/* Opcode 0xf3 0x0f 0x2b - invalid */
1633/* Opcode 0xf2 0x0f 0x2b - invalid */
1634
1635
1636/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1637FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1638/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1639FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1640/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1641FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1642/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1643FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1644
1645/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1646FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1647/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1648FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1649/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1650FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1651/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1652FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1653
1654/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1655FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1656/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1657FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1658/* Opcode 0xf3 0x0f 0x2e - invalid */
1659/* Opcode 0xf2 0x0f 0x2e - invalid */
1660
1661/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
1662FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1663/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
1664FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1665/* Opcode 0xf3 0x0f 0x2f - invalid */
1666/* Opcode 0xf2 0x0f 0x2f - invalid */
1667
1668/** Opcode 0x0f 0x30. */
1669FNIEMOP_DEF(iemOp_wrmsr)
1670{
1671 IEMOP_MNEMONIC(wrmsr, "wrmsr");
1672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1673 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
1674}
1675
1676
1677/** Opcode 0x0f 0x31. */
1678FNIEMOP_DEF(iemOp_rdtsc)
1679{
1680 IEMOP_MNEMONIC(rdtsc, "rdtsc");
1681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1682 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
1683}
1684
1685
1686/** Opcode 0x0f 0x32. */
1687FNIEMOP_DEF(iemOp_rdmsr)
1688{
1689 IEMOP_MNEMONIC(rdmsr, "rdmsr");
1690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1691 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
1692}
1693
1694
1695/** Opcode 0x0f 0x33. */
1696FNIEMOP_STUB(iemOp_rdpmc);
1697/** Opcode 0x0f 0x34. */
1698FNIEMOP_STUB(iemOp_sysenter);
1699/** Opcode 0x0f 0x35. */
1700FNIEMOP_STUB(iemOp_sysexit);
1701/** Opcode 0x0f 0x37. */
1702FNIEMOP_STUB(iemOp_getsec);
1703/** Opcode 0x0f 0x38. */
1704FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1705/** Opcode 0x0f 0x3a. */
1706FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1707
1708
1709/**
1710 * Implements a conditional move.
1711 *
1712 * Wish there was an obvious way to do this where we could share and reduce
1713 * code bloat.
1714 *
1715 * @param a_Cnd The conditional "microcode" operation.
1716 */
1717#define CMOV_X(a_Cnd) \
1718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1720 { \
1721 switch (pVCpu->iem.s.enmEffOpSize) \
1722 { \
1723 case IEMMODE_16BIT: \
1724 IEM_MC_BEGIN(0, 1); \
1725 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1726 a_Cnd { \
1727 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1728 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1729 } IEM_MC_ENDIF(); \
1730 IEM_MC_ADVANCE_RIP(); \
1731 IEM_MC_END(); \
1732 return VINF_SUCCESS; \
1733 \
1734 case IEMMODE_32BIT: \
1735 IEM_MC_BEGIN(0, 1); \
1736 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1737 a_Cnd { \
1738 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1739 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1740 } IEM_MC_ELSE() { \
1741 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1742 } IEM_MC_ENDIF(); \
1743 IEM_MC_ADVANCE_RIP(); \
1744 IEM_MC_END(); \
1745 return VINF_SUCCESS; \
1746 \
1747 case IEMMODE_64BIT: \
1748 IEM_MC_BEGIN(0, 1); \
1749 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1750 a_Cnd { \
1751 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1752 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1753 } IEM_MC_ENDIF(); \
1754 IEM_MC_ADVANCE_RIP(); \
1755 IEM_MC_END(); \
1756 return VINF_SUCCESS; \
1757 \
1758 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1759 } \
1760 } \
1761 else \
1762 { \
1763 switch (pVCpu->iem.s.enmEffOpSize) \
1764 { \
1765 case IEMMODE_16BIT: \
1766 IEM_MC_BEGIN(0, 2); \
1767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1768 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1770 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1771 a_Cnd { \
1772 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1773 } IEM_MC_ENDIF(); \
1774 IEM_MC_ADVANCE_RIP(); \
1775 IEM_MC_END(); \
1776 return VINF_SUCCESS; \
1777 \
1778 case IEMMODE_32BIT: \
1779 IEM_MC_BEGIN(0, 2); \
1780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1781 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1783 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1784 a_Cnd { \
1785 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1786 } IEM_MC_ELSE() { \
1787 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1788 } IEM_MC_ENDIF(); \
1789 IEM_MC_ADVANCE_RIP(); \
1790 IEM_MC_END(); \
1791 return VINF_SUCCESS; \
1792 \
1793 case IEMMODE_64BIT: \
1794 IEM_MC_BEGIN(0, 2); \
1795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1796 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1798 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1799 a_Cnd { \
1800 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1801 } IEM_MC_ENDIF(); \
1802 IEM_MC_ADVANCE_RIP(); \
1803 IEM_MC_END(); \
1804 return VINF_SUCCESS; \
1805 \
1806 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1807 } \
1808 } do {} while (0)
1809
1810
1811
1812/** Opcode 0x0f 0x40. */
1813FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1814{
1815 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
1816 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1817}
1818
1819
1820/** Opcode 0x0f 0x41. */
1821FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1822{
1823 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
1824 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1825}
1826
1827
1828/** Opcode 0x0f 0x42. */
1829FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
1830{
1831 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
1832 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
1833}
1834
1835
1836/** Opcode 0x0f 0x43. */
1837FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
1838{
1839 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
1840 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
1841}
1842
1843
1844/** Opcode 0x0f 0x44. */
1845FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
1846{
1847 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
1848 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
1849}
1850
1851
1852/** Opcode 0x0f 0x45. */
1853FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
1854{
1855 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
1856 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
1857}
1858
1859
1860/** Opcode 0x0f 0x46. */
1861FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
1862{
1863 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
1864 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1865}
1866
1867
1868/** Opcode 0x0f 0x47. */
1869FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
1870{
1871 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
1872 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1873}
1874
1875
1876/** Opcode 0x0f 0x48. */
1877FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
1878{
1879 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
1880 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
1881}
1882
1883
1884/** Opcode 0x0f 0x49. */
1885FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
1886{
1887 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
1888 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
1889}
1890
1891
1892/** Opcode 0x0f 0x4a. */
1893FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
1894{
1895 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
1896 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
1897}
1898
1899
1900/** Opcode 0x0f 0x4b. */
1901FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
1902{
1903 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
1904 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
1905}
1906
1907
1908/** Opcode 0x0f 0x4c. */
1909FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
1910{
1911 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
1912 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
1913}
1914
1915
1916/** Opcode 0x0f 0x4d. */
1917FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
1918{
1919 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
1920 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
1921}
1922
1923
1924/** Opcode 0x0f 0x4e. */
1925FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
1926{
1927 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
1928 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1929}
1930
1931
1932/** Opcode 0x0f 0x4f. */
1933FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
1934{
1935 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
1936 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1937}
1938
1939#undef CMOV_X
1940
1941/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
1942FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1943/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
1944FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1945/* Opcode 0xf3 0x0f 0x50 - invalid */
1946/* Opcode 0xf2 0x0f 0x50 - invalid */
1947
1948/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
1949FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1950/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
1951FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1952/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
1953FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1954/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1955FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1956
1957/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
1958FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1959/* Opcode 0x66 0x0f 0x52 - invalid */
1960/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
1961FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1962/* Opcode 0xf2 0x0f 0x52 - invalid */
1963
1964/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
1965FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1966/* Opcode 0x66 0x0f 0x53 - invalid */
1967/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
1968FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1969/* Opcode 0xf2 0x0f 0x53 - invalid */
1970
1971/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
1972FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1973/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
1974FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1975/* Opcode 0xf3 0x0f 0x54 - invalid */
1976/* Opcode 0xf2 0x0f 0x54 - invalid */
1977
1978/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
1979FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1980/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
1981FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1982/* Opcode 0xf3 0x0f 0x55 - invalid */
1983/* Opcode 0xf2 0x0f 0x55 - invalid */
1984
1985/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
1986FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1987/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
1988FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1989/* Opcode 0xf3 0x0f 0x56 - invalid */
1990/* Opcode 0xf2 0x0f 0x56 - invalid */
1991
1992/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
1993FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
1994/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
1995FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
1996/* Opcode 0xf3 0x0f 0x57 - invalid */
1997/* Opcode 0xf2 0x0f 0x57 - invalid */
1998
1999/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2000FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2001/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2002FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2003/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2004FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2005/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2006FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2007
2008/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2009FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2010/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2011FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2012/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2013FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2014/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2015FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2016
2017/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2018FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2019/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2020FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2021/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2022FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2023/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2024FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2025
2026/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2027FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2028/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2029FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2030/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2031FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2032/* Opcode 0xf2 0x0f 0x5b - invalid */
2033
2034/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2035FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2036/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2037FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2038/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2039FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2040/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2041FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2042
2043/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2044FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2045/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2046FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2047/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2048FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2049/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2050FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2051
2052/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2053FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2054/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2055FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2056/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2057FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2058/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2059FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2060
2061/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2062FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2063/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2064FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2065/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2066FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2067/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2068FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2069
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2125
2126
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where only the lower 64 bits are read.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2186
2187
2188/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2189FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2190{
2191 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2193}
2194
/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2201
2202/* Opcode 0xf3 0x0f 0x60 - invalid */
2203
2204
2205/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2206FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2207{
2208 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2210}
2211
2212/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2213FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2214{
2215 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2217}
2218
2219/* Opcode 0xf3 0x0f 0x61 - invalid */
2220
2221
2222/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2223FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2224{
2225 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2226 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2227}
2228
2229/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2230FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2231{
2232 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2233 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2234}
2235
2236/* Opcode 0xf3 0x0f 0x62 - invalid */
2237
2238
2239
2240/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2241FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2242/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2243FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2244/* Opcode 0xf3 0x0f 0x63 - invalid */
2245
2246/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2247FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2248/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2249FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2250/* Opcode 0xf3 0x0f 0x64 - invalid */
2251
2252/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2253FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2254/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2255FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2256/* Opcode 0xf3 0x0f 0x65 - invalid */
2257
2258/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2259FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2260/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2261FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2262/* Opcode 0xf3 0x0f 0x66 - invalid */
2263
2264/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2265FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2266/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2267FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2268/* Opcode 0xf3 0x0f 0x67 - invalid */
2269
2270
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access.
 *
 * Exceptions type 4.
 */
2281FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2282{
2283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
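    /* Entries without an MMX form (e.g. punpckhqdq) have no pfnU64. */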
2284 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2286 {
2287 /*
2288 * Register, register.
2289 */
2290 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2291 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2293 IEM_MC_BEGIN(2, 0);
2294 IEM_MC_ARG(uint64_t *, pDst, 0);
2295 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2296 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2297 IEM_MC_PREPARE_FPU_USAGE();
2298 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2299 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2300 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2301 IEM_MC_ADVANCE_RIP();
2302 IEM_MC_END();
2303 }
2304 else
2305 {
2306 /*
2307 * Register, memory.
2308 */
2309 IEM_MC_BEGIN(2, 2);
2310 IEM_MC_ARG(uint64_t *, pDst, 0);
2311 IEM_MC_LOCAL(uint64_t, uSrc);
2312 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2314
2315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2317 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2318 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2319
2320 IEM_MC_PREPARE_FPU_USAGE();
2321 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2322 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2323
2324 IEM_MC_ADVANCE_RIP();
2325 IEM_MC_END();
2326 }
2327 return VINF_SUCCESS;
2328}
2329
2330
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 128-bit aligned access where the full 128 bits or only the upper
 * 64 bits may be read.
 *
 * Exceptions type 4.
 */
2341FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2342{
2343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2344 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2345 {
2346 /*
2347 * Register, register.
2348 */
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_BEGIN(2, 0);
2351 IEM_MC_ARG(uint128_t *, pDst, 0);
2352 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2353 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2354 IEM_MC_PREPARE_SSE_USAGE();
2355 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2356 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2357 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2358 IEM_MC_ADVANCE_RIP();
2359 IEM_MC_END();
2360 }
2361 else
2362 {
2363 /*
2364 * Register, memory.
2365 */
2366 IEM_MC_BEGIN(2, 2);
2367 IEM_MC_ARG(uint128_t *, pDst, 0);
2368 IEM_MC_LOCAL(uint128_t, uSrc);
2369 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2371
2372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2374 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2376
2377 IEM_MC_PREPARE_SSE_USAGE();
2378 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2379 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2380
2381 IEM_MC_ADVANCE_RIP();
2382 IEM_MC_END();
2383 }
2384 return VINF_SUCCESS;
2385}
2386
2387
2388/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2389FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2390{
2391 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2392 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2393}
2394
2395/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2396FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2397{
2398 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2399 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}

/* Opcode 0xf3 0x0f 0x68 - invalid */
2402
2403
2404/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2405FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2406{
2407 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2409}
2410
2411/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2412FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2413{
2414 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}

/* Opcode 0xf3 0x0f 0x69 - invalid */
2419
2420
2421/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2422FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2423{
2424 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2425 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2426}
2427
2428/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2429FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2430{
2431 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2432 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/* Opcode 0xf3 0x0f 0x6a - invalid */
2435
2436
2437/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2438FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2439/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2440FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2441/* Opcode 0xf3 0x0f 0x6b - invalid */
2442
2443
2444/* Opcode 0x0f 0x6c - invalid */
2445
2446/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2447FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2448{
2449 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2450 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2451}
2452
2453/* Opcode 0xf3 0x0f 0x6c - invalid */
2454/* Opcode 0xf2 0x0f 0x6c - invalid */
2455
2456
2457/* Opcode 0x0f 0x6d - invalid */
2458
2459/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2460FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2461{
    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2463 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2464}
2465
2466/* Opcode 0xf3 0x0f 0x6d - invalid */
2467
2468
2469/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2470FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2471{
2472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2473 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2474 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2475 else
2476 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2478 {
2479 /* MMX, greg */
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_BEGIN(0, 1);
2482 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2483 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2484 IEM_MC_LOCAL(uint64_t, u64Tmp);
2485 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2486 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2487 else
2488 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2489 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2490 IEM_MC_ADVANCE_RIP();
2491 IEM_MC_END();
2492 }
2493 else
2494 {
2495 /* MMX, [mem] */
2496 IEM_MC_BEGIN(0, 2);
2497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2498 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
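        /* No immediate operand follows the ModR/M encoding here, thus cbImm = 0. */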
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2501 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2502 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2503 {
2504 IEM_MC_LOCAL(uint64_t, u64Tmp);
2505 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2506 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2507 }
2508 else
2509 {
2510 IEM_MC_LOCAL(uint32_t, u32Tmp);
2511 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2512 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2513 }
2514 IEM_MC_ADVANCE_RIP();
2515 IEM_MC_END();
2516 }
2517 return VINF_SUCCESS;
2518}
2519
2520/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2521FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2522{
2523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
    else
        IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2528 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2529 {
2530 /* XMM, greg*/
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_BEGIN(0, 1);
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2535 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2536 {
2537 IEM_MC_LOCAL(uint64_t, u64Tmp);
2538 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2539 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2540 }
2541 else
2542 {
2543 IEM_MC_LOCAL(uint32_t, u32Tmp);
2544 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2545 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2546 }
2547 IEM_MC_ADVANCE_RIP();
2548 IEM_MC_END();
2549 }
2550 else
2551 {
2552 /* XMM, [mem] */
2553 IEM_MC_BEGIN(0, 2);
2554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2558 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2559 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2560 {
2561 IEM_MC_LOCAL(uint64_t, u64Tmp);
2562 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2563 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2564 }
2565 else
2566 {
2567 IEM_MC_LOCAL(uint32_t, u32Tmp);
2568 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2570 }
2571 IEM_MC_ADVANCE_RIP();
2572 IEM_MC_END();
2573 }
2574 return VINF_SUCCESS;
2575}
2576
2577/* Opcode 0xf3 0x0f 0x6e - invalid */
2578
2579
2580/** Opcode 0x0f 0x6f - movq Pq, Qq */
2581FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2582{
2583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2584 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2586 {
2587 /*
2588 * Register, register.
2589 */
2590 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2591 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2593 IEM_MC_BEGIN(0, 1);
2594 IEM_MC_LOCAL(uint64_t, u64Tmp);
2595 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2596 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2597 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2598 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2599 IEM_MC_ADVANCE_RIP();
2600 IEM_MC_END();
2601 }
2602 else
2603 {
2604 /*
2605 * Register, memory.
2606 */
2607 IEM_MC_BEGIN(0, 2);
2608 IEM_MC_LOCAL(uint64_t, u64Tmp);
2609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2610
2611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2613 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2614 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2615 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2616 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2617
2618 IEM_MC_ADVANCE_RIP();
2619 IEM_MC_END();
2620 }
2621 return VINF_SUCCESS;
2622}
2623
2624/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2625FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2626{
2627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2628 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2630 {
2631 /*
2632 * Register, register.
2633 */
2634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2635 IEM_MC_BEGIN(0, 0);
2636 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2637 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2638 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2639 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2640 IEM_MC_ADVANCE_RIP();
2641 IEM_MC_END();
2642 }
2643 else
2644 {
2645 /*
2646 * Register, memory.
2647 */
2648 IEM_MC_BEGIN(0, 2);
2649 IEM_MC_LOCAL(uint128_t, u128Tmp);
2650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2651
2652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2656 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2657 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2658
2659 IEM_MC_ADVANCE_RIP();
2660 IEM_MC_END();
2661 }
2662 return VINF_SUCCESS;
2663}
2664
2665/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2666FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2667{
2668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2669 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2671 {
2672 /*
2673 * Register, register.
2674 */
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_BEGIN(0, 0);
2677 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2678 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2679 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2680 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2681 IEM_MC_ADVANCE_RIP();
2682 IEM_MC_END();
2683 }
2684 else
2685 {
2686 /*
2687 * Register, memory.
2688 */
2689 IEM_MC_BEGIN(0, 2);
2690 IEM_MC_LOCAL(uint128_t, u128Tmp);
2691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2692
2693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2696 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
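        /* movdqu tolerates unaligned operands, hence the plain (non-ALIGN_SSE) fetch. */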
2697 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2698 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2699
2700 IEM_MC_ADVANCE_RIP();
2701 IEM_MC_END();
2702 }
2703 return VINF_SUCCESS;
2704}
2705
2706
2707/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2708FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2709{
2710 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2713 {
2714 /*
2715 * Register, register.
2716 */
2717 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2719
2720 IEM_MC_BEGIN(3, 0);
2721 IEM_MC_ARG(uint64_t *, pDst, 0);
2722 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2723 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2725 IEM_MC_PREPARE_FPU_USAGE();
2726 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2727 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2728 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2729 IEM_MC_ADVANCE_RIP();
2730 IEM_MC_END();
2731 }
2732 else
2733 {
2734 /*
2735 * Register, memory.
2736 */
2737 IEM_MC_BEGIN(3, 2);
2738 IEM_MC_ARG(uint64_t *, pDst, 0);
2739 IEM_MC_LOCAL(uint64_t, uSrc);
2740 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2742
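        /* The imm8 operand is still ahead of us, so pass cbImm = 1 to keep RIP-relative addressing right. */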
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2744 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2745 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2747 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2748
2749 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2750 IEM_MC_PREPARE_FPU_USAGE();
2751 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2752 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2753
2754 IEM_MC_ADVANCE_RIP();
2755 IEM_MC_END();
2756 }
2757 return VINF_SUCCESS;
2758}
2759
2760/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2761FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2762{
2763 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2765 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2766 {
2767 /*
2768 * Register, register.
2769 */
2770 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2772
2773 IEM_MC_BEGIN(3, 0);
2774 IEM_MC_ARG(uint128_t *, pDst, 0);
2775 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2776 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2777 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2778 IEM_MC_PREPARE_SSE_USAGE();
2779 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2780 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2781 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2782 IEM_MC_ADVANCE_RIP();
2783 IEM_MC_END();
2784 }
2785 else
2786 {
2787 /*
2788 * Register, memory.
2789 */
2790 IEM_MC_BEGIN(3, 2);
2791 IEM_MC_ARG(uint128_t *, pDst, 0);
2792 IEM_MC_LOCAL(uint128_t, uSrc);
2793 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2795
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2797 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2800 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2801
2802 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2803 IEM_MC_PREPARE_SSE_USAGE();
2804 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2805 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2806
2807 IEM_MC_ADVANCE_RIP();
2808 IEM_MC_END();
2809 }
2810 return VINF_SUCCESS;
2811}
2812
2813/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
2814FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2815{
2816 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2818 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2819 {
2820 /*
2821 * Register, register.
2822 */
2823 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2825
2826 IEM_MC_BEGIN(3, 0);
2827 IEM_MC_ARG(uint128_t *, pDst, 0);
2828 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2829 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2830 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2831 IEM_MC_PREPARE_SSE_USAGE();
2832 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2833 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2834 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2835 IEM_MC_ADVANCE_RIP();
2836 IEM_MC_END();
2837 }
2838 else
2839 {
2840 /*
2841 * Register, memory.
2842 */
2843 IEM_MC_BEGIN(3, 2);
2844 IEM_MC_ARG(uint128_t *, pDst, 0);
2845 IEM_MC_LOCAL(uint128_t, uSrc);
2846 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2848
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2850 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2851 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2853 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2854
2855 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2856 IEM_MC_PREPARE_SSE_USAGE();
2857 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2858 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2859
2860 IEM_MC_ADVANCE_RIP();
2861 IEM_MC_END();
2862 }
2863 return VINF_SUCCESS;
2864}
2865
2866/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
2867FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2868{
2869 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2872 {
2873 /*
2874 * Register, register.
2875 */
2876 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2878
2879 IEM_MC_BEGIN(3, 0);
2880 IEM_MC_ARG(uint128_t *, pDst, 0);
2881 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2882 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2883 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2884 IEM_MC_PREPARE_SSE_USAGE();
2885 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2886 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2887 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2888 IEM_MC_ADVANCE_RIP();
2889 IEM_MC_END();
2890 }
2891 else
2892 {
2893 /*
2894 * Register, memory.
2895 */
2896 IEM_MC_BEGIN(3, 2);
2897 IEM_MC_ARG(uint128_t *, pDst, 0);
2898 IEM_MC_LOCAL(uint128_t, uSrc);
2899 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2901
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2903 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2904 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2906 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2907
2908 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2909 IEM_MC_PREPARE_SSE_USAGE();
2910 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2911 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2912
2913 IEM_MC_ADVANCE_RIP();
2914 IEM_MC_END();
2915 }
2916 return VINF_SUCCESS;
2917}
2918
2919
2920/** Opcode 0x0f 0x71 11/2. */
2921FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2922
2923/** Opcode 0x66 0x0f 0x71 11/2. */
2924FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2925
2926/** Opcode 0x0f 0x71 11/4. */
2927FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2928
2929/** Opcode 0x66 0x0f 0x71 11/4. */
2930FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2931
2932/** Opcode 0x0f 0x71 11/6. */
2933FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2934
2935/** Opcode 0x66 0x0f 0x71 11/6. */
2936FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2937
2938
2939/**
2940 * Group 12 jump table for register variant.
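 *
 * Indexed by the ModR/M reg field times four, plus the SIMD prefix index
 * (none, 0x66, 0xf3, 0xf2; see pVCpu->iem.s.idxPrefix).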
2941 */
2942IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
2943{
2944 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2945 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2946 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2947 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2948 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2949 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2950 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2951 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2952};
2953AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
2954
2955
2956/** Opcode 0x0f 0x71. */
2957FNIEMOP_DEF(iemOp_Grp12)
2958{
2959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2960 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2961 /* register, register */
2962 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2963 + pVCpu->iem.s.idxPrefix], bRm);
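    /* Memory (and any other non-11b mod) forms are not valid in group 12. */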
2964 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2965}
2966
2967
2968/** Opcode 0x0f 0x72 11/2. */
2969FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
2970
2971/** Opcode 0x66 0x0f 0x72 11/2. */
2972FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
2973
2974/** Opcode 0x0f 0x72 11/4. */
2975FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
2976
2977/** Opcode 0x66 0x0f 0x72 11/4. */
2978FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
2979
2980/** Opcode 0x0f 0x72 11/6. */
2981FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
2982
2983/** Opcode 0x66 0x0f 0x72 11/6. */
2984FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
2985
2986
2987/**
2988 * Group 13 jump table for register variant.
2989 */
2990IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
2991{
2992 /** @todo decode imm8? */
2993 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2994 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2995 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2996 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2997 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2998 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2999 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3000 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3001};
3002AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3003
3004/** Opcode 0x0f 0x72. */
3005FNIEMOP_DEF(iemOp_Grp13)
3006{
3007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3009 /* register, register */
3010 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3011 + pVCpu->iem.s.idxPrefix], bRm);
3012 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3013}
3014
3015
3016/** Opcode 0x0f 0x73 11/2. */
3017FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3018
3019/** Opcode 0x66 0x0f 0x73 11/2. */
3020FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3021
3022/** Opcode 0x66 0x0f 0x73 11/3. */
3023FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3024
3025/** Opcode 0x0f 0x73 11/6. */
3026FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3027
3028/** Opcode 0x66 0x0f 0x73 11/6. */
3029FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3030
3031/** Opcode 0x66 0x0f 0x73 11/7. */
3032FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3033
3034/**
3035 * Group 14 jump table for register variant.
3036 */
3037IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3038{
3039 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3040 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3041 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3042 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3043 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3044 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3045 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3046 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3047};
3048AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3049
3050
3051/** Opcode 0x0f 0x73. */
3052FNIEMOP_DEF(iemOp_Grp14)
3053{
3054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3055 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3056 /* register, register */
3057 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3058 + pVCpu->iem.s.idxPrefix], bRm);
3059 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3060}
3061
3062
3063/**
3064 * Common worker for MMX instructions on the form:
3065 * pxxx mm1, mm2/mem64
3066 */
3067FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3068{
3069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3070 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3071 {
3072 /*
3073 * Register, register.
3074 */
3075 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3076 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3078 IEM_MC_BEGIN(2, 0);
3079 IEM_MC_ARG(uint64_t *, pDst, 0);
3080 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3081 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3082 IEM_MC_PREPARE_FPU_USAGE();
3083 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3084 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3085 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3086 IEM_MC_ADVANCE_RIP();
3087 IEM_MC_END();
3088 }
3089 else
3090 {
3091 /*
3092 * Register, memory.
3093 */
3094 IEM_MC_BEGIN(2, 2);
3095 IEM_MC_ARG(uint64_t *, pDst, 0);
3096 IEM_MC_LOCAL(uint64_t, uSrc);
3097 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3099
3100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3102 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3103 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3104
3105 IEM_MC_PREPARE_FPU_USAGE();
3106 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3107 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3108
3109 IEM_MC_ADVANCE_RIP();
3110 IEM_MC_END();
3111 }
3112 return VINF_SUCCESS;
3113}
3114
3115
3116/**
3117 * Common worker for SSE2 instructions on the forms:
3118 * pxxx xmm1, xmm2/mem128
3119 *
3120 * Proper alignment of the 128-bit operand is enforced.
3121 * Exceptions type 4. SSE2 cpuid checks.
3122 */
3123FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3124{
3125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3127 {
3128 /*
3129 * Register, register.
3130 */
3131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3132 IEM_MC_BEGIN(2, 0);
3133 IEM_MC_ARG(uint128_t *, pDst, 0);
3134 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3135 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3136 IEM_MC_PREPARE_SSE_USAGE();
3137 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3138 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3139 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3140 IEM_MC_ADVANCE_RIP();
3141 IEM_MC_END();
3142 }
3143 else
3144 {
3145 /*
3146 * Register, memory.
3147 */
3148 IEM_MC_BEGIN(2, 2);
3149 IEM_MC_ARG(uint128_t *, pDst, 0);
3150 IEM_MC_LOCAL(uint128_t, uSrc);
3151 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3153
3154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3156 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3157 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3158
3159 IEM_MC_PREPARE_SSE_USAGE();
3160 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3161 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3162
3163 IEM_MC_ADVANCE_RIP();
3164 IEM_MC_END();
3165 }
3166 return VINF_SUCCESS;
3167}
3168
3169
3170/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3171FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3172{
3173 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3174 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3175}
3176
3177/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3178FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3179{
3180 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3181 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3182}
3183
3184/* Opcode 0xf3 0x0f 0x74 - invalid */
3185/* Opcode 0xf2 0x0f 0x74 - invalid */
3186
3187
3188/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3189FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3190{
3191 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3192 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3193}
3194
3195/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3196FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3197{
3198 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3199 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3200}
3201
3202/* Opcode 0xf3 0x0f 0x75 - invalid */
3203/* Opcode 0xf2 0x0f 0x75 - invalid */
3204
3205
3206/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3207FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3208{
3209 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3210 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3211}
3212
3213/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3214FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3215{
3216 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3217 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3218}
3219
3220/* Opcode 0xf3 0x0f 0x76 - invalid */
3221/* Opcode 0xf2 0x0f 0x76 - invalid */
3222
3223
3224/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3225FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3226/* Opcode 0x66 0x0f 0x77 - invalid */
3227/* Opcode 0xf3 0x0f 0x77 - invalid */
3228/* Opcode 0xf2 0x0f 0x77 - invalid */
3229
3230/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3231FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3232/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3233FNIEMOP_STUB(iemOp_AmdGrp17);
3234/* Opcode 0xf3 0x0f 0x78 - invalid */
3235/* Opcode 0xf2 0x0f 0x78 - invalid */
3236
3237/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3238FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3239/* Opcode 0x66 0x0f 0x79 - invalid */
3240/* Opcode 0xf3 0x0f 0x79 - invalid */
3241/* Opcode 0xf2 0x0f 0x79 - invalid */
3242
3243/* Opcode 0x0f 0x7a - invalid */
3244/* Opcode 0x66 0x0f 0x7a - invalid */
3245/* Opcode 0xf3 0x0f 0x7a - invalid */
3246/* Opcode 0xf2 0x0f 0x7a - invalid */
3247
3248/* Opcode 0x0f 0x7b - invalid */
3249/* Opcode 0x66 0x0f 0x7b - invalid */
3250/* Opcode 0xf3 0x0f 0x7b - invalid */
3251/* Opcode 0xf2 0x0f 0x7b - invalid */
3252
3253/* Opcode 0x0f 0x7c - invalid */
3254/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3255FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3256/* Opcode 0xf3 0x0f 0x7c - invalid */
3257/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3258FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3259
3260/* Opcode 0x0f 0x7d - invalid */
3261/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3262FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3263/* Opcode 0xf3 0x0f 0x7d - invalid */
3264/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3265FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3266
3267
3268/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3269FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3270{
3271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3272 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3273 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3274 else
3275 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3276 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3277 {
3278 /* greg, MMX */
3279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3280 IEM_MC_BEGIN(0, 1);
3281 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3282 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3283 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3284 {
3285 IEM_MC_LOCAL(uint64_t, u64Tmp);
3286 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3287 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3288 }
3289 else
3290 {
3291 IEM_MC_LOCAL(uint32_t, u32Tmp);
3292 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3293 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3294 }
3295 IEM_MC_ADVANCE_RIP();
3296 IEM_MC_END();
3297 }
3298 else
3299 {
3300 /* [mem], MMX */
3301 IEM_MC_BEGIN(0, 2);
3302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3303 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3306 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3307 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3308 {
3309 IEM_MC_LOCAL(uint64_t, u64Tmp);
3310 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3311 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3312 }
3313 else
3314 {
3315 IEM_MC_LOCAL(uint32_t, u32Tmp);
3316 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3317 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3318 }
3319 IEM_MC_ADVANCE_RIP();
3320 IEM_MC_END();
3321 }
3322 return VINF_SUCCESS;
3323}
3324
3325/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3326FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3327{
3328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovq_Eq_Vq, "vmovq Eq,Vq");
    else
        IEMOP_MNEMONIC(vmovd_Ed_Vd, "vmovd Ed,Vd");
3333 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3334 {
3335 /* greg, XMM */
3336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3337 IEM_MC_BEGIN(0, 1);
3338 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3339 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3340 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3341 {
3342 IEM_MC_LOCAL(uint64_t, u64Tmp);
3343 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3344 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3345 }
3346 else
3347 {
3348 IEM_MC_LOCAL(uint32_t, u32Tmp);
3349 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3350 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3351 }
3352 IEM_MC_ADVANCE_RIP();
3353 IEM_MC_END();
3354 }
3355 else
3356 {
3357 /* [mem], XMM */
3358 IEM_MC_BEGIN(0, 2);
3359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3363 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3364 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3365 {
3366 IEM_MC_LOCAL(uint64_t, u64Tmp);
3367 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3368 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3369 }
3370 else
3371 {
3372 IEM_MC_LOCAL(uint32_t, u32Tmp);
3373 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3374 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3375 }
3376 IEM_MC_ADVANCE_RIP();
3377 IEM_MC_END();
3378 }
3379 return VINF_SUCCESS;
3380}
3381
3382/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3383FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3384/* Opcode 0xf2 0x0f 0x7e - invalid */
3385
3386
3387/** Opcode 0x0f 0x7f - movq Qq, Pq */
3388FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3389{
3390 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3392 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3393 {
3394 /*
3395 * Register, register.
3396 */
3397 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3398 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3400 IEM_MC_BEGIN(0, 1);
3401 IEM_MC_LOCAL(uint64_t, u64Tmp);
3402 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3403 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3404 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3405 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3406 IEM_MC_ADVANCE_RIP();
3407 IEM_MC_END();
3408 }
3409 else
3410 {
3411 /*
3412 * Register, memory.
3413 */
3414 IEM_MC_BEGIN(0, 2);
3415 IEM_MC_LOCAL(uint64_t, u64Tmp);
3416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3417
3418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3420 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3421 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3422
3423 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3424 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3425
3426 IEM_MC_ADVANCE_RIP();
3427 IEM_MC_END();
3428 }
3429 return VINF_SUCCESS;
3430}
3431
3432/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3433FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3434{
3435 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3437 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3438 {
3439 /*
3440 * Register, register.
3441 */
3442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3443 IEM_MC_BEGIN(0, 0);
3444 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3446 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3447 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3448 IEM_MC_ADVANCE_RIP();
3449 IEM_MC_END();
3450 }
3451 else
3452 {
3453 /*
3454 * Register, memory.
3455 */
3456 IEM_MC_BEGIN(0, 2);
3457 IEM_MC_LOCAL(uint128_t, u128Tmp);
3458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3459
3460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3462 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3463 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3464
3465 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3466 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3467
3468 IEM_MC_ADVANCE_RIP();
3469 IEM_MC_END();
3470 }
3471 return VINF_SUCCESS;
3472}
3473
3474/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3475FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3476{
3477    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3478    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3479 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3480 {
3481 /*
3482 * Register, register.
3483 */
3484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3485 IEM_MC_BEGIN(0, 0);
3486 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3487 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3488 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3489 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3490 IEM_MC_ADVANCE_RIP();
3491 IEM_MC_END();
3492 }
3493 else
3494 {
3495 /*
3496 * Register, memory.
3497 */
3498 IEM_MC_BEGIN(0, 2);
3499 IEM_MC_LOCAL(uint128_t, u128Tmp);
3500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3501
3502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3504 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3505 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3506
3507 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3508 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3509
3510 IEM_MC_ADVANCE_RIP();
3511 IEM_MC_END();
3512 }
3513 return VINF_SUCCESS;
3514}
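
/* Added note: the aligned/unaligned split between the two stores above is the
 * whole difference between the 0x66 (movdqa) and 0xf3 (movdqu) forms:
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE is expected to raise #GP(0) for a 16-byte
 * operand that is not 16-byte aligned, while the plain IEM_MC_STORE_MEM_U128
 * accepts any alignment.
 */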
3515
3516/* Opcode 0xf2 0x0f 0x7f - invalid */
3517
3518
3519
3520/** Opcode 0x0f 0x80. */
3521FNIEMOP_DEF(iemOp_jo_Jv)
3522{
3523 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3524 IEMOP_HLP_MIN_386();
3525 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3526 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3527 {
3528 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3530
3531 IEM_MC_BEGIN(0, 0);
3532 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3533 IEM_MC_REL_JMP_S16(i16Imm);
3534 } IEM_MC_ELSE() {
3535 IEM_MC_ADVANCE_RIP();
3536 } IEM_MC_ENDIF();
3537 IEM_MC_END();
3538 }
3539 else
3540 {
3541 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3543
3544 IEM_MC_BEGIN(0, 0);
3545 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3546 IEM_MC_REL_JMP_S32(i32Imm);
3547 } IEM_MC_ELSE() {
3548 IEM_MC_ADVANCE_RIP();
3549 } IEM_MC_ENDIF();
3550 IEM_MC_END();
3551 }
3552 return VINF_SUCCESS;
3553}
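
/* Added note: the remaining 0x0f 0x80..0x8f Jcc decoders below all repeat the
 * template above: a 16-bit effective operand size reads a signed 16-bit
 * displacement, anything else a signed 32-bit one, and in 64-bit mode
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE forces the latter.  The displacement is
 * relative to the end of the instruction; e.g. a 6-byte 'jo rel32' at 0x1000
 * with rel32=0x10 transfers to 0x1016 when OF is set.
 */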
3554
3555
3556/** Opcode 0x0f 0x81. */
3557FNIEMOP_DEF(iemOp_jno_Jv)
3558{
3559 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3560 IEMOP_HLP_MIN_386();
3561 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3562 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3563 {
3564 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3566
3567 IEM_MC_BEGIN(0, 0);
3568 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3569 IEM_MC_ADVANCE_RIP();
3570 } IEM_MC_ELSE() {
3571 IEM_MC_REL_JMP_S16(i16Imm);
3572 } IEM_MC_ENDIF();
3573 IEM_MC_END();
3574 }
3575 else
3576 {
3577 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3579
3580 IEM_MC_BEGIN(0, 0);
3581 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3582 IEM_MC_ADVANCE_RIP();
3583 } IEM_MC_ELSE() {
3584 IEM_MC_REL_JMP_S32(i32Imm);
3585 } IEM_MC_ENDIF();
3586 IEM_MC_END();
3587 }
3588 return VINF_SUCCESS;
3589}
3590
3591
3592/** Opcode 0x0f 0x82. */
3593FNIEMOP_DEF(iemOp_jc_Jv)
3594{
3595 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3596 IEMOP_HLP_MIN_386();
3597 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3598 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3599 {
3600 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3602
3603 IEM_MC_BEGIN(0, 0);
3604 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3605 IEM_MC_REL_JMP_S16(i16Imm);
3606 } IEM_MC_ELSE() {
3607 IEM_MC_ADVANCE_RIP();
3608 } IEM_MC_ENDIF();
3609 IEM_MC_END();
3610 }
3611 else
3612 {
3613 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3615
3616 IEM_MC_BEGIN(0, 0);
3617 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3618 IEM_MC_REL_JMP_S32(i32Imm);
3619 } IEM_MC_ELSE() {
3620 IEM_MC_ADVANCE_RIP();
3621 } IEM_MC_ENDIF();
3622 IEM_MC_END();
3623 }
3624 return VINF_SUCCESS;
3625}
3626
3627
3628/** Opcode 0x0f 0x83. */
3629FNIEMOP_DEF(iemOp_jnc_Jv)
3630{
3631 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3632 IEMOP_HLP_MIN_386();
3633 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3634 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3635 {
3636 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3638
3639 IEM_MC_BEGIN(0, 0);
3640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3641 IEM_MC_ADVANCE_RIP();
3642 } IEM_MC_ELSE() {
3643 IEM_MC_REL_JMP_S16(i16Imm);
3644 } IEM_MC_ENDIF();
3645 IEM_MC_END();
3646 }
3647 else
3648 {
3649 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3651
3652 IEM_MC_BEGIN(0, 0);
3653 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3654 IEM_MC_ADVANCE_RIP();
3655 } IEM_MC_ELSE() {
3656 IEM_MC_REL_JMP_S32(i32Imm);
3657 } IEM_MC_ENDIF();
3658 IEM_MC_END();
3659 }
3660 return VINF_SUCCESS;
3661}
3662
3663
3664/** Opcode 0x0f 0x84. */
3665FNIEMOP_DEF(iemOp_je_Jv)
3666{
3667 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3668 IEMOP_HLP_MIN_386();
3669 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3670 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3671 {
3672 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3674
3675 IEM_MC_BEGIN(0, 0);
3676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3677 IEM_MC_REL_JMP_S16(i16Imm);
3678 } IEM_MC_ELSE() {
3679 IEM_MC_ADVANCE_RIP();
3680 } IEM_MC_ENDIF();
3681 IEM_MC_END();
3682 }
3683 else
3684 {
3685 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3687
3688 IEM_MC_BEGIN(0, 0);
3689 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3690 IEM_MC_REL_JMP_S32(i32Imm);
3691 } IEM_MC_ELSE() {
3692 IEM_MC_ADVANCE_RIP();
3693 } IEM_MC_ENDIF();
3694 IEM_MC_END();
3695 }
3696 return VINF_SUCCESS;
3697}
3698
3699
3700/** Opcode 0x0f 0x85. */
3701FNIEMOP_DEF(iemOp_jne_Jv)
3702{
3703 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3704 IEMOP_HLP_MIN_386();
3705 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3706 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3707 {
3708 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3710
3711 IEM_MC_BEGIN(0, 0);
3712 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3713 IEM_MC_ADVANCE_RIP();
3714 } IEM_MC_ELSE() {
3715 IEM_MC_REL_JMP_S16(i16Imm);
3716 } IEM_MC_ENDIF();
3717 IEM_MC_END();
3718 }
3719 else
3720 {
3721 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3723
3724 IEM_MC_BEGIN(0, 0);
3725 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3726 IEM_MC_ADVANCE_RIP();
3727 } IEM_MC_ELSE() {
3728 IEM_MC_REL_JMP_S32(i32Imm);
3729 } IEM_MC_ENDIF();
3730 IEM_MC_END();
3731 }
3732 return VINF_SUCCESS;
3733}
3734
3735
3736/** Opcode 0x0f 0x86. */
3737FNIEMOP_DEF(iemOp_jbe_Jv)
3738{
3739 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3740 IEMOP_HLP_MIN_386();
3741 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3742 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3743 {
3744 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3746
3747 IEM_MC_BEGIN(0, 0);
3748 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3749 IEM_MC_REL_JMP_S16(i16Imm);
3750 } IEM_MC_ELSE() {
3751 IEM_MC_ADVANCE_RIP();
3752 } IEM_MC_ENDIF();
3753 IEM_MC_END();
3754 }
3755 else
3756 {
3757 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3759
3760 IEM_MC_BEGIN(0, 0);
3761 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3762 IEM_MC_REL_JMP_S32(i32Imm);
3763 } IEM_MC_ELSE() {
3764 IEM_MC_ADVANCE_RIP();
3765 } IEM_MC_ENDIF();
3766 IEM_MC_END();
3767 }
3768 return VINF_SUCCESS;
3769}
3770
3771
3772/** Opcode 0x0f 0x87. */
3773FNIEMOP_DEF(iemOp_jnbe_Jv)
3774{
3775 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3776 IEMOP_HLP_MIN_386();
3777 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3778 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3779 {
3780 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3782
3783 IEM_MC_BEGIN(0, 0);
3784 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3785 IEM_MC_ADVANCE_RIP();
3786 } IEM_MC_ELSE() {
3787 IEM_MC_REL_JMP_S16(i16Imm);
3788 } IEM_MC_ENDIF();
3789 IEM_MC_END();
3790 }
3791 else
3792 {
3793 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3795
3796 IEM_MC_BEGIN(0, 0);
3797 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3798 IEM_MC_ADVANCE_RIP();
3799 } IEM_MC_ELSE() {
3800 IEM_MC_REL_JMP_S32(i32Imm);
3801 } IEM_MC_ENDIF();
3802 IEM_MC_END();
3803 }
3804 return VINF_SUCCESS;
3805}
3806
3807
3808/** Opcode 0x0f 0x88. */
3809FNIEMOP_DEF(iemOp_js_Jv)
3810{
3811 IEMOP_MNEMONIC(js_Jv, "js Jv");
3812 IEMOP_HLP_MIN_386();
3813 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3814 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3815 {
3816 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3818
3819 IEM_MC_BEGIN(0, 0);
3820 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3821 IEM_MC_REL_JMP_S16(i16Imm);
3822 } IEM_MC_ELSE() {
3823 IEM_MC_ADVANCE_RIP();
3824 } IEM_MC_ENDIF();
3825 IEM_MC_END();
3826 }
3827 else
3828 {
3829 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3831
3832 IEM_MC_BEGIN(0, 0);
3833 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3834 IEM_MC_REL_JMP_S32(i32Imm);
3835 } IEM_MC_ELSE() {
3836 IEM_MC_ADVANCE_RIP();
3837 } IEM_MC_ENDIF();
3838 IEM_MC_END();
3839 }
3840 return VINF_SUCCESS;
3841}
3842
3843
3844/** Opcode 0x0f 0x89. */
3845FNIEMOP_DEF(iemOp_jns_Jv)
3846{
3847 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3848 IEMOP_HLP_MIN_386();
3849 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3850 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3851 {
3852 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3854
3855 IEM_MC_BEGIN(0, 0);
3856 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3857 IEM_MC_ADVANCE_RIP();
3858 } IEM_MC_ELSE() {
3859 IEM_MC_REL_JMP_S16(i16Imm);
3860 } IEM_MC_ENDIF();
3861 IEM_MC_END();
3862 }
3863 else
3864 {
3865 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3867
3868 IEM_MC_BEGIN(0, 0);
3869 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3870 IEM_MC_ADVANCE_RIP();
3871 } IEM_MC_ELSE() {
3872 IEM_MC_REL_JMP_S32(i32Imm);
3873 } IEM_MC_ENDIF();
3874 IEM_MC_END();
3875 }
3876 return VINF_SUCCESS;
3877}
3878
3879
3880/** Opcode 0x0f 0x8a. */
3881FNIEMOP_DEF(iemOp_jp_Jv)
3882{
3883 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3884 IEMOP_HLP_MIN_386();
3885 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3886 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3887 {
3888 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3890
3891 IEM_MC_BEGIN(0, 0);
3892 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3893 IEM_MC_REL_JMP_S16(i16Imm);
3894 } IEM_MC_ELSE() {
3895 IEM_MC_ADVANCE_RIP();
3896 } IEM_MC_ENDIF();
3897 IEM_MC_END();
3898 }
3899 else
3900 {
3901 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3903
3904 IEM_MC_BEGIN(0, 0);
3905 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3906 IEM_MC_REL_JMP_S32(i32Imm);
3907 } IEM_MC_ELSE() {
3908 IEM_MC_ADVANCE_RIP();
3909 } IEM_MC_ENDIF();
3910 IEM_MC_END();
3911 }
3912 return VINF_SUCCESS;
3913}
3914
3915
3916/** Opcode 0x0f 0x8b. */
3917FNIEMOP_DEF(iemOp_jnp_Jv)
3918{
3919 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3920 IEMOP_HLP_MIN_386();
3921 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3922 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3923 {
3924 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3926
3927 IEM_MC_BEGIN(0, 0);
3928 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3929 IEM_MC_ADVANCE_RIP();
3930 } IEM_MC_ELSE() {
3931 IEM_MC_REL_JMP_S16(i16Imm);
3932 } IEM_MC_ENDIF();
3933 IEM_MC_END();
3934 }
3935 else
3936 {
3937 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3939
3940 IEM_MC_BEGIN(0, 0);
3941 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3942 IEM_MC_ADVANCE_RIP();
3943 } IEM_MC_ELSE() {
3944 IEM_MC_REL_JMP_S32(i32Imm);
3945 } IEM_MC_ENDIF();
3946 IEM_MC_END();
3947 }
3948 return VINF_SUCCESS;
3949}
3950
3951
3952/** Opcode 0x0f 0x8c. */
3953FNIEMOP_DEF(iemOp_jl_Jv)
3954{
3955 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3956 IEMOP_HLP_MIN_386();
3957 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3958 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3959 {
3960 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3962
3963 IEM_MC_BEGIN(0, 0);
3964 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3965 IEM_MC_REL_JMP_S16(i16Imm);
3966 } IEM_MC_ELSE() {
3967 IEM_MC_ADVANCE_RIP();
3968 } IEM_MC_ENDIF();
3969 IEM_MC_END();
3970 }
3971 else
3972 {
3973 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3975
3976 IEM_MC_BEGIN(0, 0);
3977 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3978 IEM_MC_REL_JMP_S32(i32Imm);
3979 } IEM_MC_ELSE() {
3980 IEM_MC_ADVANCE_RIP();
3981 } IEM_MC_ENDIF();
3982 IEM_MC_END();
3983 }
3984 return VINF_SUCCESS;
3985}
3986
3987
3988/** Opcode 0x0f 0x8d. */
3989FNIEMOP_DEF(iemOp_jnl_Jv)
3990{
3991 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3992 IEMOP_HLP_MIN_386();
3993 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3994 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3995 {
3996 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3998
3999 IEM_MC_BEGIN(0, 0);
4000 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4001 IEM_MC_ADVANCE_RIP();
4002 } IEM_MC_ELSE() {
4003 IEM_MC_REL_JMP_S16(i16Imm);
4004 } IEM_MC_ENDIF();
4005 IEM_MC_END();
4006 }
4007 else
4008 {
4009 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4011
4012 IEM_MC_BEGIN(0, 0);
4013 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4014 IEM_MC_ADVANCE_RIP();
4015 } IEM_MC_ELSE() {
4016 IEM_MC_REL_JMP_S32(i32Imm);
4017 } IEM_MC_ENDIF();
4018 IEM_MC_END();
4019 }
4020 return VINF_SUCCESS;
4021}
4022
4023
4024/** Opcode 0x0f 0x8e. */
4025FNIEMOP_DEF(iemOp_jle_Jv)
4026{
4027 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4028 IEMOP_HLP_MIN_386();
4029 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4030 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4031 {
4032 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4034
4035 IEM_MC_BEGIN(0, 0);
4036 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4037 IEM_MC_REL_JMP_S16(i16Imm);
4038 } IEM_MC_ELSE() {
4039 IEM_MC_ADVANCE_RIP();
4040 } IEM_MC_ENDIF();
4041 IEM_MC_END();
4042 }
4043 else
4044 {
4045 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4047
4048 IEM_MC_BEGIN(0, 0);
4049 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4050 IEM_MC_REL_JMP_S32(i32Imm);
4051 } IEM_MC_ELSE() {
4052 IEM_MC_ADVANCE_RIP();
4053 } IEM_MC_ENDIF();
4054 IEM_MC_END();
4055 }
4056 return VINF_SUCCESS;
4057}
4058
4059
4060/** Opcode 0x0f 0x8f. */
4061FNIEMOP_DEF(iemOp_jnle_Jv)
4062{
4063 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4064 IEMOP_HLP_MIN_386();
4065 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4066 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4067 {
4068 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4070
4071 IEM_MC_BEGIN(0, 0);
4072 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4073 IEM_MC_ADVANCE_RIP();
4074 } IEM_MC_ELSE() {
4075 IEM_MC_REL_JMP_S16(i16Imm);
4076 } IEM_MC_ENDIF();
4077 IEM_MC_END();
4078 }
4079 else
4080 {
4081 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4083
4084 IEM_MC_BEGIN(0, 0);
4085 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4086 IEM_MC_ADVANCE_RIP();
4087 } IEM_MC_ELSE() {
4088 IEM_MC_REL_JMP_S32(i32Imm);
4089 } IEM_MC_ENDIF();
4090 IEM_MC_END();
4091 }
4092 return VINF_SUCCESS;
4093}
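
/* Added note -- the flag predicates used by the Jcc (and setcc) blocks, as
 * encoded in the IEM_MC_IF_* checks above:
 *     jb/jc:    CF=1                 jbe/jna:  CF=1 or ZF=1
 *     jl/jnge:  SF != OF             jle/jng:  ZF=1 or SF != OF
 * The negated forms (jnc, jnbe, jnl, jnle, ...) simply swap the jump and
 * fall-through branches.
 */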
4094
4095
4096/** Opcode 0x0f 0x90. */
4097FNIEMOP_DEF(iemOp_seto_Eb)
4098{
4099 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4100 IEMOP_HLP_MIN_386();
4101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4102
4103 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4104 * any way. AMD says it's "unused", whatever that means. We're
4105 * ignoring for now. */
4106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4107 {
4108 /* register target */
4109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4110 IEM_MC_BEGIN(0, 0);
4111 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4112 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4113 } IEM_MC_ELSE() {
4114 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4115 } IEM_MC_ENDIF();
4116 IEM_MC_ADVANCE_RIP();
4117 IEM_MC_END();
4118 }
4119 else
4120 {
4121 /* memory target */
4122 IEM_MC_BEGIN(0, 1);
4123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4126 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4127 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4128 } IEM_MC_ELSE() {
4129 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4130 } IEM_MC_ENDIF();
4131 IEM_MC_ADVANCE_RIP();
4132 IEM_MC_END();
4133 }
4134 return VINF_SUCCESS;
4135}
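
/* Added note: the 0x0f 0x91..0x9f setcc decoders below repeat this template,
 * storing 1 to the byte destination when the condition holds and 0 otherwise;
 * only the tested EFLAGS expression changes.  As per the @todo, the ModR/M
 * reg field is treated as a don't-care.
 */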
4136
4137
4138/** Opcode 0x0f 0x91. */
4139FNIEMOP_DEF(iemOp_setno_Eb)
4140{
4141 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4142 IEMOP_HLP_MIN_386();
4143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4144
4145 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4146 * any way. AMD says it's "unused", whatever that means. We're
4147 * ignoring for now. */
4148 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4149 {
4150 /* register target */
4151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4152 IEM_MC_BEGIN(0, 0);
4153 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4154 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4155 } IEM_MC_ELSE() {
4156 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4157 } IEM_MC_ENDIF();
4158 IEM_MC_ADVANCE_RIP();
4159 IEM_MC_END();
4160 }
4161 else
4162 {
4163 /* memory target */
4164 IEM_MC_BEGIN(0, 1);
4165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4168 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4169 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4170 } IEM_MC_ELSE() {
4171 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4172 } IEM_MC_ENDIF();
4173 IEM_MC_ADVANCE_RIP();
4174 IEM_MC_END();
4175 }
4176 return VINF_SUCCESS;
4177}
4178
4179
4180/** Opcode 0x0f 0x92. */
4181FNIEMOP_DEF(iemOp_setc_Eb)
4182{
4183 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4184 IEMOP_HLP_MIN_386();
4185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4186
4187 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4188 * any way. AMD says it's "unused", whatever that means. We're
4189 * ignoring for now. */
4190 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4191 {
4192 /* register target */
4193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4194 IEM_MC_BEGIN(0, 0);
4195 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4196 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4197 } IEM_MC_ELSE() {
4198 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4199 } IEM_MC_ENDIF();
4200 IEM_MC_ADVANCE_RIP();
4201 IEM_MC_END();
4202 }
4203 else
4204 {
4205 /* memory target */
4206 IEM_MC_BEGIN(0, 1);
4207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4210 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4211 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4212 } IEM_MC_ELSE() {
4213 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4214 } IEM_MC_ENDIF();
4215 IEM_MC_ADVANCE_RIP();
4216 IEM_MC_END();
4217 }
4218 return VINF_SUCCESS;
4219}
4220
4221
4222/** Opcode 0x0f 0x93. */
4223FNIEMOP_DEF(iemOp_setnc_Eb)
4224{
4225 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4226 IEMOP_HLP_MIN_386();
4227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4228
4229 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4230 * any way. AMD says it's "unused", whatever that means. We're
4231 * ignoring for now. */
4232 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4233 {
4234 /* register target */
4235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4236 IEM_MC_BEGIN(0, 0);
4237 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4238 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4239 } IEM_MC_ELSE() {
4240 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4241 } IEM_MC_ENDIF();
4242 IEM_MC_ADVANCE_RIP();
4243 IEM_MC_END();
4244 }
4245 else
4246 {
4247 /* memory target */
4248 IEM_MC_BEGIN(0, 1);
4249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4252 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4253 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4254 } IEM_MC_ELSE() {
4255 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4256 } IEM_MC_ENDIF();
4257 IEM_MC_ADVANCE_RIP();
4258 IEM_MC_END();
4259 }
4260 return VINF_SUCCESS;
4261}
4262
4263
4264/** Opcode 0x0f 0x94. */
4265FNIEMOP_DEF(iemOp_sete_Eb)
4266{
4267 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4268 IEMOP_HLP_MIN_386();
4269 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4270
4271 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4272 * any way. AMD says it's "unused", whatever that means. We're
4273 * ignoring for now. */
4274 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4275 {
4276 /* register target */
4277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4278 IEM_MC_BEGIN(0, 0);
4279 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4280 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4281 } IEM_MC_ELSE() {
4282 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4283 } IEM_MC_ENDIF();
4284 IEM_MC_ADVANCE_RIP();
4285 IEM_MC_END();
4286 }
4287 else
4288 {
4289 /* memory target */
4290 IEM_MC_BEGIN(0, 1);
4291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4294 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4295 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4296 } IEM_MC_ELSE() {
4297 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4298 } IEM_MC_ENDIF();
4299 IEM_MC_ADVANCE_RIP();
4300 IEM_MC_END();
4301 }
4302 return VINF_SUCCESS;
4303}
4304
4305
4306/** Opcode 0x0f 0x95. */
4307FNIEMOP_DEF(iemOp_setne_Eb)
4308{
4309 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4310 IEMOP_HLP_MIN_386();
4311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4312
4313 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4314 * any way. AMD says it's "unused", whatever that means. We're
4315 * ignoring for now. */
4316 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4317 {
4318 /* register target */
4319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4320 IEM_MC_BEGIN(0, 0);
4321 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4322 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4323 } IEM_MC_ELSE() {
4324 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4325 } IEM_MC_ENDIF();
4326 IEM_MC_ADVANCE_RIP();
4327 IEM_MC_END();
4328 }
4329 else
4330 {
4331 /* memory target */
4332 IEM_MC_BEGIN(0, 1);
4333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4336 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4337 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4338 } IEM_MC_ELSE() {
4339 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4340 } IEM_MC_ENDIF();
4341 IEM_MC_ADVANCE_RIP();
4342 IEM_MC_END();
4343 }
4344 return VINF_SUCCESS;
4345}
4346
4347
4348/** Opcode 0x0f 0x96. */
4349FNIEMOP_DEF(iemOp_setbe_Eb)
4350{
4351 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4352 IEMOP_HLP_MIN_386();
4353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4354
4355 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4356 * any way. AMD says it's "unused", whatever that means. We're
4357 * ignoring for now. */
4358 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4359 {
4360 /* register target */
4361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4362 IEM_MC_BEGIN(0, 0);
4363 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4364 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4365 } IEM_MC_ELSE() {
4366 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4367 } IEM_MC_ENDIF();
4368 IEM_MC_ADVANCE_RIP();
4369 IEM_MC_END();
4370 }
4371 else
4372 {
4373 /* memory target */
4374 IEM_MC_BEGIN(0, 1);
4375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4378 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4379 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4380 } IEM_MC_ELSE() {
4381 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4382 } IEM_MC_ENDIF();
4383 IEM_MC_ADVANCE_RIP();
4384 IEM_MC_END();
4385 }
4386 return VINF_SUCCESS;
4387}
4388
4389
4390/** Opcode 0x0f 0x97. */
4391FNIEMOP_DEF(iemOp_setnbe_Eb)
4392{
4393 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4394 IEMOP_HLP_MIN_386();
4395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4396
4397 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4398 * any way. AMD says it's "unused", whatever that means. We're
4399 * ignoring for now. */
4400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4401 {
4402 /* register target */
4403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4404 IEM_MC_BEGIN(0, 0);
4405 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4406 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4407 } IEM_MC_ELSE() {
4408 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4409 } IEM_MC_ENDIF();
4410 IEM_MC_ADVANCE_RIP();
4411 IEM_MC_END();
4412 }
4413 else
4414 {
4415 /* memory target */
4416 IEM_MC_BEGIN(0, 1);
4417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4420 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4421 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4422 } IEM_MC_ELSE() {
4423 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4424 } IEM_MC_ENDIF();
4425 IEM_MC_ADVANCE_RIP();
4426 IEM_MC_END();
4427 }
4428 return VINF_SUCCESS;
4429}
4430
4431
4432/** Opcode 0x0f 0x98. */
4433FNIEMOP_DEF(iemOp_sets_Eb)
4434{
4435 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4436 IEMOP_HLP_MIN_386();
4437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4438
4439 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4440 * any way. AMD says it's "unused", whatever that means. We're
4441 * ignoring for now. */
4442 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4443 {
4444 /* register target */
4445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4446 IEM_MC_BEGIN(0, 0);
4447 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4448 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4449 } IEM_MC_ELSE() {
4450 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4451 } IEM_MC_ENDIF();
4452 IEM_MC_ADVANCE_RIP();
4453 IEM_MC_END();
4454 }
4455 else
4456 {
4457 /* memory target */
4458 IEM_MC_BEGIN(0, 1);
4459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4462 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4463 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4464 } IEM_MC_ELSE() {
4465 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4466 } IEM_MC_ENDIF();
4467 IEM_MC_ADVANCE_RIP();
4468 IEM_MC_END();
4469 }
4470 return VINF_SUCCESS;
4471}
4472
4473
4474/** Opcode 0x0f 0x99. */
4475FNIEMOP_DEF(iemOp_setns_Eb)
4476{
4477 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4478 IEMOP_HLP_MIN_386();
4479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4480
4481 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4482 * any way. AMD says it's "unused", whatever that means. We're
4483 * ignoring for now. */
4484 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4485 {
4486 /* register target */
4487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4488 IEM_MC_BEGIN(0, 0);
4489 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4490 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4491 } IEM_MC_ELSE() {
4492 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4493 } IEM_MC_ENDIF();
4494 IEM_MC_ADVANCE_RIP();
4495 IEM_MC_END();
4496 }
4497 else
4498 {
4499 /* memory target */
4500 IEM_MC_BEGIN(0, 1);
4501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4504 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4505 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4506 } IEM_MC_ELSE() {
4507 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4508 } IEM_MC_ENDIF();
4509 IEM_MC_ADVANCE_RIP();
4510 IEM_MC_END();
4511 }
4512 return VINF_SUCCESS;
4513}
4514
4515
4516/** Opcode 0x0f 0x9a. */
4517FNIEMOP_DEF(iemOp_setp_Eb)
4518{
4519 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4520 IEMOP_HLP_MIN_386();
4521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4522
4523 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4524 * any way. AMD says it's "unused", whatever that means. We're
4525 * ignoring for now. */
4526 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4527 {
4528 /* register target */
4529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4530 IEM_MC_BEGIN(0, 0);
4531 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4532 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4533 } IEM_MC_ELSE() {
4534 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4535 } IEM_MC_ENDIF();
4536 IEM_MC_ADVANCE_RIP();
4537 IEM_MC_END();
4538 }
4539 else
4540 {
4541 /* memory target */
4542 IEM_MC_BEGIN(0, 1);
4543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4546 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4547 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4548 } IEM_MC_ELSE() {
4549 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4550 } IEM_MC_ENDIF();
4551 IEM_MC_ADVANCE_RIP();
4552 IEM_MC_END();
4553 }
4554 return VINF_SUCCESS;
4555}
4556
4557
4558/** Opcode 0x0f 0x9b. */
4559FNIEMOP_DEF(iemOp_setnp_Eb)
4560{
4561 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4562 IEMOP_HLP_MIN_386();
4563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4564
4565 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4566 * any way. AMD says it's "unused", whatever that means. We're
4567 * ignoring for now. */
4568 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4569 {
4570 /* register target */
4571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4572 IEM_MC_BEGIN(0, 0);
4573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4574 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4575 } IEM_MC_ELSE() {
4576 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4577 } IEM_MC_ENDIF();
4578 IEM_MC_ADVANCE_RIP();
4579 IEM_MC_END();
4580 }
4581 else
4582 {
4583 /* memory target */
4584 IEM_MC_BEGIN(0, 1);
4585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4589 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4590 } IEM_MC_ELSE() {
4591 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4592 } IEM_MC_ENDIF();
4593 IEM_MC_ADVANCE_RIP();
4594 IEM_MC_END();
4595 }
4596 return VINF_SUCCESS;
4597}
4598
4599
4600/** Opcode 0x0f 0x9c. */
4601FNIEMOP_DEF(iemOp_setl_Eb)
4602{
4603 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4604 IEMOP_HLP_MIN_386();
4605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4606
4607 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4608 * any way. AMD says it's "unused", whatever that means. We're
4609 * ignoring for now. */
4610 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4611 {
4612 /* register target */
4613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4614 IEM_MC_BEGIN(0, 0);
4615 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4616 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4617 } IEM_MC_ELSE() {
4618 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4619 } IEM_MC_ENDIF();
4620 IEM_MC_ADVANCE_RIP();
4621 IEM_MC_END();
4622 }
4623 else
4624 {
4625 /* memory target */
4626 IEM_MC_BEGIN(0, 1);
4627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4630 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4631 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4632 } IEM_MC_ELSE() {
4633 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4634 } IEM_MC_ENDIF();
4635 IEM_MC_ADVANCE_RIP();
4636 IEM_MC_END();
4637 }
4638 return VINF_SUCCESS;
4639}
4640
4641
4642/** Opcode 0x0f 0x9d. */
4643FNIEMOP_DEF(iemOp_setnl_Eb)
4644{
4645 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4646 IEMOP_HLP_MIN_386();
4647 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4648
4649 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4650 * any way. AMD says it's "unused", whatever that means. We're
4651 * ignoring for now. */
4652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4653 {
4654 /* register target */
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4656 IEM_MC_BEGIN(0, 0);
4657 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4658 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4659 } IEM_MC_ELSE() {
4660 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4661 } IEM_MC_ENDIF();
4662 IEM_MC_ADVANCE_RIP();
4663 IEM_MC_END();
4664 }
4665 else
4666 {
4667 /* memory target */
4668 IEM_MC_BEGIN(0, 1);
4669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4672 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4673 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4674 } IEM_MC_ELSE() {
4675 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4676 } IEM_MC_ENDIF();
4677 IEM_MC_ADVANCE_RIP();
4678 IEM_MC_END();
4679 }
4680 return VINF_SUCCESS;
4681}
4682
4683
4684/** Opcode 0x0f 0x9e. */
4685FNIEMOP_DEF(iemOp_setle_Eb)
4686{
4687 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4688 IEMOP_HLP_MIN_386();
4689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4690
4691 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4692 * any way. AMD says it's "unused", whatever that means. We're
4693 * ignoring for now. */
4694 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4695 {
4696 /* register target */
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 IEM_MC_BEGIN(0, 0);
4699 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4700 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4701 } IEM_MC_ELSE() {
4702 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4703 } IEM_MC_ENDIF();
4704 IEM_MC_ADVANCE_RIP();
4705 IEM_MC_END();
4706 }
4707 else
4708 {
4709 /* memory target */
4710 IEM_MC_BEGIN(0, 1);
4711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4714 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4715 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4716 } IEM_MC_ELSE() {
4717 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4718 } IEM_MC_ENDIF();
4719 IEM_MC_ADVANCE_RIP();
4720 IEM_MC_END();
4721 }
4722 return VINF_SUCCESS;
4723}
4724
4725
4726/** Opcode 0x0f 0x9f. */
4727FNIEMOP_DEF(iemOp_setnle_Eb)
4728{
4729 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4730 IEMOP_HLP_MIN_386();
4731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4732
4733 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4734 * any way. AMD says it's "unused", whatever that means. We're
4735 * ignoring for now. */
4736 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4737 {
4738 /* register target */
4739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4740 IEM_MC_BEGIN(0, 0);
4741 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4742 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4743 } IEM_MC_ELSE() {
4744 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4745 } IEM_MC_ENDIF();
4746 IEM_MC_ADVANCE_RIP();
4747 IEM_MC_END();
4748 }
4749 else
4750 {
4751 /* memory target */
4752 IEM_MC_BEGIN(0, 1);
4753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4756 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4757 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4758 } IEM_MC_ELSE() {
4759 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4760 } IEM_MC_ENDIF();
4761 IEM_MC_ADVANCE_RIP();
4762 IEM_MC_END();
4763 }
4764 return VINF_SUCCESS;
4765}
4766
4767
4768/**
4769 * Common 'push segment-register' helper.
4770 */
4771FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4772{
4773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4774 if (iReg < X86_SREG_FS)
4775 IEMOP_HLP_NO_64BIT();
4776 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4777
4778 switch (pVCpu->iem.s.enmEffOpSize)
4779 {
4780 case IEMMODE_16BIT:
4781 IEM_MC_BEGIN(0, 1);
4782 IEM_MC_LOCAL(uint16_t, u16Value);
4783 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4784 IEM_MC_PUSH_U16(u16Value);
4785 IEM_MC_ADVANCE_RIP();
4786 IEM_MC_END();
4787 break;
4788
4789 case IEMMODE_32BIT:
4790 IEM_MC_BEGIN(0, 1);
4791 IEM_MC_LOCAL(uint32_t, u32Value);
4792 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4793 IEM_MC_PUSH_U32_SREG(u32Value);
4794 IEM_MC_ADVANCE_RIP();
4795 IEM_MC_END();
4796 break;
4797
4798 case IEMMODE_64BIT:
4799 IEM_MC_BEGIN(0, 1);
4800 IEM_MC_LOCAL(uint64_t, u64Value);
4801 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4802 IEM_MC_PUSH_U64(u64Value);
4803 IEM_MC_ADVANCE_RIP();
4804 IEM_MC_END();
4805 break;
4806 }
4807
4808 return VINF_SUCCESS;
4809}
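
/* Added note (an assumption, not stated in this file): IEM_MC_PUSH_U32_SREG
 * exists alongside the plain IEM_MC_PUSH_U32 because some CPUs execute a
 * 32-bit push of a segment register as a 16-bit store, leaving the upper half
 * of the stack slot untouched; the zero-extended fetch above supplies the
 * value and the push helper decides how much of it reaches the stack.
 */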
4810
4811
4812/** Opcode 0x0f 0xa0. */
4813FNIEMOP_DEF(iemOp_push_fs)
4814{
4815 IEMOP_MNEMONIC(push_fs, "push fs");
4816 IEMOP_HLP_MIN_386();
4817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4818 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4819}
4820
4821
4822/** Opcode 0x0f 0xa1. */
4823FNIEMOP_DEF(iemOp_pop_fs)
4824{
4825 IEMOP_MNEMONIC(pop_fs, "pop fs");
4826 IEMOP_HLP_MIN_386();
4827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4828 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4829}
4830
4831
4832/** Opcode 0x0f 0xa2. */
4833FNIEMOP_DEF(iemOp_cpuid)
4834{
4835 IEMOP_MNEMONIC(cpuid, "cpuid");
4836 IEMOP_HLP_MIN_486(); /* not all 486es. */
4837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4838 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4839}
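
/* Added note: cpuid has no IEM_MC translation at all -- it is deferred
 * wholesale to the C worker iemCImpl_cpuid via IEM_MC_DEFER_TO_CIMPL_0, which
 * presumably fills EAX/EBX/ECX/EDX from the VM's CPUID leaf tables.
 */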
4840
4841
4842/**
4843 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4844 * iemOp_bts_Ev_Gv.
4845 */
4846FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4847{
4848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4849 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4850
4851 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4852 {
4853 /* register destination. */
4854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4855 switch (pVCpu->iem.s.enmEffOpSize)
4856 {
4857 case IEMMODE_16BIT:
4858 IEM_MC_BEGIN(3, 0);
4859 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4860 IEM_MC_ARG(uint16_t, u16Src, 1);
4861 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4862
4863 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4864 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4865 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4866 IEM_MC_REF_EFLAGS(pEFlags);
4867 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4868
4869 IEM_MC_ADVANCE_RIP();
4870 IEM_MC_END();
4871 return VINF_SUCCESS;
4872
4873 case IEMMODE_32BIT:
4874 IEM_MC_BEGIN(3, 0);
4875 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4876 IEM_MC_ARG(uint32_t, u32Src, 1);
4877 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4878
4879 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4880 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4881 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4882 IEM_MC_REF_EFLAGS(pEFlags);
4883 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4884
4885 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4886 IEM_MC_ADVANCE_RIP();
4887 IEM_MC_END();
4888 return VINF_SUCCESS;
4889
4890 case IEMMODE_64BIT:
4891 IEM_MC_BEGIN(3, 0);
4892 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4893 IEM_MC_ARG(uint64_t, u64Src, 1);
4894 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4895
4896 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4897 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4898 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4899 IEM_MC_REF_EFLAGS(pEFlags);
4900 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4901
4902 IEM_MC_ADVANCE_RIP();
4903 IEM_MC_END();
4904 return VINF_SUCCESS;
4905
4906 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4907 }
4908 }
4909 else
4910 {
4911 /* memory destination. */
4912
4913 uint32_t fAccess;
4914 if (pImpl->pfnLockedU16)
4915 fAccess = IEM_ACCESS_DATA_RW;
4916 else /* BT */
4917 fAccess = IEM_ACCESS_DATA_R;
4918
4919 /** @todo test negative bit offsets! */
4920 switch (pVCpu->iem.s.enmEffOpSize)
4921 {
4922 case IEMMODE_16BIT:
4923 IEM_MC_BEGIN(3, 2);
4924 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4925 IEM_MC_ARG(uint16_t, u16Src, 1);
4926 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4928 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4929
4930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4931 if (pImpl->pfnLockedU16)
4932 IEMOP_HLP_DONE_DECODING();
4933 else
4934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4935 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4936 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4937 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4938 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4939 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4940 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4941 IEM_MC_FETCH_EFLAGS(EFlags);
4942
4943 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4944 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4945 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4946 else
4947 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4948 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4949
4950 IEM_MC_COMMIT_EFLAGS(EFlags);
4951 IEM_MC_ADVANCE_RIP();
4952 IEM_MC_END();
4953 return VINF_SUCCESS;
4954
4955 case IEMMODE_32BIT:
4956 IEM_MC_BEGIN(3, 2);
4957 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4958 IEM_MC_ARG(uint32_t, u32Src, 1);
4959 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4960 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4961 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4962
4963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4964 if (pImpl->pfnLockedU16)
4965 IEMOP_HLP_DONE_DECODING();
4966 else
4967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4968 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4969 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4970 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4971 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4972 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4973 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4974 IEM_MC_FETCH_EFLAGS(EFlags);
4975
4976 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4977 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4978 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4979 else
4980 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4981 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
4982
4983 IEM_MC_COMMIT_EFLAGS(EFlags);
4984 IEM_MC_ADVANCE_RIP();
4985 IEM_MC_END();
4986 return VINF_SUCCESS;
4987
4988 case IEMMODE_64BIT:
4989 IEM_MC_BEGIN(3, 2);
4990 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4991 IEM_MC_ARG(uint64_t, u64Src, 1);
4992 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4994 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4995
4996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4997 if (pImpl->pfnLockedU16)
4998 IEMOP_HLP_DONE_DECODING();
4999 else
5000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5001 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5002 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5003 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5004 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5005 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5006 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5007 IEM_MC_FETCH_EFLAGS(EFlags);
5008
5009 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5010 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5011 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5012 else
5013 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5014 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5015
5016 IEM_MC_COMMIT_EFLAGS(EFlags);
5017 IEM_MC_ADVANCE_RIP();
5018 IEM_MC_END();
5019 return VINF_SUCCESS;
5020
5021 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5022 }
5023 }
5024}
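
/* Added worked example for the address adjustment above.  For the memory
 * forms of bt/bts/btr/btc the bit offset in Gv is signed and may reach far
 * outside the addressed operand, so whole operands are folded into the
 * effective address first.  E.g. 'bt word [mem], reg' with u16Src = 0x0123:
 *     i16AddrAdj = 0x0123;  u16Src &= 0x0f;       -> bit 3
 *     i16AddrAdj >>= 4;     (arithmetic shift)    -> 0x12 words
 *     i16AddrAdj <<= 1;     (2 bytes per word)    -> +0x24 bytes
 * i.e. bit 3 of the word at mem+0x24 is tested.  BT itself only reads the
 * operand (it has no locked variant), hence fAccess = IEM_ACCESS_DATA_R.
 */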
5025
5026
5027/** Opcode 0x0f 0xa3. */
5028FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5029{
5030 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5031 IEMOP_HLP_MIN_386();
5032 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5033}
5034
5035
5036/**
5037 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5038 */
5039FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5040{
5041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5042 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5043
5044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5045 {
5046 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5048
5049 switch (pVCpu->iem.s.enmEffOpSize)
5050 {
5051 case IEMMODE_16BIT:
5052 IEM_MC_BEGIN(4, 0);
5053 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5054 IEM_MC_ARG(uint16_t, u16Src, 1);
5055 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5056 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5057
5058 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5059 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5060 IEM_MC_REF_EFLAGS(pEFlags);
5061 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5062
5063 IEM_MC_ADVANCE_RIP();
5064 IEM_MC_END();
5065 return VINF_SUCCESS;
5066
5067 case IEMMODE_32BIT:
5068 IEM_MC_BEGIN(4, 0);
5069 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5070 IEM_MC_ARG(uint32_t, u32Src, 1);
5071 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5072 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5073
5074 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5075 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5076 IEM_MC_REF_EFLAGS(pEFlags);
5077 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5078
5079 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5080 IEM_MC_ADVANCE_RIP();
5081 IEM_MC_END();
5082 return VINF_SUCCESS;
5083
5084 case IEMMODE_64BIT:
5085 IEM_MC_BEGIN(4, 0);
5086 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5087 IEM_MC_ARG(uint64_t, u64Src, 1);
5088 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5089 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5090
5091 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5092 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5093 IEM_MC_REF_EFLAGS(pEFlags);
5094 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5095
5096 IEM_MC_ADVANCE_RIP();
5097 IEM_MC_END();
5098 return VINF_SUCCESS;
5099
5100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5101 }
5102 }
5103 else
5104 {
5105 switch (pVCpu->iem.s.enmEffOpSize)
5106 {
5107 case IEMMODE_16BIT:
5108 IEM_MC_BEGIN(4, 2);
5109 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5110 IEM_MC_ARG(uint16_t, u16Src, 1);
5111 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5112 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5114
5115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5116 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5117 IEM_MC_ASSIGN(cShiftArg, cShift);
5118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5119 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5120 IEM_MC_FETCH_EFLAGS(EFlags);
5121 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5122 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5123
5124 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5125 IEM_MC_COMMIT_EFLAGS(EFlags);
5126 IEM_MC_ADVANCE_RIP();
5127 IEM_MC_END();
5128 return VINF_SUCCESS;
5129
5130 case IEMMODE_32BIT:
5131 IEM_MC_BEGIN(4, 2);
5132 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5133 IEM_MC_ARG(uint32_t, u32Src, 1);
5134 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5135 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5137
5138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5139 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5140 IEM_MC_ASSIGN(cShiftArg, cShift);
5141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5142 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5143 IEM_MC_FETCH_EFLAGS(EFlags);
5144 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5145 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5146
5147 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5148 IEM_MC_COMMIT_EFLAGS(EFlags);
5149 IEM_MC_ADVANCE_RIP();
5150 IEM_MC_END();
5151 return VINF_SUCCESS;
5152
5153 case IEMMODE_64BIT:
5154 IEM_MC_BEGIN(4, 2);
5155 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5156 IEM_MC_ARG(uint64_t, u64Src, 1);
5157 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5158 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5160
5161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5162 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5163 IEM_MC_ASSIGN(cShiftArg, cShift);
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5166 IEM_MC_FETCH_EFLAGS(EFlags);
5167 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5168 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5169
5170 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5171 IEM_MC_COMMIT_EFLAGS(EFlags);
5172 IEM_MC_ADVANCE_RIP();
5173 IEM_MC_END();
5174 return VINF_SUCCESS;
5175
5176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5177 }
5178 }
5179}
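
/* Added note: the memory path above passes cbImm=1 to
 * IEM_MC_CALC_RM_EFF_ADDR because the shift-count immediate has not been
 * consumed yet at that point; for RIP-relative operands in 64-bit mode the
 * displacement is relative to the end of the whole instruction, so the
 * effective-address calculation needs to know how many immediate bytes are
 * still to come.
 */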
5180
5181
5182/**
5183 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5184 */
5185FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5186{
5187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5188 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5189
5190 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5191 {
5192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5193
5194 switch (pVCpu->iem.s.enmEffOpSize)
5195 {
5196 case IEMMODE_16BIT:
5197 IEM_MC_BEGIN(4, 0);
5198 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5199 IEM_MC_ARG(uint16_t, u16Src, 1);
5200 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5201 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5202
5203 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5204 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5205 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5206 IEM_MC_REF_EFLAGS(pEFlags);
5207 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5208
5209 IEM_MC_ADVANCE_RIP();
5210 IEM_MC_END();
5211 return VINF_SUCCESS;
5212
5213 case IEMMODE_32BIT:
5214 IEM_MC_BEGIN(4, 0);
5215 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5216 IEM_MC_ARG(uint32_t, u32Src, 1);
5217 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5218 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5219
5220 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5221 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5222 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5223 IEM_MC_REF_EFLAGS(pEFlags);
5224 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5225
5226 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5227 IEM_MC_ADVANCE_RIP();
5228 IEM_MC_END();
5229 return VINF_SUCCESS;
5230
5231 case IEMMODE_64BIT:
5232 IEM_MC_BEGIN(4, 0);
5233 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5234 IEM_MC_ARG(uint64_t, u64Src, 1);
5235 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5236 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5237
5238 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5239 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5240 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5241 IEM_MC_REF_EFLAGS(pEFlags);
5242 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5243
5244 IEM_MC_ADVANCE_RIP();
5245 IEM_MC_END();
5246 return VINF_SUCCESS;
5247
5248 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5249 }
5250 }
5251 else
5252 {
5253 switch (pVCpu->iem.s.enmEffOpSize)
5254 {
5255 case IEMMODE_16BIT:
5256 IEM_MC_BEGIN(4, 2);
5257 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5258 IEM_MC_ARG(uint16_t, u16Src, 1);
5259 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5260 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5262
5263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5265 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5266 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5267 IEM_MC_FETCH_EFLAGS(EFlags);
5268 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5269 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5270
5271 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5272 IEM_MC_COMMIT_EFLAGS(EFlags);
5273 IEM_MC_ADVANCE_RIP();
5274 IEM_MC_END();
5275 return VINF_SUCCESS;
5276
5277 case IEMMODE_32BIT:
5278 IEM_MC_BEGIN(4, 2);
5279 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5280 IEM_MC_ARG(uint32_t, u32Src, 1);
5281 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5282 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5284
5285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5287 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5288 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5289 IEM_MC_FETCH_EFLAGS(EFlags);
5290 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5291 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5292
5293 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5294 IEM_MC_COMMIT_EFLAGS(EFlags);
5295 IEM_MC_ADVANCE_RIP();
5296 IEM_MC_END();
5297 return VINF_SUCCESS;
5298
5299 case IEMMODE_64BIT:
5300 IEM_MC_BEGIN(4, 2);
5301 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5302 IEM_MC_ARG(uint64_t, u64Src, 1);
5303 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5304 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5306
5307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5309 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5310 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5311 IEM_MC_FETCH_EFLAGS(EFlags);
5312 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5313 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5314
5315 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5316 IEM_MC_COMMIT_EFLAGS(EFlags);
5317 IEM_MC_ADVANCE_RIP();
5318 IEM_MC_END();
5319 return VINF_SUCCESS;
5320
5321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5322 }
5323 }
5324}
5325
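/* Illustrative example of the double shifts handled by the worker above:
   with AX=0x1234, BX=0xffff and CL=4, "shld ax, bx, cl" shifts AX left by
   four and fills the vacated low bits from the top of BX, giving AX=0x234f;
   "shrd" shifts right and fills the high bits from the bottom of BX. */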
5326
5327
5328/** Opcode 0x0f 0xa4. */
5329FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5330{
5331 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5332 IEMOP_HLP_MIN_386();
5333 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5334}
5335
5336
5337/** Opcode 0x0f 0xa5. */
5338FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5339{
5340 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5341 IEMOP_HLP_MIN_386();
5342 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5343}
5344
5345
5346/** Opcode 0x0f 0xa8. */
5347FNIEMOP_DEF(iemOp_push_gs)
5348{
5349 IEMOP_MNEMONIC(push_gs, "push gs");
5350 IEMOP_HLP_MIN_386();
5351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5352 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5353}
5354
5355
5356/** Opcode 0x0f 0xa9. */
5357FNIEMOP_DEF(iemOp_pop_gs)
5358{
5359 IEMOP_MNEMONIC(pop_gs, "pop gs");
5360 IEMOP_HLP_MIN_386();
5361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5362 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5363}
5364
5365
5366/** Opcode 0x0f 0xaa. */
5367FNIEMOP_STUB(iemOp_rsm);
5368//IEMOP_HLP_MIN_386();
5369
5370
5371/** Opcode 0x0f 0xab. */
5372FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5373{
5374 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5375 IEMOP_HLP_MIN_386();
5376 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5377}
5378
5379
5380/** Opcode 0x0f 0xac. */
5381FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5382{
5383 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5384 IEMOP_HLP_MIN_386();
5385 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5386}
5387
5388
5389/** Opcode 0x0f 0xad. */
5390FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5391{
5392 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5393 IEMOP_HLP_MIN_386();
5394 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5395}
5396
5397
5398/** Opcode 0x0f 0xae mem/0. */
5399FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5400{
5401 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5402 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5403 return IEMOP_RAISE_INVALID_OPCODE();
5404
5405 IEM_MC_BEGIN(3, 1);
5406 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5407 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5408 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5411 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5412 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5413 IEM_MC_END();
5414 return VINF_SUCCESS;
5415}
5416
5417
5418/** Opcode 0x0f 0xae mem/1. */
5419FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5420{
5421 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5422 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5423 return IEMOP_RAISE_INVALID_OPCODE();
5424
5425 IEM_MC_BEGIN(3, 1);
5426 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5427 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5428 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5431 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5432 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5433 IEM_MC_END();
5434 return VINF_SUCCESS;
5435}
5436
5437
5438/** Opcode 0x0f 0xae mem/2. */
5439FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5440
5441/** Opcode 0x0f 0xae mem/3. */
5442FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5443
5444/** Opcode 0x0f 0xae mem/4. */
5445FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5446
5447/** Opcode 0x0f 0xae mem/5. */
5448FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5449
5450/** Opcode 0x0f 0xae mem/6. */
5451FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5452
5453/** Opcode 0x0f 0xae mem/7. */
5454FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5455
5456
5457/** Opcode 0x0f 0xae 11b/5. */
5458FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5459{
5460 RT_NOREF_PV(bRm);
5461 IEMOP_MNEMONIC(lfence, "lfence");
5462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5463 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5464 return IEMOP_RAISE_INVALID_OPCODE();
5465
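    /* When the host itself lacks SSE2 we fall back to a generic memory
       fence, which should be at least as strong as lfence. */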
5466 IEM_MC_BEGIN(0, 0);
5467 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5468 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5469 else
5470 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5471 IEM_MC_ADVANCE_RIP();
5472 IEM_MC_END();
5473 return VINF_SUCCESS;
5474}
5475
5476
5477/** Opcode 0x0f 0xae 11b/6. */
5478FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5479{
5480 RT_NOREF_PV(bRm);
5481 IEMOP_MNEMONIC(mfence, "mfence");
5482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5483 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5484 return IEMOP_RAISE_INVALID_OPCODE();
5485
5486 IEM_MC_BEGIN(0, 0);
5487 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5488 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5489 else
5490 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5491 IEM_MC_ADVANCE_RIP();
5492 IEM_MC_END();
5493 return VINF_SUCCESS;
5494}
5495
5496
5497/** Opcode 0x0f 0xae 11b/7. */
5498FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5499{
5500 RT_NOREF_PV(bRm);
5501 IEMOP_MNEMONIC(sfence, "sfence");
5502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5503 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5504 return IEMOP_RAISE_INVALID_OPCODE();
5505
5506 IEM_MC_BEGIN(0, 0);
5507 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5508 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5509 else
5510 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5511 IEM_MC_ADVANCE_RIP();
5512 IEM_MC_END();
5513 return VINF_SUCCESS;
5514}
5515
5516
5517/** Opcode 0xf3 0x0f 0xae 11b/0. */
5518FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5519
5520/** Opcode 0xf3 0x0f 0xae 11b/1. */
5521FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5522
5523/** Opcode 0xf3 0x0f 0xae 11b/2. */
5524FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5525
5526/** Opcode 0xf3 0x0f 0xae 11b/3. */
5527FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5528
5529
5530/** Opcode 0x0f 0xae. */
5531FNIEMOP_DEF(iemOp_Grp15)
5532{
5533 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
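    /* Memory forms dispatch on the ModRM reg field alone; register forms
       additionally dispatch on the repeat prefixes (no prefix selects the
       fences, F3 selects the rd/wrfsbase and rd/wrgsbase forms). */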
5535 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5536 {
5537 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5538 {
5539 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5540 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5541 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5542 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5543 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5544 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5545 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5546 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5548 }
5549 }
5550 else
5551 {
5552 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5553 {
5554 case 0:
5555 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5556 {
5557 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5558 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5559 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5560 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5561 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5562 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5563 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5564 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5566 }
5567 break;
5568
5569 case IEM_OP_PRF_REPZ:
5570 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5571 {
5572 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5573 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5574 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5575 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5576 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5577 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5578 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5579 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5581 }
5582 break;
5583
5584 default:
5585 return IEMOP_RAISE_INVALID_OPCODE();
5586 }
5587 }
5588}
5589
5590
5591/** Opcode 0x0f 0xaf. */
5592FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5593{
5594 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5595 IEMOP_HLP_MIN_386();
5596 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5597 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5598}
5599
5600
5601/** Opcode 0x0f 0xb0. */
5602FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5603{
5604 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5605 IEMOP_HLP_MIN_486();
5606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5607
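    /* Note: cmpxchg always writes the destination, storing back the value
       just read when the compare fails, which is why the memory form below
       maps the operand for read-write access. */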
5608 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5609 {
5610 IEMOP_HLP_DONE_DECODING();
5611 IEM_MC_BEGIN(4, 0);
5612 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5613 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5614 IEM_MC_ARG(uint8_t, u8Src, 2);
5615 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5616
5617 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5618 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5619 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5620 IEM_MC_REF_EFLAGS(pEFlags);
5621 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5622 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5623 else
5624 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5625
5626 IEM_MC_ADVANCE_RIP();
5627 IEM_MC_END();
5628 }
5629 else
5630 {
5631 IEM_MC_BEGIN(4, 3);
5632 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5633 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5634 IEM_MC_ARG(uint8_t, u8Src, 2);
5635 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5637 IEM_MC_LOCAL(uint8_t, u8Al);
5638
5639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5640 IEMOP_HLP_DONE_DECODING();
5641 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5642 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5643 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5644 IEM_MC_FETCH_EFLAGS(EFlags);
5645 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5646 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5647 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5648 else
5649 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5650
5651 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5652 IEM_MC_COMMIT_EFLAGS(EFlags);
5653 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5654 IEM_MC_ADVANCE_RIP();
5655 IEM_MC_END();
5656 }
5657 return VINF_SUCCESS;
5658}
5659
5660/** Opcode 0x0f 0xb1. */
5661FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5662{
5663 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5664 IEMOP_HLP_MIN_486();
5665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5666
5667 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5668 {
5669 IEMOP_HLP_DONE_DECODING();
5670 switch (pVCpu->iem.s.enmEffOpSize)
5671 {
5672 case IEMMODE_16BIT:
5673 IEM_MC_BEGIN(4, 0);
5674 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5675 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5676 IEM_MC_ARG(uint16_t, u16Src, 2);
5677 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5678
5679 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5680 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5681 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5682 IEM_MC_REF_EFLAGS(pEFlags);
5683 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5684 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5685 else
5686 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5687
5688 IEM_MC_ADVANCE_RIP();
5689 IEM_MC_END();
5690 return VINF_SUCCESS;
5691
5692 case IEMMODE_32BIT:
5693 IEM_MC_BEGIN(4, 0);
5694 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5695 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5696 IEM_MC_ARG(uint32_t, u32Src, 2);
5697 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5698
5699 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5700 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5701 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5702 IEM_MC_REF_EFLAGS(pEFlags);
5703 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5704 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5705 else
5706 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5707
5708 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5709 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5710 IEM_MC_ADVANCE_RIP();
5711 IEM_MC_END();
5712 return VINF_SUCCESS;
5713
5714 case IEMMODE_64BIT:
5715 IEM_MC_BEGIN(4, 0);
5716 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5717 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5718#ifdef RT_ARCH_X86
5719 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5720#else
5721 IEM_MC_ARG(uint64_t, u64Src, 2);
5722#endif
5723 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5724
5725 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5726 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5727 IEM_MC_REF_EFLAGS(pEFlags);
5728#ifdef RT_ARCH_X86
5729 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5730 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5731 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5732 else
5733 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5734#else
5735 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5736 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5737 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5738 else
5739 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5740#endif
5741
5742 IEM_MC_ADVANCE_RIP();
5743 IEM_MC_END();
5744 return VINF_SUCCESS;
5745
5746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5747 }
5748 }
5749 else
5750 {
5751 switch (pVCpu->iem.s.enmEffOpSize)
5752 {
5753 case IEMMODE_16BIT:
5754 IEM_MC_BEGIN(4, 3);
5755 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5756 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5757 IEM_MC_ARG(uint16_t, u16Src, 2);
5758 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5760 IEM_MC_LOCAL(uint16_t, u16Ax);
5761
5762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5763 IEMOP_HLP_DONE_DECODING();
5764 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5765 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5766 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5767 IEM_MC_FETCH_EFLAGS(EFlags);
5768 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5769 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5770 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5771 else
5772 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5773
5774 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5775 IEM_MC_COMMIT_EFLAGS(EFlags);
5776 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5777 IEM_MC_ADVANCE_RIP();
5778 IEM_MC_END();
5779 return VINF_SUCCESS;
5780
5781 case IEMMODE_32BIT:
5782 IEM_MC_BEGIN(4, 3);
5783 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5784 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5785 IEM_MC_ARG(uint32_t, u32Src, 2);
5786 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5788 IEM_MC_LOCAL(uint32_t, u32Eax);
5789
5790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5791 IEMOP_HLP_DONE_DECODING();
5792 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5793 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5794 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5795 IEM_MC_FETCH_EFLAGS(EFlags);
5796 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5797 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5798 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5799 else
5800 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5801
5802 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5803 IEM_MC_COMMIT_EFLAGS(EFlags);
5804 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5805 IEM_MC_ADVANCE_RIP();
5806 IEM_MC_END();
5807 return VINF_SUCCESS;
5808
5809 case IEMMODE_64BIT:
5810 IEM_MC_BEGIN(4, 3);
5811 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5812 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5813#ifdef RT_ARCH_X86
5814 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5815#else
5816 IEM_MC_ARG(uint64_t, u64Src, 2);
5817#endif
5818 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5820 IEM_MC_LOCAL(uint64_t, u64Rax);
5821
5822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5823 IEMOP_HLP_DONE_DECODING();
5824 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5825 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5826 IEM_MC_FETCH_EFLAGS(EFlags);
5827 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5828#ifdef RT_ARCH_X86
5829 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5830 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5831 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5832 else
5833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5834#else
5835 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5836 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5837 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5838 else
5839 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5840#endif
5841
5842 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5843 IEM_MC_COMMIT_EFLAGS(EFlags);
5844 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5845 IEM_MC_ADVANCE_RIP();
5846 IEM_MC_END();
5847 return VINF_SUCCESS;
5848
5849 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5850 }
5851 }
5852}
5853
5854
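/**
 * Common worker for lss/lfs/lgs: loads a far pointer from memory, i.e. an
 * offset of the current operand size followed by a 16-bit selector, into
 * the given segment register and general purpose register.
 */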
5855FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5856{
5857 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5858 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5859
5860 switch (pVCpu->iem.s.enmEffOpSize)
5861 {
5862 case IEMMODE_16BIT:
5863 IEM_MC_BEGIN(5, 1);
5864 IEM_MC_ARG(uint16_t, uSel, 0);
5865 IEM_MC_ARG(uint16_t, offSeg, 1);
5866 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5867 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5868 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5869 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5872 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5873 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5874 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5875 IEM_MC_END();
5876 return VINF_SUCCESS;
5877
5878 case IEMMODE_32BIT:
5879 IEM_MC_BEGIN(5, 1);
5880 IEM_MC_ARG(uint16_t, uSel, 0);
5881 IEM_MC_ARG(uint32_t, offSeg, 1);
5882 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5883 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5884 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5885 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5888 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5889 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5890 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5891 IEM_MC_END();
5892 return VINF_SUCCESS;
5893
5894 case IEMMODE_64BIT:
5895 IEM_MC_BEGIN(5, 1);
5896 IEM_MC_ARG(uint16_t, uSel, 0);
5897 IEM_MC_ARG(uint64_t, offSeg, 1);
5898 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5899 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5900 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5901 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5904 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
5905 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5906 else
5907 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5908 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5909 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5910 IEM_MC_END();
5911 return VINF_SUCCESS;
5912
5913 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5914 }
5915}
5916
5917
5918/** Opcode 0x0f 0xb2. */
5919FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5920{
5921 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5922 IEMOP_HLP_MIN_386();
5923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5925 return IEMOP_RAISE_INVALID_OPCODE();
5926 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5927}
5928
5929
5930/** Opcode 0x0f 0xb3. */
5931FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5932{
5933 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5934 IEMOP_HLP_MIN_386();
5935 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5936}
5937
5938
5939/** Opcode 0x0f 0xb4. */
5940FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5941{
5942 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5943 IEMOP_HLP_MIN_386();
5944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5945 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5946 return IEMOP_RAISE_INVALID_OPCODE();
5947 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5948}
5949
5950
5951/** Opcode 0x0f 0xb5. */
5952FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5953{
5954 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5955 IEMOP_HLP_MIN_386();
5956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5957 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5958 return IEMOP_RAISE_INVALID_OPCODE();
5959 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5960}
5961
5962
5963/** Opcode 0x0f 0xb6. */
5964FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5965{
5966 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5967 IEMOP_HLP_MIN_386();
5968
5969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5970
5971 /*
5972 * If rm is denoting a register, no more instruction bytes.
5973 */
5974 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5975 {
5976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5977 switch (pVCpu->iem.s.enmEffOpSize)
5978 {
5979 case IEMMODE_16BIT:
5980 IEM_MC_BEGIN(0, 1);
5981 IEM_MC_LOCAL(uint16_t, u16Value);
5982 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5983 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5984 IEM_MC_ADVANCE_RIP();
5985 IEM_MC_END();
5986 return VINF_SUCCESS;
5987
5988 case IEMMODE_32BIT:
5989 IEM_MC_BEGIN(0, 1);
5990 IEM_MC_LOCAL(uint32_t, u32Value);
5991 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5992 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5993 IEM_MC_ADVANCE_RIP();
5994 IEM_MC_END();
5995 return VINF_SUCCESS;
5996
5997 case IEMMODE_64BIT:
5998 IEM_MC_BEGIN(0, 1);
5999 IEM_MC_LOCAL(uint64_t, u64Value);
6000 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6001 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6002 IEM_MC_ADVANCE_RIP();
6003 IEM_MC_END();
6004 return VINF_SUCCESS;
6005
6006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6007 }
6008 }
6009 else
6010 {
6011 /*
6012 * We're loading a register from memory.
6013 */
6014 switch (pVCpu->iem.s.enmEffOpSize)
6015 {
6016 case IEMMODE_16BIT:
6017 IEM_MC_BEGIN(0, 2);
6018 IEM_MC_LOCAL(uint16_t, u16Value);
6019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6022 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6023 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6024 IEM_MC_ADVANCE_RIP();
6025 IEM_MC_END();
6026 return VINF_SUCCESS;
6027
6028 case IEMMODE_32BIT:
6029 IEM_MC_BEGIN(0, 2);
6030 IEM_MC_LOCAL(uint32_t, u32Value);
6031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6034 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6035 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6036 IEM_MC_ADVANCE_RIP();
6037 IEM_MC_END();
6038 return VINF_SUCCESS;
6039
6040 case IEMMODE_64BIT:
6041 IEM_MC_BEGIN(0, 2);
6042 IEM_MC_LOCAL(uint64_t, u64Value);
6043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6046 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6047 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6048 IEM_MC_ADVANCE_RIP();
6049 IEM_MC_END();
6050 return VINF_SUCCESS;
6051
6052 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6053 }
6054 }
6055}
6056
6057
6058/** Opcode 0x0f 0xb7. */
6059FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6060{
6061 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6062 IEMOP_HLP_MIN_386();
6063
6064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6065
6066 /** @todo Not entirely sure how the operand size prefix is handled here,
6067 * assuming that it will be ignored. Would be nice to have a few
6068 * tests for this. */
6069 /*
6070 * If rm is denoting a register, no more instruction bytes.
6071 */
6072 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6073 {
6074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6075 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6076 {
6077 IEM_MC_BEGIN(0, 1);
6078 IEM_MC_LOCAL(uint32_t, u32Value);
6079 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6080 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6081 IEM_MC_ADVANCE_RIP();
6082 IEM_MC_END();
6083 }
6084 else
6085 {
6086 IEM_MC_BEGIN(0, 1);
6087 IEM_MC_LOCAL(uint64_t, u64Value);
6088 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6089 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6090 IEM_MC_ADVANCE_RIP();
6091 IEM_MC_END();
6092 }
6093 }
6094 else
6095 {
6096 /*
6097 * We're loading a register from memory.
6098 */
6099 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6100 {
6101 IEM_MC_BEGIN(0, 2);
6102 IEM_MC_LOCAL(uint32_t, u32Value);
6103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6106 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6107 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6108 IEM_MC_ADVANCE_RIP();
6109 IEM_MC_END();
6110 }
6111 else
6112 {
6113 IEM_MC_BEGIN(0, 2);
6114 IEM_MC_LOCAL(uint64_t, u64Value);
6115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6118 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6119 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6120 IEM_MC_ADVANCE_RIP();
6121 IEM_MC_END();
6122 }
6123 }
6124 return VINF_SUCCESS;
6125}
6126
6127
6128/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6129FNIEMOP_UD_STUB(iemOp_jmpe);
6130/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6131FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6132
6133
6134/** Opcode 0x0f 0xb9. */
6135FNIEMOP_DEF(iemOp_Grp10)
6136{
6137 Log(("iemOp_Grp10 -> #UD\n"));
6138 return IEMOP_RAISE_INVALID_OPCODE();
6139}
6140
6141
6142/** Opcode 0x0f 0xba. */
6143FNIEMOP_DEF(iemOp_Grp8)
6144{
6145 IEMOP_HLP_MIN_386();
6146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6147 PCIEMOPBINSIZES pImpl;
6148 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6149 {
6150 case 0: case 1: case 2: case 3:
6151 return IEMOP_RAISE_INVALID_OPCODE();
6152 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6153 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6154 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6155 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6157 }
6158 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6159
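    /* The immediate bit offset wraps modulo the operand width, hence the
       0x0f/0x1f/0x3f masks applied to u8Bit in both forms below. */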
6160 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6161 {
6162 /* register destination. */
6163 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6165
6166 switch (pVCpu->iem.s.enmEffOpSize)
6167 {
6168 case IEMMODE_16BIT:
6169 IEM_MC_BEGIN(3, 0);
6170 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6171 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6172 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6173
6174 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6175 IEM_MC_REF_EFLAGS(pEFlags);
6176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6177
6178 IEM_MC_ADVANCE_RIP();
6179 IEM_MC_END();
6180 return VINF_SUCCESS;
6181
6182 case IEMMODE_32BIT:
6183 IEM_MC_BEGIN(3, 0);
6184 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6185 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6186 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6187
6188 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6189 IEM_MC_REF_EFLAGS(pEFlags);
6190 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6191
6192 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6193 IEM_MC_ADVANCE_RIP();
6194 IEM_MC_END();
6195 return VINF_SUCCESS;
6196
6197 case IEMMODE_64BIT:
6198 IEM_MC_BEGIN(3, 0);
6199 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6200 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6201 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6202
6203 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6204 IEM_MC_REF_EFLAGS(pEFlags);
6205 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6206
6207 IEM_MC_ADVANCE_RIP();
6208 IEM_MC_END();
6209 return VINF_SUCCESS;
6210
6211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6212 }
6213 }
6214 else
6215 {
6216 /* memory destination. */
6217
6218 uint32_t fAccess;
6219 if (pImpl->pfnLockedU16)
6220 fAccess = IEM_ACCESS_DATA_RW;
6221 else /* BT */
6222 fAccess = IEM_ACCESS_DATA_R;
6223
6224 /** @todo test negative bit offsets! */
6225 switch (pVCpu->iem.s.enmEffOpSize)
6226 {
6227 case IEMMODE_16BIT:
6228 IEM_MC_BEGIN(3, 1);
6229 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6230 IEM_MC_ARG(uint16_t, u16Src, 1);
6231 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6233
6234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6235 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6236 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6237 if (pImpl->pfnLockedU16)
6238 IEMOP_HLP_DONE_DECODING();
6239 else
6240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6241 IEM_MC_FETCH_EFLAGS(EFlags);
6242 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6243 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6244 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6245 else
6246 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6247 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6248
6249 IEM_MC_COMMIT_EFLAGS(EFlags);
6250 IEM_MC_ADVANCE_RIP();
6251 IEM_MC_END();
6252 return VINF_SUCCESS;
6253
6254 case IEMMODE_32BIT:
6255 IEM_MC_BEGIN(3, 1);
6256 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6257 IEM_MC_ARG(uint32_t, u32Src, 1);
6258 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6260
6261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6262 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6263 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6264 if (pImpl->pfnLockedU16)
6265 IEMOP_HLP_DONE_DECODING();
6266 else
6267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6268 IEM_MC_FETCH_EFLAGS(EFlags);
6269 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6270 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6271 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6272 else
6273 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6274 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6275
6276 IEM_MC_COMMIT_EFLAGS(EFlags);
6277 IEM_MC_ADVANCE_RIP();
6278 IEM_MC_END();
6279 return VINF_SUCCESS;
6280
6281 case IEMMODE_64BIT:
6282 IEM_MC_BEGIN(3, 1);
6283 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6284 IEM_MC_ARG(uint64_t, u64Src, 1);
6285 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6287
6288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6289 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6290 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6291 if (pImpl->pfnLockedU16)
6292 IEMOP_HLP_DONE_DECODING();
6293 else
6294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6295 IEM_MC_FETCH_EFLAGS(EFlags);
6296 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6297 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6298 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6299 else
6300 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6301 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6302
6303 IEM_MC_COMMIT_EFLAGS(EFlags);
6304 IEM_MC_ADVANCE_RIP();
6305 IEM_MC_END();
6306 return VINF_SUCCESS;
6307
6308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6309 }
6310 }
6312}
6313
6314
6315/** Opcode 0x0f 0xbb. */
6316FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6317{
6318 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6319 IEMOP_HLP_MIN_386();
6320 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6321}
6322
6323
6324/** Opcode 0x0f 0xbc. */
6325FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6326{
6327 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6328 IEMOP_HLP_MIN_386();
6329 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
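    /* Note: if the source is zero, ZF is set and the destination is
       undefined on Intel (AMD documents it as left unchanged); the same
       applies to bsr below. */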
6330 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6331}
6332
6333
6334/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6335FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6336
6337
6338/** Opcode 0x0f 0xbd. */
6339FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6340{
6341 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6342 IEMOP_HLP_MIN_386();
6343 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6344 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6345}
6346
6347
6348/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6349FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6350
6351
6352/** Opcode 0x0f 0xbe. */
6353FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6354{
6355 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6356 IEMOP_HLP_MIN_386();
6357
6358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6359
6360 /*
6361 * If rm is denoting a register, no more instruction bytes.
6362 */
6363 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6364 {
6365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6366 switch (pVCpu->iem.s.enmEffOpSize)
6367 {
6368 case IEMMODE_16BIT:
6369 IEM_MC_BEGIN(0, 1);
6370 IEM_MC_LOCAL(uint16_t, u16Value);
6371 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6372 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6373 IEM_MC_ADVANCE_RIP();
6374 IEM_MC_END();
6375 return VINF_SUCCESS;
6376
6377 case IEMMODE_32BIT:
6378 IEM_MC_BEGIN(0, 1);
6379 IEM_MC_LOCAL(uint32_t, u32Value);
6380 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6381 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6382 IEM_MC_ADVANCE_RIP();
6383 IEM_MC_END();
6384 return VINF_SUCCESS;
6385
6386 case IEMMODE_64BIT:
6387 IEM_MC_BEGIN(0, 1);
6388 IEM_MC_LOCAL(uint64_t, u64Value);
6389 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6390 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6391 IEM_MC_ADVANCE_RIP();
6392 IEM_MC_END();
6393 return VINF_SUCCESS;
6394
6395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6396 }
6397 }
6398 else
6399 {
6400 /*
6401 * We're loading a register from memory.
6402 */
6403 switch (pVCpu->iem.s.enmEffOpSize)
6404 {
6405 case IEMMODE_16BIT:
6406 IEM_MC_BEGIN(0, 2);
6407 IEM_MC_LOCAL(uint16_t, u16Value);
6408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6411 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6412 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6413 IEM_MC_ADVANCE_RIP();
6414 IEM_MC_END();
6415 return VINF_SUCCESS;
6416
6417 case IEMMODE_32BIT:
6418 IEM_MC_BEGIN(0, 2);
6419 IEM_MC_LOCAL(uint32_t, u32Value);
6420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6423 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6424 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6425 IEM_MC_ADVANCE_RIP();
6426 IEM_MC_END();
6427 return VINF_SUCCESS;
6428
6429 case IEMMODE_64BIT:
6430 IEM_MC_BEGIN(0, 2);
6431 IEM_MC_LOCAL(uint64_t, u64Value);
6432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6435 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6436 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6437 IEM_MC_ADVANCE_RIP();
6438 IEM_MC_END();
6439 return VINF_SUCCESS;
6440
6441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6442 }
6443 }
6444}
6445
6446
6447/** Opcode 0x0f 0xbf. */
6448FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6449{
6450 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6451 IEMOP_HLP_MIN_386();
6452
6453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6454
6455 /** @todo Not entirely sure how the operand size prefix is handled here,
6456 * assuming that it will be ignored. Would be nice to have a few
6457 * tests for this. */
6458 /*
6459 * If rm is denoting a register, no more instruction bytes.
6460 */
6461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6462 {
6463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6464 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6465 {
6466 IEM_MC_BEGIN(0, 1);
6467 IEM_MC_LOCAL(uint32_t, u32Value);
6468 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6469 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6470 IEM_MC_ADVANCE_RIP();
6471 IEM_MC_END();
6472 }
6473 else
6474 {
6475 IEM_MC_BEGIN(0, 1);
6476 IEM_MC_LOCAL(uint64_t, u64Value);
6477 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6478 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6479 IEM_MC_ADVANCE_RIP();
6480 IEM_MC_END();
6481 }
6482 }
6483 else
6484 {
6485 /*
6486 * We're loading a register from memory.
6487 */
6488 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6489 {
6490 IEM_MC_BEGIN(0, 2);
6491 IEM_MC_LOCAL(uint32_t, u32Value);
6492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6495 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6496 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6497 IEM_MC_ADVANCE_RIP();
6498 IEM_MC_END();
6499 }
6500 else
6501 {
6502 IEM_MC_BEGIN(0, 2);
6503 IEM_MC_LOCAL(uint64_t, u64Value);
6504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6507 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6508 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6509 IEM_MC_ADVANCE_RIP();
6510 IEM_MC_END();
6511 }
6512 }
6513 return VINF_SUCCESS;
6514}
6515
6516
6517/** Opcode 0x0f 0xc0. */
6518FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6519{
6520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6521 IEMOP_HLP_MIN_486();
6522 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6523
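    /* xadd stores the sum in the destination and the original destination
       value in the source register; with a LOCK prefix this is the classic
       atomic fetch-and-add. */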
6524 /*
6525 * If rm is denoting a register, no more instruction bytes.
6526 */
6527 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6528 {
6529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6530
6531 IEM_MC_BEGIN(3, 0);
6532 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6533 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6534 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6535
6536 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6537 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6538 IEM_MC_REF_EFLAGS(pEFlags);
6539 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6540
6541 IEM_MC_ADVANCE_RIP();
6542 IEM_MC_END();
6543 }
6544 else
6545 {
6546 /*
6547 * We're accessing memory.
6548 */
6549 IEM_MC_BEGIN(3, 3);
6550 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6551 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6552 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6553 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6555
6556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6557 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6558 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6559 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6560 IEM_MC_FETCH_EFLAGS(EFlags);
6561 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6562 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6563 else
6564 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6565
6566 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6567 IEM_MC_COMMIT_EFLAGS(EFlags);
6568 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6569 IEM_MC_ADVANCE_RIP();
6570 IEM_MC_END();
6572    }
6573    return VINF_SUCCESS;
6574}
6575
6576
6577/** Opcode 0x0f 0xc1. */
6578FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6579{
6580 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6581 IEMOP_HLP_MIN_486();
6582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6583
6584 /*
6585 * If rm is denoting a register, no more instruction bytes.
6586 */
6587 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6588 {
6589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6590
6591 switch (pVCpu->iem.s.enmEffOpSize)
6592 {
6593 case IEMMODE_16BIT:
6594 IEM_MC_BEGIN(3, 0);
6595 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6596 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6597 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6598
6599 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6600 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6601 IEM_MC_REF_EFLAGS(pEFlags);
6602 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6603
6604 IEM_MC_ADVANCE_RIP();
6605 IEM_MC_END();
6606 return VINF_SUCCESS;
6607
6608 case IEMMODE_32BIT:
6609 IEM_MC_BEGIN(3, 0);
6610 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6611 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6612 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6613
6614 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6615 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6616 IEM_MC_REF_EFLAGS(pEFlags);
6617 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6618
6619 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6620 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6621 IEM_MC_ADVANCE_RIP();
6622 IEM_MC_END();
6623 return VINF_SUCCESS;
6624
6625 case IEMMODE_64BIT:
6626 IEM_MC_BEGIN(3, 0);
6627 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6628 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6629 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6630
6631 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6632 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6633 IEM_MC_REF_EFLAGS(pEFlags);
6634 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6635
6636 IEM_MC_ADVANCE_RIP();
6637 IEM_MC_END();
6638 return VINF_SUCCESS;
6639
6640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6641 }
6642 }
6643 else
6644 {
6645 /*
6646 * We're accessing memory.
6647 */
6648 switch (pVCpu->iem.s.enmEffOpSize)
6649 {
6650 case IEMMODE_16BIT:
6651 IEM_MC_BEGIN(3, 3);
6652 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6653 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6654 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6655 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6657
6658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6659 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6660 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6661 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6662 IEM_MC_FETCH_EFLAGS(EFlags);
6663 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6664 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6665 else
6666 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6667
6668 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6669 IEM_MC_COMMIT_EFLAGS(EFlags);
6670 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6671 IEM_MC_ADVANCE_RIP();
6672 IEM_MC_END();
6673 return VINF_SUCCESS;
6674
6675 case IEMMODE_32BIT:
6676 IEM_MC_BEGIN(3, 3);
6677 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6678 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6679 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6680 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6682
6683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6684 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6685 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6686 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6687 IEM_MC_FETCH_EFLAGS(EFlags);
6688 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6689 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6690 else
6691 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6692
6693 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6694 IEM_MC_COMMIT_EFLAGS(EFlags);
6695 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6696 IEM_MC_ADVANCE_RIP();
6697 IEM_MC_END();
6698 return VINF_SUCCESS;
6699
6700 case IEMMODE_64BIT:
6701 IEM_MC_BEGIN(3, 3);
6702 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6703 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6704 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6705 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6707
6708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6709 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6710 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6711 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6712 IEM_MC_FETCH_EFLAGS(EFlags);
6713 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6714 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6715 else
6716 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6717
6718 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6719 IEM_MC_COMMIT_EFLAGS(EFlags);
6720 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6721 IEM_MC_ADVANCE_RIP();
6722 IEM_MC_END();
6723 return VINF_SUCCESS;
6724
6725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6726 }
6727 }
6728}
6729
6730
6731/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
6732FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
6733/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
6734FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
6735/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
6736FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
6737/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
6738FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
6739
6740
6741/** Opcode 0x0f 0xc3. */
6742FNIEMOP_DEF(iemOp_movnti_My_Gy)
6743{
6744 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6745
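    /* The non-temporal hint only affects caching; for emulation purposes a
       plain store is sufficient. */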
6746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6747
6748 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6749 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6750 {
6751 switch (pVCpu->iem.s.enmEffOpSize)
6752 {
6753 case IEMMODE_32BIT:
6754 IEM_MC_BEGIN(0, 2);
6755 IEM_MC_LOCAL(uint32_t, u32Value);
6756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6757
6758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6760 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6761 return IEMOP_RAISE_INVALID_OPCODE();
6762
6763 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6764 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6765 IEM_MC_ADVANCE_RIP();
6766 IEM_MC_END();
6767 break;
6768
6769 case IEMMODE_64BIT:
6770 IEM_MC_BEGIN(0, 2);
6771 IEM_MC_LOCAL(uint64_t, u64Value);
6772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6773
6774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6776 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6777 return IEMOP_RAISE_INVALID_OPCODE();
6778
6779 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6780 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6781 IEM_MC_ADVANCE_RIP();
6782 IEM_MC_END();
6783 break;
6784
6785 case IEMMODE_16BIT:
6786 /** @todo check this form. */
6787 return IEMOP_RAISE_INVALID_OPCODE();
6788 }
6789 }
6790 else
6791 return IEMOP_RAISE_INVALID_OPCODE();
6792 return VINF_SUCCESS;
6793}
6794/* Opcode 0x66 0x0f 0xc3 - invalid */
6795/* Opcode 0xf3 0x0f 0xc3 - invalid */
6796/* Opcode 0xf2 0x0f 0xc3 - invalid */
6797
6798/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
6799FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
6800/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6801FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
6802/* Opcode 0xf3 0x0f 0xc4 - invalid */
6803/* Opcode 0xf2 0x0f 0xc4 - invalid */
6804
6805/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
6806FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
6807/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
6808FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
6809/* Opcode 0xf3 0x0f 0xc5 - invalid */
6810/* Opcode 0xf2 0x0f 0xc5 - invalid */
6811
6812/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
6813FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
6814/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6815FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
6816/* Opcode 0xf3 0x0f 0xc6 - invalid */
6817/* Opcode 0xf2 0x0f 0xc6 - invalid */
6818
6819
6820/** Opcode 0x0f 0xc7 !11/1. */
6821FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6822{
6823 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6824
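    /* cmpxchg8b compares EDX:EAX with the 64-bit memory operand; on a match
       ZF is set and ECX:EBX is stored, otherwise ZF is cleared and the
       memory value is loaded into EDX:EAX. */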
6825 IEM_MC_BEGIN(4, 3);
6826 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6827 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6828 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6829 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6830 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6831 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6833
6834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6835 IEMOP_HLP_DONE_DECODING();
6836 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6837
6838 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6839 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6840 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6841
6842 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6843 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6844 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6845
6846 IEM_MC_FETCH_EFLAGS(EFlags);
6847 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6848 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6849 else
6850 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6851
6852 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6853 IEM_MC_COMMIT_EFLAGS(EFlags);
6854 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6855 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6856 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6857 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6858 IEM_MC_ENDIF();
6859 IEM_MC_ADVANCE_RIP();
6860
6861 IEM_MC_END();
6862 return VINF_SUCCESS;
6863}
6864
6865
6866/** Opcode REX.W 0x0f 0xc7 !11/1. */
6867FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6868{
6869 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6870 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6871 {
6872#if 0
6873 RT_NOREF(bRm);
6874 IEMOP_BITCH_ABOUT_STUB();
6875 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6876#else
6877 IEM_MC_BEGIN(4, 3);
6878 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6879 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6880 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6881 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6882 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6883 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6885
6886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6887 IEMOP_HLP_DONE_DECODING();
6888 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6889 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6890
6891 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6892 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6893 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6894
6895 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6896 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6897 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6898
6899 IEM_MC_FETCH_EFLAGS(EFlags);
6900# ifdef RT_ARCH_AMD64
6901 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6902 {
6903 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6904 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6905 else
6906 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6907 }
6908 else
6909# endif
6910 {
6911 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
6912              accesses and not at all atomic, which works fine in a UNI CPU guest
6913 configuration (ignoring DMA). If guest SMP is active we have no choice
6914 but to use a rendezvous callback here. Sigh. */
6915 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6916 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6917 else
6918 {
6919 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6920 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6921 }
6922 }
6923
6924 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6925 IEM_MC_COMMIT_EFLAGS(EFlags);
6926 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6927 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6928 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6929 IEM_MC_ENDIF();
6930 IEM_MC_ADVANCE_RIP();
6931
6932 IEM_MC_END();
6933 return VINF_SUCCESS;
6934#endif
6935 }
6936 Log(("cmpxchg16b -> #UD\n"));
6937 return IEMOP_RAISE_INVALID_OPCODE();
6938}
6939
6940
6941/** Opcode 0x0f 0xc7 11/6. */
6942FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6943
6944/** Opcode 0x0f 0xc7 !11/6. */
6945FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6946
6947/** Opcode 0x66 0x0f 0xc7 !11/6. */
6948FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6949
6950/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6951FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6952
6953/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6954FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6955
6956
6957/** Opcode 0x0f 0xc7. */
6958FNIEMOP_DEF(iemOp_Grp9)
6959{
6960 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
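    /* Group 9 encodes the instruction in the ModR/M reg field; the mod bits and
       the mandatory prefixes select between the variants, see the sub-cases below. */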
6962 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6963 {
6964 case 0: case 2: case 3: case 4: case 5:
6965 return IEMOP_RAISE_INVALID_OPCODE();
6966 case 1:
6967 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6968 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6969 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6970 return IEMOP_RAISE_INVALID_OPCODE();
6971 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6972 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6973 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6974 case 6:
6975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6976 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6977 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6978 {
6979 case 0:
6980 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6981 case IEM_OP_PRF_SIZE_OP:
6982 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6983 case IEM_OP_PRF_REPZ:
6984 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6985 default:
6986 return IEMOP_RAISE_INVALID_OPCODE();
6987 }
6988 case 7:
6989 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6990 {
6991 case 0:
6992 case IEM_OP_PRF_REPZ:
6993 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6994 default:
6995 return IEMOP_RAISE_INVALID_OPCODE();
6996 }
6997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6998 }
6999}
7000
7001
7002/**
7003 * Common 'bswap register' helper.
7004 */
7005FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7006{
7007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7008 switch (pVCpu->iem.s.enmEffOpSize)
7009 {
7010 case IEMMODE_16BIT:
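            /* BSWAP with a 16-bit operand is documented as undefined; the 16-bit
               worker supplies the behavior we emulate. */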
7011 IEM_MC_BEGIN(1, 0);
7012 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7013 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7014 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7015 IEM_MC_ADVANCE_RIP();
7016 IEM_MC_END();
7017 return VINF_SUCCESS;
7018
7019 case IEMMODE_32BIT:
7020 IEM_MC_BEGIN(1, 0);
7021 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7022 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7023 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7024 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7025 IEM_MC_ADVANCE_RIP();
7026 IEM_MC_END();
7027 return VINF_SUCCESS;
7028
7029 case IEMMODE_64BIT:
7030 IEM_MC_BEGIN(1, 0);
7031 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7032 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7033 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7034 IEM_MC_ADVANCE_RIP();
7035 IEM_MC_END();
7036 return VINF_SUCCESS;
7037
7038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7039 }
7040}
7041
7042
7043/** Opcode 0x0f 0xc8. */
7044FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7045{
7046 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7047     /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7048        prefix; REX.B appears to be the correct prefix, however.  For a parallel
7049 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7050 IEMOP_HLP_MIN_486();
7051 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7052}
7053
7054
7055/** Opcode 0x0f 0xc9. */
7056FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7057{
7058 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7059 IEMOP_HLP_MIN_486();
7060 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7061}
7062
7063
7064/** Opcode 0x0f 0xca. */
7065FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7066{
7067     IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7068 IEMOP_HLP_MIN_486();
7069 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7070}
7071
7072
7073/** Opcode 0x0f 0xcb. */
7074FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7075{
7076     IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7077 IEMOP_HLP_MIN_486();
7078 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7079}
7080
7081
7082/** Opcode 0x0f 0xcc. */
7083FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7084{
7085 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7086 IEMOP_HLP_MIN_486();
7087 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7088}
7089
7090
7091/** Opcode 0x0f 0xcd. */
7092FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7093{
7094 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7095 IEMOP_HLP_MIN_486();
7096 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7097}
7098
7099
7100/** Opcode 0x0f 0xce. */
7101FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7102{
7103 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7104 IEMOP_HLP_MIN_486();
7105 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7106}
7107
7108
7109/** Opcode 0x0f 0xcf. */
7110FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7111{
7112 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7113 IEMOP_HLP_MIN_486();
7114 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7115}
7116
7117
7118/* Opcode 0x0f 0xd0 - invalid */
7119/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7120FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7121/* Opcode 0xf3 0x0f 0xd0 - invalid */
7122/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7123FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7124
7125/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7126FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7127/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7128FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7129/* Opcode 0xf3 0x0f 0xd1 - invalid */
7130/* Opcode 0xf2 0x0f 0xd1 - invalid */
7131
7132/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7133FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7134/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7135FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7136/* Opcode 0xf3 0x0f 0xd2 - invalid */
7137/* Opcode 0xf2 0x0f 0xd2 - invalid */
7138
7139/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7140FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7141/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7142FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7143/* Opcode 0xf3 0x0f 0xd3 - invalid */
7144/* Opcode 0xf2 0x0f 0xd3 - invalid */
7145
7146/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7147FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7148/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7149FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7150/* Opcode 0xf3 0x0f 0xd4 - invalid */
7151/* Opcode 0xf2 0x0f 0xd4 - invalid */
7152
7153/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7154FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7155/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7156FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7157/* Opcode 0xf3 0x0f 0xd5 - invalid */
7158/* Opcode 0xf2 0x0f 0xd5 - invalid */
7159
7160/* Opcode 0x0f 0xd6 - invalid */
7161/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7162FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7163/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7164FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7165/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7166FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7167#if 0
7168FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7169{
7170     /* Docs say register only. */
7171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7172
7173 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7174 {
7175 case IEM_OP_PRF_SIZE_OP: /* SSE */
7176 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7177 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7178 IEM_MC_BEGIN(2, 0);
7179 IEM_MC_ARG(uint64_t *, pDst, 0);
7180 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7181 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7182 IEM_MC_PREPARE_SSE_USAGE();
7183 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7184 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7185 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7186 IEM_MC_ADVANCE_RIP();
7187 IEM_MC_END();
7188 return VINF_SUCCESS;
7189
7190 case 0: /* MMX */
7191             IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7192 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7193 IEM_MC_BEGIN(2, 0);
7194 IEM_MC_ARG(uint64_t *, pDst, 0);
7195 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7196 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7197 IEM_MC_PREPARE_FPU_USAGE();
7198 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7199 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7200 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7201 IEM_MC_ADVANCE_RIP();
7202 IEM_MC_END();
7203 return VINF_SUCCESS;
7204
7205 default:
7206 return IEMOP_RAISE_INVALID_OPCODE();
7207 }
7208}
7209#endif
7210
7211
7212/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7213FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7214{
7215     /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7216 /** @todo testcase: Check that the instruction implicitly clears the high
7217 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7218 * and opcode modifications are made to work with the whole width (not
7219 * just 128). */
7220     IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
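    /* PMOVMSKB gathers the most significant bit of each byte of the source MMX
       register into the low 8 bits of the destination GPR. */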
7221     /* Docs say register only. */
7222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7223 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7224 {
7225 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7226 IEM_MC_BEGIN(2, 0);
7227 IEM_MC_ARG(uint64_t *, pDst, 0);
7228 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7229 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7230 IEM_MC_PREPARE_FPU_USAGE();
7231 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7232 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7233 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7234 IEM_MC_ADVANCE_RIP();
7235 IEM_MC_END();
7236 return VINF_SUCCESS;
7237 }
7238 return IEMOP_RAISE_INVALID_OPCODE();
7239}
7240
7241 /** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7242FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7243{
7244     /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7245 /** @todo testcase: Check that the instruction implicitly clears the high
7246 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7247 * and opcode modifications are made to work with the whole width (not
7248 * just 128). */
7249     IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd, Ux");
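    /* The SSE form gathers the byte MSBs of the XMM register into a 16-bit mask
       in the destination GPR. */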
7250     /* Docs say register only. */
7251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7252 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7253 {
7254 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7255 IEM_MC_BEGIN(2, 0);
7256 IEM_MC_ARG(uint64_t *, pDst, 0);
7257 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7258 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7259 IEM_MC_PREPARE_SSE_USAGE();
7260 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7261 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7262 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7263 IEM_MC_ADVANCE_RIP();
7264 IEM_MC_END();
7265 return VINF_SUCCESS;
7266 }
7267 return IEMOP_RAISE_INVALID_OPCODE();
7268}
7269
7270/* Opcode 0xf3 0x0f 0xd7 - invalid */
7271/* Opcode 0xf2 0x0f 0xd7 - invalid */
7272
7273
7274/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7275FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7276/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7277FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7278/* Opcode 0xf3 0x0f 0xd8 - invalid */
7279/* Opcode 0xf2 0x0f 0xd8 - invalid */
7280
7281/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7282FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7283/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7284FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7285/* Opcode 0xf3 0x0f 0xd9 - invalid */
7286/* Opcode 0xf2 0x0f 0xd9 - invalid */
7287
7288/** Opcode 0x0f 0xda - pminub Pq, Qq */
7289FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7290/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7291FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7292/* Opcode 0xf3 0x0f 0xda - invalid */
7293/* Opcode 0xf2 0x0f 0xda - invalid */
7294
7295/** Opcode 0x0f 0xdb - pand Pq, Qq */
7296FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7297/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7298FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7299/* Opcode 0xf3 0x0f 0xdb - invalid */
7300/* Opcode 0xf2 0x0f 0xdb - invalid */
7301
7302/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7303FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7304/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7305FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7306/* Opcode 0xf3 0x0f 0xdc - invalid */
7307/* Opcode 0xf2 0x0f 0xdc - invalid */
7308
7309/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7310FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7311/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7312FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7313/* Opcode 0xf3 0x0f 0xdd - invalid */
7314/* Opcode 0xf2 0x0f 0xdd - invalid */
7315
7316/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7317FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7318/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7319FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7320/* Opcode 0xf3 0x0f 0xde - invalid */
7321/* Opcode 0xf2 0x0f 0xde - invalid */
7322
7323/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7324FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7325/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7326FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7327/* Opcode 0xf3 0x0f 0xdf - invalid */
7328/* Opcode 0xf2 0x0f 0xdf - invalid */
7329
7330/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7331FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7332/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7333FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7334/* Opcode 0xf3 0x0f 0xe0 - invalid */
7335/* Opcode 0xf2 0x0f 0xe0 - invalid */
7336
7337/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7338FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7339/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7340FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7341/* Opcode 0xf3 0x0f 0xe1 - invalid */
7342/* Opcode 0xf2 0x0f 0xe1 - invalid */
7343
7344/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7345FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7346/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7347FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7348/* Opcode 0xf3 0x0f 0xe2 - invalid */
7349/* Opcode 0xf2 0x0f 0xe2 - invalid */
7350
7351/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7352FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7353/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7354FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7355/* Opcode 0xf3 0x0f 0xe3 - invalid */
7356/* Opcode 0xf2 0x0f 0xe3 - invalid */
7357
7358/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7359FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7360/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7361FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7362/* Opcode 0xf3 0x0f 0xe4 - invalid */
7363/* Opcode 0xf2 0x0f 0xe4 - invalid */
7364
7365/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7366FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7367/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7368FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7369/* Opcode 0xf3 0x0f 0xe5 - invalid */
7370/* Opcode 0xf2 0x0f 0xe5 - invalid */
7371
7372/* Opcode 0x0f 0xe6 - invalid */
7373/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7374FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7375/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7376FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7377/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7378FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7379
7380
7381/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7382FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7383{
7384 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
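    /* MOVNTQ is a non-temporal (streaming) store of an MMX register to memory;
       only the memory form is valid, see below. */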
7385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7386 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7387 {
7388 /* Register, memory. */
7389 IEM_MC_BEGIN(0, 2);
7390 IEM_MC_LOCAL(uint64_t, uSrc);
7391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7392
7393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7395 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7396 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7397
7398 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7399 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7400
7401 IEM_MC_ADVANCE_RIP();
7402 IEM_MC_END();
7403 return VINF_SUCCESS;
7404 }
7405 /* The register, register encoding is invalid. */
7406 return IEMOP_RAISE_INVALID_OPCODE();
7407}
7408
7409/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7410FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7411{
7412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7413 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7414 {
7415 /* Register, memory. */
7416 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
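        /* Non-temporal 128-bit store; the aligned store helper below enforces
           the 16-byte alignment the instruction requires. */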
7417 IEM_MC_BEGIN(0, 2);
7418 IEM_MC_LOCAL(uint128_t, uSrc);
7419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7420
7421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7423 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7424 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7425
7426 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7427 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7428
7429 IEM_MC_ADVANCE_RIP();
7430 IEM_MC_END();
7431 return VINF_SUCCESS;
7432 }
7433
7434 /* The register, register encoding is invalid. */
7435 return IEMOP_RAISE_INVALID_OPCODE();
7436}
7437
7438/* Opcode 0xf3 0x0f 0xe7 - invalid */
7439/* Opcode 0xf2 0x0f 0xe7 - invalid */
7440
7441
7442/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7443FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7444/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7445FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7446/* Opcode 0xf3 0x0f 0xe8 - invalid */
7447/* Opcode 0xf2 0x0f 0xe8 - invalid */
7448
7449/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7450FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7451/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7452FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7453/* Opcode 0xf3 0x0f 0xe9 - invalid */
7454/* Opcode 0xf2 0x0f 0xe9 - invalid */
7455
7456/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7457FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7458/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7459FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7460/* Opcode 0xf3 0x0f 0xea - invalid */
7461/* Opcode 0xf2 0x0f 0xea - invalid */
7462
7463/** Opcode 0x0f 0xeb - por Pq, Qq */
7464FNIEMOP_STUB(iemOp_por_Pq_Qq);
7465/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7466FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7467/* Opcode 0xf3 0x0f 0xeb - invalid */
7468/* Opcode 0xf2 0x0f 0xeb - invalid */
7469
7470/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7471FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7472/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7473FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7474/* Opcode 0xf3 0x0f 0xec - invalid */
7475/* Opcode 0xf2 0x0f 0xec - invalid */
7476
7477/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7478FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7479/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7480FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7481/* Opcode 0xf3 0x0f 0xed - invalid */
7482/* Opcode 0xf2 0x0f 0xed - invalid */
7483
7484/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7485FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7486/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7487FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7488/* Opcode 0xf3 0x0f 0xee - invalid */
7489/* Opcode 0xf2 0x0f 0xee - invalid */
7490
7491
7492/** Opcode 0x0f 0xef - pxor Pq, Qq */
7493FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7494{
7495 IEMOP_MNEMONIC(pxor, "pxor");
7496 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7497}
7498
7499/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7500FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7501{
7502 IEMOP_MNEMONIC(vpxor, "vpxor");
7503 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7504}
7505
7506/* Opcode 0xf3 0x0f 0xef - invalid */
7507/* Opcode 0xf2 0x0f 0xef - invalid */
7508
7509/* Opcode 0x0f 0xf0 - invalid */
7510/* Opcode 0x66 0x0f 0xf0 - invalid */
7511/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7512FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7513
7514/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7515FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7516/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7517FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7518/* Opcode 0xf2 0x0f 0xf1 - invalid */
7519
7520/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7521FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7522/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7523FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7524/* Opcode 0xf2 0x0f 0xf2 - invalid */
7525
7526/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7527FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7528/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7529FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7530/* Opcode 0xf2 0x0f 0xf3 - invalid */
7531
7532/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7533FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7534/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7535FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7536/* Opcode 0xf2 0x0f 0xf4 - invalid */
7537
7538/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7539FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7540/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7541FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7542/* Opcode 0xf2 0x0f 0xf5 - invalid */
7543
7544/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7545FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7546/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7547FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7548/* Opcode 0xf2 0x0f 0xf6 - invalid */
7549
7550/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7551FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7552/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7553FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7554/* Opcode 0xf2 0x0f 0xf7 - invalid */
7555
7556/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7557FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7558/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7559FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7560/* Opcode 0xf2 0x0f 0xf8 - invalid */
7561
7562/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7563FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7564/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7565FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7566/* Opcode 0xf2 0x0f 0xf9 - invalid */
7567
7568/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7569FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7570/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7571FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7572/* Opcode 0xf2 0x0f 0xfa - invalid */
7573
7574/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7575FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7576/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7577FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7578/* Opcode 0xf2 0x0f 0xfb - invalid */
7579
7580/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7581FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7582/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7583FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7584/* Opcode 0xf2 0x0f 0xfc - invalid */
7585
7586/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7587FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7588/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7589FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7590/* Opcode 0xf2 0x0f 0xfd - invalid */
7591
7592/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7593FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7594/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7595FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7596/* Opcode 0xf2 0x0f 0xfe - invalid */
7597
7598
7599/** Opcode **** 0x0f 0xff - UD0 */
7600FNIEMOP_DEF(iemOp_ud0)
7601{
7602 IEMOP_MNEMONIC(ud0, "ud0");
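    /* Intel CPUs decode a ModR/M byte (and any memory operand bytes) for UD0
       before raising #UD, so consume them here; others raise #UD right away. */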
7603 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7604 {
7605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7606#ifndef TST_IEM_CHECK_MC
7607 RTGCPTR GCPtrEff;
7608 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7609 if (rcStrict != VINF_SUCCESS)
7610 return rcStrict;
7611#endif
7612 IEMOP_HLP_DONE_DECODING();
7613 }
7614 return IEMOP_RAISE_INVALID_OPCODE();
7615}
7616
7617
7618
7619/**
7620 * Two byte opcode map, first byte 0x0f.
7621 *
7622 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7623 * check if it needs updating as well when making changes.
7624 */
7625IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7626{
7627 /* no prefix, 066h prefix f3h prefix, f2h prefix */
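    /* Each opcode byte gets four entries, selected by the last mandatory prefix;
       IEMOP_X4 fills all four prefix columns with the same handler. */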
7628 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7629 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7630 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7631 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7632 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7633 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7634 /* 0x06 */ IEMOP_X4(iemOp_clts),
7635 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7636 /* 0x08 */ IEMOP_X4(iemOp_invd),
7637 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7638 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7639 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7640 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7641 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7642 /* 0x0e */ IEMOP_X4(iemOp_femms),
7643 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7644
7645 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7646 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7647 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7648 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7649 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7650 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7651 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7652 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7653 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7654 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7655 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7656 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7657 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7658 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7659 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7660 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7661
7662 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7663 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7664 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7665 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7666 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7667 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7668 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7669 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7670 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7671 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7672 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7673 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7674 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7675 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7676 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7677 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7678
7679 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7680 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7681 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7682 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7683 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7684 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7685 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7686 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7687 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7688 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7689 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7690 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7691 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7692 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7693 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7694 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7695
7696 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7697 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7698 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7699 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7700 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7701 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7702 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7703 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7704 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7705 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7706 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7707 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7708 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7709 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7710 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7711 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7712
7713 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7714 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7715 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7716 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7717 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7718 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7719 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7720 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7721 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7722 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7723 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7724 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7725 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7726 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7727 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7728 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7729
7730 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7731 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7732 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7733 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7734 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7735 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7736 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7737 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7738 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7739 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7740 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7741 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7742 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7743 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7744 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7745 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7746
7747 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7748 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7749 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7750 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7751 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7752 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7753 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7754 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7755
7756 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7757 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7758 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7759 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7760 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7761 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7762 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7763 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7764
7765 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7766 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7767 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7768 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7769 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7770 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7771 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7772 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7773 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7774 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7775 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7776 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7777 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7778 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7779 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7780 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7781
7782 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7783 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7784 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7785 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7786 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7787 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7788 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7789 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7790 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7791 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7792 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7793 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7794 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7795 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7796 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7797 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7798
7799 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7800 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7801 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7802 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7803 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7804 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7805 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7806 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7807 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7808 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7809 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7810 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7811 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7812 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7813 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7814 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7815
7816 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7817 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7818 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7819 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7820 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7821 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7822 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7823 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7824 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7825 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7826 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7827 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7828 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7829 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7830 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7831 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7832
7833 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7834 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7835 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7836 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7837 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7838 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7839 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7840 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7841 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7842 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7843 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7844 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7845 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7846 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7847 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7848 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7849
7850 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7851 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7852 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7853 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7854 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7855 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7856 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7857 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7858 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7859 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7860 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7861 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7862 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7863 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7864 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7865 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7866
7867 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7868 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7869 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7870 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7871 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7872 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7873 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7874 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7875 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7876 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7877 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7878 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7879 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7880 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7881 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7882 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7883
7884 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7885 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7886 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7887 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7888 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7889 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7890 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7891 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7892 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7893 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7894 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7895 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7896 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7897 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7898 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7899 /* 0xff */ IEMOP_X4(iemOp_ud0),
7900};
7901AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
7902
7903
7904/**
7905 * VEX opcode map \#1.
7906 *
7907 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
7908  *          it needs updating too when making changes.
7909 */
7910IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
7911{
7912 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7913 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7914 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7915 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7916 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7917 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7918 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7919 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7920 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7921 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7922 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7923 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7924 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
7925 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7926 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7927 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7928 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7929
7930 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7931 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7932 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7933 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7934 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7935 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7936 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7937 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7938 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7939 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7940 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7941 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7942 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7943 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7944 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7945 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7946
7947 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7948 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7949 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7950 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7951 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7952 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7953 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7954 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7955 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7956 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7957 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7958 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7959 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7960 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7961 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7962 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7963
7964 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7965 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7966 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7967 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7968 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7969 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7970 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7971 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7972 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7973 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7974 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7975 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7976 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7977 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7978 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7979 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7980
7981 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7982 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7983 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7984 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7985 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
7986 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
7987 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
7988 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
7989 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
7990 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
7991 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
7992 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
7993 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
7994 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
7995 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
7996 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
7997
7998 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7999 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8000 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8001 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8002 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8003 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8004 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8005 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8006 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8007 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8008 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8009 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8010 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8011 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8012 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8013 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8014
8015 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8016 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8017 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8018 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8019 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8020 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8021 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8022 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8023 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8024 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8025 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8026 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8027 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8028 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8029 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8030 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
/** @} */
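

/*
 * Editor's note: a minimal dispatch sketch, not part of the original file.
 * Each opcode byte owns one row of four handlers in the table above, with
 * the column selected by the mandatory SIMD prefix in the order none, 0x66,
 * 0xF3, 0xF2; that layout is what the 1024-entry size check asserts.
 * Assuming the decoder tracks the active prefix as an index with that
 * encoding (the member name idxPrefix below is such an assumption), the
 * two-byte escape reduces to a single table lookup:
 */
#if 0 /* illustrative sketch only */
FNIEMOP_DEF(iemOp_TwoByteEscapeSketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b); /* the byte following 0x0f */
    /* Row = opcode byte; column = SIMD prefix (0=none, 1=0x66, 2=0xF3, 3=0xF2). */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif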