
source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 65989

Last change on this file: r65989, checked in by vboxsync, 8 years ago:

VMM: Nested Hw.virt: Implement AMD-V VMMCALL in IEM. Cleanup the code in HMAll and segregate SVM all-context code.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 65989 2017-03-07 21:36:03Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */


/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
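
/*
 * Illustrative sketch (not part of the build): how the ModRM byte used by
 * these group dispatchers decomposes.  Bits 7:6 are mod (3 = register
 * operand, anything else = memory), bits 5:3 are reg (the /digit selecting
 * the group member, or a register number extended by REX.R), and bits 2:0
 * are rm (the base register / addressing form, extended by REX.B).  The
 * helper name below is hypothetical.
 */
#if 0
# include <stdint.h>
static void iemExampleDecodeModRm(uint8_t bRm) /* hypothetical example helper */
{
    uint8_t const iMod = bRm >> 6;          /* cf. X86_MODRM_MOD_MASK / X86_MODRM_MOD_SHIFT */
    uint8_t const iReg = (bRm >> 3) & 7;    /* cf. X86_MODRM_REG_SHIFT / X86_MODRM_REG_SMASK */
    uint8_t const iRm  = bRm & 7;           /* cf. X86_MODRM_RM_MASK */
    (void)iMod; (void)iReg; (void)iRm;
}
#endif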


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0x01 0xc1 (/0, mod=3, rm=1). */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc2 (/0, mod=3, rm=2). */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc3 (/0, mod=3, rm=3). */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc4 (/0, mod=3, rm=4). */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc8 (/1, mod=3, rm=0). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 0xc9 (/1, mod=3, rm=1). */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xf8 (/7, mod=3, rm=0). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9 (/7, mod=3, rm=1). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
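
/*
 * Illustrative note (not built): with mod == 3 the whole ModRM byte
 * effectively becomes part of the opcode, i.e. 0xc0 | (reg << 3) | rm.
 * That is why reg=0/rm=1 above is vmcall (0f 01 c1), reg=3/rm=0 is vmrun
 * (0f 01 d8) and reg=7/rm=1 is rdtscp (0f 01 f9).  The helper name below
 * is hypothetical.
 */
#if 0
# include <stdint.h>
static uint8_t iemExampleGrp7FullByte(uint8_t iReg, uint8_t iRm) /* hypothetical */
{
    return (uint8_t)(0xc0 | (iReg << 3) | iRm); /* e.g. (3,0) -> 0xd8 = vmrun */
}
#endif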

/** Common worker for lar and lsl (0x0f 0x02 and 0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
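
/*
 * Encoding note (architectural background, stated with some hedging): the
 * real 3DNow! format is 0f 0f /r ib, i.e. the operation byte trails the
 * ModRM byte and any SIB/displacement bytes, e.g.:
 *      0f 0f c1 9e     pfadd mm0, mm1
 *      0f 0f 00 b4     pfmul mm0, qword [eax]
 * The dispatcher above consumes the byte straight after 0f 0f, which is
 * harmless only while every entry is a stub.
 */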


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);

/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
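
/*
 * Illustrative note: 0f 1f /0 is the recommended multi-byte NOP, so the
 * ModRM/SIB/displacement bytes exist purely as padding.  Typical forms
 * (standard assembler output, shown for reference):
 *      0f 1f 00                        nop dword [eax]             ; 3 bytes
 *      0f 1f 44 00 00                  nop dword [eax+eax*1+0]     ; 5 bytes
 *      66 0f 1f 84 00 00 00 00 00      nop word [eax+eax*1+0]      ; 9 bytes
 */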


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
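
/*
 * Illustrative note on the lock prefix quirk handled above: on CPUs with the
 * alternate CR8 encoding (the fMovCr8In32Bit feature) a LOCK prefix selects
 * CR8 outside long mode, e.g.:
 *      f0 0f 20 c0     mov eax, cr8    ; LOCK-prefixed form
 *      44 0f 20 c0     mov rax, cr8    ; REX.R form, 64-bit mode only
 */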


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT


/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
{
    IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}

/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */


/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param   a_Cnd   The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1858
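/*
 * Rough illustration of what a CMOV_X expansion does for the 32-bit operand
 * size in 64-bit mode (sketch only with made-up helper names, not part of
 * the emulation):
 *
 *      uint32_t uSrc = fRegForm ? greg32(rm) : read_u32(effaddr); // always fetched
 *      if (fCondition)
 *          greg64(reg) = uSrc;             // implicitly zero extended
 *      else
 *          greg64(reg) &= UINT32_MAX;      // high half cleared anyway
 *
 * The source is fetched whether or not the condition holds (hence the
 * unconditional IEM_MC_FETCH_MEM_* above), so a CMOVcc with a memory operand
 * can fault even when the move is not taken.
 */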


/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X

/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);

/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);

/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

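/*
 * For instance, punpcklbw interleaves the low bytes of the two operands
 * (MMX form, most significant byte leftmost; illustration only):
 *
 *      dst:    a7 a6 a5 a4 a3 a2 a1 a0
 *      src:    b7 b6 b5 b4 b3 b2 b1 b0
 *      result: b3 a3 b2 a2 b1 a1 b0 a0
 */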

/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 128-bit aligned 64-bit access where only the lower 64 bits are
 * used.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/* Opcode 0xf3 0x0f 0x62 - invalid */



/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */

/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the high half of a register, which in the memory case
 * means a 64-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

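/*
 * For instance, punpckhdq (MMX form) interleaves the high dwords of the two
 * operands (most significant dword leftmost; illustration only):
 *
 *      dst: a1 a0      src: b1 b0      result: b1 a1
 */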

/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the high half of a register, which in the memory case
 * means a 128-bit aligned access where the full 128 bits may be read, though
 * only the upper 64 bits are used.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint128_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint128_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}

/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
/* Opcode 0xf3 0x0f 0x68 - invalid */


/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}

/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
/* Opcode 0xf3 0x0f 0x69 - invalid */


/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
{
    IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
/* Opcode 0xf3 0x0f 0x6a - invalid */


/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x6b - invalid */


/* Opcode 0x0f 0x6c - invalid */

/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}

/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */

/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
{
    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}

/* Opcode 0xf3 0x0f 0x6d - invalid */


/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
    else
        IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* MMX, greg */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* MMX, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

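/*
 * Decoding note for the above: without REX.W this is movd Pd,Ed and the
 * 32-bit source is zero extended into the 64-bit MMX register; with REX.W it
 * becomes movq Pq,Eq.  E.g. 48 0f 6e c8 decodes as movq mm1,rax (encoding
 * example for illustration).
 */
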
/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
    else
        IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* XMM, greg */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* XMM, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x6e - invalid */


/** Opcode 0x0f 0x6f - movq Pq, Qq */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

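/*
 * Unlike the movdqu variant below, the movdqa load above uses an aligned
 * 128-bit fetch, so an unaligned memory operand raises #GP(0) instead of
 * loading the data.
 */
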
/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

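/*
 * The pshufw immediate selects one source word per destination word, two
 * bits each:  dst.w[i] = src.w[(bEvil >> (i * 2)) & 3].  E.g. an immediate
 * of 0x1b (binary 00 01 10 11) reverses the four words (illustration only).
 */
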
/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint128_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint128_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint128_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint128_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint128_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint128_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);


/**
 * Group 12 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);


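/*
 * The group 12/13/14 register-form dispatchers below index their tables as
 * reg * 4 + pVCpu->iem.s.idxPrefix, the prefix index being 0 for none,
 * 1 for 0x66, 2 for 0xf3 and 3 for 0xf2, matching the four columns per /reg
 * row above.  E.g. 0x66 0x0f 0x71 /2 selects entry 2*4 + 1, i.e.
 * iemOp_Grp12_vpsrlw_Hx_Ux_Ib (illustration of the lookup).
 */
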
/** Opcode 0x0f 0x71. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);


/**
 * Group 13 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);

/** Opcode 0x0f 0x72. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT

/**
 * Group 14 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);


/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint128_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint128_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

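/*
 * The pcmpeq* instructions below compare element-wise and store all-ones or
 * all-zero elements, e.g. at byte granularity (illustration only):
 *
 *      dst.b[i] = dst.b[i] == src.b[i] ? 0xff : 0x00;
 */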
3217
3218/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3219FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3220{
3221 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3222 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3223}
3224
3225/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3226FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3227{
3228 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3229 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3230}
3231
3232/* Opcode 0xf3 0x0f 0x74 - invalid */
3233/* Opcode 0xf2 0x0f 0x74 - invalid */
3234
3235
3236/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3237FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3238{
3239 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3240 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3241}
3242
3243/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3244FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3245{
3246 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3247 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3248}
3249
3250/* Opcode 0xf3 0x0f 0x75 - invalid */
3251/* Opcode 0xf2 0x0f 0x75 - invalid */
3252
3253
3254/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3255FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3256{
3257 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3258 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3259}
3260
3261/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3262FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3263{
3264 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3265 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3266}
3267
3268/* Opcode 0xf3 0x0f 0x76 - invalid */
3269/* Opcode 0xf2 0x0f 0x76 - invalid */
3270
3271
3272/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3273FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3274/* Opcode 0x66 0x0f 0x77 - invalid */
3275/* Opcode 0xf3 0x0f 0x77 - invalid */
3276/* Opcode 0xf2 0x0f 0x77 - invalid */
3277
3278/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3279FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3280/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3281FNIEMOP_STUB(iemOp_AmdGrp17);
3282/* Opcode 0xf3 0x0f 0x78 - invalid */
3283/* Opcode 0xf2 0x0f 0x78 - invalid */
3284
3285/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3286FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3287/* Opcode 0x66 0x0f 0x79 - invalid */
3288/* Opcode 0xf3 0x0f 0x79 - invalid */
3289/* Opcode 0xf2 0x0f 0x79 - invalid */
3290
3291/* Opcode 0x0f 0x7a - invalid */
3292/* Opcode 0x66 0x0f 0x7a - invalid */
3293/* Opcode 0xf3 0x0f 0x7a - invalid */
3294/* Opcode 0xf2 0x0f 0x7a - invalid */
3295
3296/* Opcode 0x0f 0x7b - invalid */
3297/* Opcode 0x66 0x0f 0x7b - invalid */
3298/* Opcode 0xf3 0x0f 0x7b - invalid */
3299/* Opcode 0xf2 0x0f 0x7b - invalid */
3300
3301/* Opcode 0x0f 0x7c - invalid */
3302/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3303FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3304/* Opcode 0xf3 0x0f 0x7c - invalid */
3305/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3306FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3307
3308/* Opcode 0x0f 0x7d - invalid */
3309/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3310FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3311/* Opcode 0xf3 0x0f 0x7d - invalid */
3312/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3313FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3314
3315
3316/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3317FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3318{
3319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3320 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3321 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3322 else
3323 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3324 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3325 {
3326 /* greg, MMX */
3327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3328 IEM_MC_BEGIN(0, 1);
3329 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3330 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3331 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3332 {
3333 IEM_MC_LOCAL(uint64_t, u64Tmp);
3334 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3335 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3336 }
3337 else
3338 {
3339 IEM_MC_LOCAL(uint32_t, u32Tmp);
3340 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3341 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3342 }
3343 IEM_MC_ADVANCE_RIP();
3344 IEM_MC_END();
3345 }
3346 else
3347 {
3348 /* [mem], MMX */
3349 IEM_MC_BEGIN(0, 2);
3350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3351 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3354 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3355 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3356 {
3357 IEM_MC_LOCAL(uint64_t, u64Tmp);
3358 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3359 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3360 }
3361 else
3362 {
3363 IEM_MC_LOCAL(uint32_t, u32Tmp);
3364 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3365 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3366 }
3367 IEM_MC_ADVANCE_RIP();
3368 IEM_MC_END();
3369 }
3370 return VINF_SUCCESS;
3371}
3372
3373/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3374FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3375{
3376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3377 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3378 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3379 else
3380 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3382 {
3383 /* greg, XMM */
3384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3385 IEM_MC_BEGIN(0, 1);
3386 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3387 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3388 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3389 {
3390 IEM_MC_LOCAL(uint64_t, u64Tmp);
3391 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3392 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3393 }
3394 else
3395 {
3396 IEM_MC_LOCAL(uint32_t, u32Tmp);
3397 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3398 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3399 }
3400 IEM_MC_ADVANCE_RIP();
3401 IEM_MC_END();
3402 }
3403 else
3404 {
3405 /* [mem], XMM */
3406 IEM_MC_BEGIN(0, 2);
3407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3408 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3411 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3412 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3413 {
3414 IEM_MC_LOCAL(uint64_t, u64Tmp);
3415 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3416 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3417 }
3418 else
3419 {
3420 IEM_MC_LOCAL(uint32_t, u32Tmp);
3421 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3422 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3423 }
3424 IEM_MC_ADVANCE_RIP();
3425 IEM_MC_END();
3426 }
3427 return VINF_SUCCESS;
3428}
3429
3430/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3431FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3432/* Opcode 0xf2 0x0f 0x7e - invalid */
3433
3434
3435/** Opcode 0x0f 0x7f - movq Qq, Pq */
3436FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3437{
3438 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3440 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3441 {
3442 /*
3443 * Register, register.
3444 */
3445 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3446 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3448 IEM_MC_BEGIN(0, 1);
3449 IEM_MC_LOCAL(uint64_t, u64Tmp);
3450 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3451 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3452 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3453 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3454 IEM_MC_ADVANCE_RIP();
3455 IEM_MC_END();
3456 }
3457 else
3458 {
3459 /*
3460 * Register, memory.
3461 */
3462 IEM_MC_BEGIN(0, 2);
3463 IEM_MC_LOCAL(uint64_t, u64Tmp);
3464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3465
3466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3468 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3469 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3470
3471 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3472 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3473
3474 IEM_MC_ADVANCE_RIP();
3475 IEM_MC_END();
3476 }
3477 return VINF_SUCCESS;
3478}
3479
3480/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3481FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3482{
3483 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3485 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3486 {
3487 /*
3488 * Register, register.
3489 */
3490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3491 IEM_MC_BEGIN(0, 0);
3492 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3493 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3494 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3495 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3496 IEM_MC_ADVANCE_RIP();
3497 IEM_MC_END();
3498 }
3499 else
3500 {
3501 /*
3502 * Register, memory.
3503 */
3504 IEM_MC_BEGIN(0, 2);
3505 IEM_MC_LOCAL(uint128_t, u128Tmp);
3506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3507
3508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3510 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3511 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3512
3513 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3514 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3515
3516 IEM_MC_ADVANCE_RIP();
3517 IEM_MC_END();
3518 }
3519 return VINF_SUCCESS;
3520}
3521
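/*
 * Note: the 0xf3 variant below decodes exactly like the 0x66 one above; the
 * only difference is the store in the memory path: movdqu uses the plain
 * IEM_MC_STORE_MEM_U128, skipping the 16-byte alignment check (and the #GP
 * it would raise) that movdqa's IEM_MC_STORE_MEM_U128_ALIGN_SSE performs.
 */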
3522/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3523FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3524{
3525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3526 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3527 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3528 {
3529 /*
3530 * Register, register.
3531 */
3532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3533 IEM_MC_BEGIN(0, 0);
3534 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3535 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3536 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3537 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3538 IEM_MC_ADVANCE_RIP();
3539 IEM_MC_END();
3540 }
3541 else
3542 {
3543 /*
3544 * Register, memory.
3545 */
3546 IEM_MC_BEGIN(0, 2);
3547 IEM_MC_LOCAL(uint128_t, u128Tmp);
3548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3549
3550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3552 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3553 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3554
3555 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3556 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3557
3558 IEM_MC_ADVANCE_RIP();
3559 IEM_MC_END();
3560 }
3561 return VINF_SUCCESS;
3562}
3563
3564/* Opcode 0xf2 0x0f 0x7f - invalid */
3565
3566
3567
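/*
 * Opcodes 0x0f 0x80 thru 0x0f 0x8f - Jcc Jv, the long-displacement forms of
 * the conditional jumps. All sixteen handlers decode identically: the default
 * operand size is promoted to 64-bit in long mode, a signed 16-bit or 32-bit
 * displacement is fetched depending on the effective operand size, and the
 * jump is either taken or RIP merely advanced according to the EFLAGS
 * condition. Only the tested condition differs between the handlers.
 */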
3568/** Opcode 0x0f 0x80. */
3569FNIEMOP_DEF(iemOp_jo_Jv)
3570{
3571 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3572 IEMOP_HLP_MIN_386();
3573 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3574 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3575 {
3576 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3578
3579 IEM_MC_BEGIN(0, 0);
3580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3581 IEM_MC_REL_JMP_S16(i16Imm);
3582 } IEM_MC_ELSE() {
3583 IEM_MC_ADVANCE_RIP();
3584 } IEM_MC_ENDIF();
3585 IEM_MC_END();
3586 }
3587 else
3588 {
3589 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3591
3592 IEM_MC_BEGIN(0, 0);
3593 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3594 IEM_MC_REL_JMP_S32(i32Imm);
3595 } IEM_MC_ELSE() {
3596 IEM_MC_ADVANCE_RIP();
3597 } IEM_MC_ENDIF();
3598 IEM_MC_END();
3599 }
3600 return VINF_SUCCESS;
3601}
3602
3603
3604/** Opcode 0x0f 0x81. */
3605FNIEMOP_DEF(iemOp_jno_Jv)
3606{
3607 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3608 IEMOP_HLP_MIN_386();
3609 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3610 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3611 {
3612 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3614
3615 IEM_MC_BEGIN(0, 0);
3616 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3617 IEM_MC_ADVANCE_RIP();
3618 } IEM_MC_ELSE() {
3619 IEM_MC_REL_JMP_S16(i16Imm);
3620 } IEM_MC_ENDIF();
3621 IEM_MC_END();
3622 }
3623 else
3624 {
3625 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3627
3628 IEM_MC_BEGIN(0, 0);
3629 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3630 IEM_MC_ADVANCE_RIP();
3631 } IEM_MC_ELSE() {
3632 IEM_MC_REL_JMP_S32(i32Imm);
3633 } IEM_MC_ENDIF();
3634 IEM_MC_END();
3635 }
3636 return VINF_SUCCESS;
3637}
3638
3639
3640/** Opcode 0x0f 0x82. */
3641FNIEMOP_DEF(iemOp_jc_Jv)
3642{
3643 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3644 IEMOP_HLP_MIN_386();
3645 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3646 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3647 {
3648 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3650
3651 IEM_MC_BEGIN(0, 0);
3652 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3653 IEM_MC_REL_JMP_S16(i16Imm);
3654 } IEM_MC_ELSE() {
3655 IEM_MC_ADVANCE_RIP();
3656 } IEM_MC_ENDIF();
3657 IEM_MC_END();
3658 }
3659 else
3660 {
3661 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3663
3664 IEM_MC_BEGIN(0, 0);
3665 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3666 IEM_MC_REL_JMP_S32(i32Imm);
3667 } IEM_MC_ELSE() {
3668 IEM_MC_ADVANCE_RIP();
3669 } IEM_MC_ENDIF();
3670 IEM_MC_END();
3671 }
3672 return VINF_SUCCESS;
3673}
3674
3675
3676/** Opcode 0x0f 0x83. */
3677FNIEMOP_DEF(iemOp_jnc_Jv)
3678{
3679 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3680 IEMOP_HLP_MIN_386();
3681 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3682 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3683 {
3684 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3686
3687 IEM_MC_BEGIN(0, 0);
3688 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3689 IEM_MC_ADVANCE_RIP();
3690 } IEM_MC_ELSE() {
3691 IEM_MC_REL_JMP_S16(i16Imm);
3692 } IEM_MC_ENDIF();
3693 IEM_MC_END();
3694 }
3695 else
3696 {
3697 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3699
3700 IEM_MC_BEGIN(0, 0);
3701 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3702 IEM_MC_ADVANCE_RIP();
3703 } IEM_MC_ELSE() {
3704 IEM_MC_REL_JMP_S32(i32Imm);
3705 } IEM_MC_ENDIF();
3706 IEM_MC_END();
3707 }
3708 return VINF_SUCCESS;
3709}
3710
3711
3712/** Opcode 0x0f 0x84. */
3713FNIEMOP_DEF(iemOp_je_Jv)
3714{
3715 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3716 IEMOP_HLP_MIN_386();
3717 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3718 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3719 {
3720 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3722
3723 IEM_MC_BEGIN(0, 0);
3724 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3725 IEM_MC_REL_JMP_S16(i16Imm);
3726 } IEM_MC_ELSE() {
3727 IEM_MC_ADVANCE_RIP();
3728 } IEM_MC_ENDIF();
3729 IEM_MC_END();
3730 }
3731 else
3732 {
3733 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3735
3736 IEM_MC_BEGIN(0, 0);
3737 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3738 IEM_MC_REL_JMP_S32(i32Imm);
3739 } IEM_MC_ELSE() {
3740 IEM_MC_ADVANCE_RIP();
3741 } IEM_MC_ENDIF();
3742 IEM_MC_END();
3743 }
3744 return VINF_SUCCESS;
3745}
3746
3747
3748/** Opcode 0x0f 0x85. */
3749FNIEMOP_DEF(iemOp_jne_Jv)
3750{
3751 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3752 IEMOP_HLP_MIN_386();
3753 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3754 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3755 {
3756 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3758
3759 IEM_MC_BEGIN(0, 0);
3760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3761 IEM_MC_ADVANCE_RIP();
3762 } IEM_MC_ELSE() {
3763 IEM_MC_REL_JMP_S16(i16Imm);
3764 } IEM_MC_ENDIF();
3765 IEM_MC_END();
3766 }
3767 else
3768 {
3769 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3771
3772 IEM_MC_BEGIN(0, 0);
3773 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3774 IEM_MC_ADVANCE_RIP();
3775 } IEM_MC_ELSE() {
3776 IEM_MC_REL_JMP_S32(i32Imm);
3777 } IEM_MC_ENDIF();
3778 IEM_MC_END();
3779 }
3780 return VINF_SUCCESS;
3781}
3782
3783
3784/** Opcode 0x0f 0x86. */
3785FNIEMOP_DEF(iemOp_jbe_Jv)
3786{
3787 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3788 IEMOP_HLP_MIN_386();
3789 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3790 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3791 {
3792 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3794
3795 IEM_MC_BEGIN(0, 0);
3796 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3797 IEM_MC_REL_JMP_S16(i16Imm);
3798 } IEM_MC_ELSE() {
3799 IEM_MC_ADVANCE_RIP();
3800 } IEM_MC_ENDIF();
3801 IEM_MC_END();
3802 }
3803 else
3804 {
3805 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3807
3808 IEM_MC_BEGIN(0, 0);
3809 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3810 IEM_MC_REL_JMP_S32(i32Imm);
3811 } IEM_MC_ELSE() {
3812 IEM_MC_ADVANCE_RIP();
3813 } IEM_MC_ENDIF();
3814 IEM_MC_END();
3815 }
3816 return VINF_SUCCESS;
3817}
3818
3819
3820/** Opcode 0x0f 0x87. */
3821FNIEMOP_DEF(iemOp_jnbe_Jv)
3822{
3823 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3824 IEMOP_HLP_MIN_386();
3825 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3826 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3827 {
3828 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3830
3831 IEM_MC_BEGIN(0, 0);
3832 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3833 IEM_MC_ADVANCE_RIP();
3834 } IEM_MC_ELSE() {
3835 IEM_MC_REL_JMP_S16(i16Imm);
3836 } IEM_MC_ENDIF();
3837 IEM_MC_END();
3838 }
3839 else
3840 {
3841 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3843
3844 IEM_MC_BEGIN(0, 0);
3845 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3846 IEM_MC_ADVANCE_RIP();
3847 } IEM_MC_ELSE() {
3848 IEM_MC_REL_JMP_S32(i32Imm);
3849 } IEM_MC_ENDIF();
3850 IEM_MC_END();
3851 }
3852 return VINF_SUCCESS;
3853}
3854
3855
3856/** Opcode 0x0f 0x88. */
3857FNIEMOP_DEF(iemOp_js_Jv)
3858{
3859 IEMOP_MNEMONIC(js_Jv, "js Jv");
3860 IEMOP_HLP_MIN_386();
3861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3862 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3863 {
3864 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3866
3867 IEM_MC_BEGIN(0, 0);
3868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3869 IEM_MC_REL_JMP_S16(i16Imm);
3870 } IEM_MC_ELSE() {
3871 IEM_MC_ADVANCE_RIP();
3872 } IEM_MC_ENDIF();
3873 IEM_MC_END();
3874 }
3875 else
3876 {
3877 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3879
3880 IEM_MC_BEGIN(0, 0);
3881 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3882 IEM_MC_REL_JMP_S32(i32Imm);
3883 } IEM_MC_ELSE() {
3884 IEM_MC_ADVANCE_RIP();
3885 } IEM_MC_ENDIF();
3886 IEM_MC_END();
3887 }
3888 return VINF_SUCCESS;
3889}
3890
3891
3892/** Opcode 0x0f 0x89. */
3893FNIEMOP_DEF(iemOp_jns_Jv)
3894{
3895 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3896 IEMOP_HLP_MIN_386();
3897 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3898 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3899 {
3900 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3902
3903 IEM_MC_BEGIN(0, 0);
3904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3905 IEM_MC_ADVANCE_RIP();
3906 } IEM_MC_ELSE() {
3907 IEM_MC_REL_JMP_S16(i16Imm);
3908 } IEM_MC_ENDIF();
3909 IEM_MC_END();
3910 }
3911 else
3912 {
3913 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3915
3916 IEM_MC_BEGIN(0, 0);
3917 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3918 IEM_MC_ADVANCE_RIP();
3919 } IEM_MC_ELSE() {
3920 IEM_MC_REL_JMP_S32(i32Imm);
3921 } IEM_MC_ENDIF();
3922 IEM_MC_END();
3923 }
3924 return VINF_SUCCESS;
3925}
3926
3927
3928/** Opcode 0x0f 0x8a. */
3929FNIEMOP_DEF(iemOp_jp_Jv)
3930{
3931 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3932 IEMOP_HLP_MIN_386();
3933 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3934 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3935 {
3936 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3938
3939 IEM_MC_BEGIN(0, 0);
3940 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3941 IEM_MC_REL_JMP_S16(i16Imm);
3942 } IEM_MC_ELSE() {
3943 IEM_MC_ADVANCE_RIP();
3944 } IEM_MC_ENDIF();
3945 IEM_MC_END();
3946 }
3947 else
3948 {
3949 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3951
3952 IEM_MC_BEGIN(0, 0);
3953 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3954 IEM_MC_REL_JMP_S32(i32Imm);
3955 } IEM_MC_ELSE() {
3956 IEM_MC_ADVANCE_RIP();
3957 } IEM_MC_ENDIF();
3958 IEM_MC_END();
3959 }
3960 return VINF_SUCCESS;
3961}
3962
3963
3964/** Opcode 0x0f 0x8b. */
3965FNIEMOP_DEF(iemOp_jnp_Jv)
3966{
3967 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3968 IEMOP_HLP_MIN_386();
3969 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3970 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3971 {
3972 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3974
3975 IEM_MC_BEGIN(0, 0);
3976 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3977 IEM_MC_ADVANCE_RIP();
3978 } IEM_MC_ELSE() {
3979 IEM_MC_REL_JMP_S16(i16Imm);
3980 } IEM_MC_ENDIF();
3981 IEM_MC_END();
3982 }
3983 else
3984 {
3985 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3987
3988 IEM_MC_BEGIN(0, 0);
3989 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3990 IEM_MC_ADVANCE_RIP();
3991 } IEM_MC_ELSE() {
3992 IEM_MC_REL_JMP_S32(i32Imm);
3993 } IEM_MC_ENDIF();
3994 IEM_MC_END();
3995 }
3996 return VINF_SUCCESS;
3997}
3998
3999
4000/** Opcode 0x0f 0x8c. */
4001FNIEMOP_DEF(iemOp_jl_Jv)
4002{
4003 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4004 IEMOP_HLP_MIN_386();
4005 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4006 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4007 {
4008 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4010
4011 IEM_MC_BEGIN(0, 0);
4012 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4013 IEM_MC_REL_JMP_S16(i16Imm);
4014 } IEM_MC_ELSE() {
4015 IEM_MC_ADVANCE_RIP();
4016 } IEM_MC_ENDIF();
4017 IEM_MC_END();
4018 }
4019 else
4020 {
4021 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4023
4024 IEM_MC_BEGIN(0, 0);
4025 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4026 IEM_MC_REL_JMP_S32(i32Imm);
4027 } IEM_MC_ELSE() {
4028 IEM_MC_ADVANCE_RIP();
4029 } IEM_MC_ENDIF();
4030 IEM_MC_END();
4031 }
4032 return VINF_SUCCESS;
4033}
4034
4035
4036/** Opcode 0x0f 0x8d. */
4037FNIEMOP_DEF(iemOp_jnl_Jv)
4038{
4039 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4040 IEMOP_HLP_MIN_386();
4041 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4042 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4043 {
4044 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4046
4047 IEM_MC_BEGIN(0, 0);
4048 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4049 IEM_MC_ADVANCE_RIP();
4050 } IEM_MC_ELSE() {
4051 IEM_MC_REL_JMP_S16(i16Imm);
4052 } IEM_MC_ENDIF();
4053 IEM_MC_END();
4054 }
4055 else
4056 {
4057 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4059
4060 IEM_MC_BEGIN(0, 0);
4061 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4062 IEM_MC_ADVANCE_RIP();
4063 } IEM_MC_ELSE() {
4064 IEM_MC_REL_JMP_S32(i32Imm);
4065 } IEM_MC_ENDIF();
4066 IEM_MC_END();
4067 }
4068 return VINF_SUCCESS;
4069}
4070
4071
4072/** Opcode 0x0f 0x8e. */
4073FNIEMOP_DEF(iemOp_jle_Jv)
4074{
4075 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4076 IEMOP_HLP_MIN_386();
4077 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4078 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4079 {
4080 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4082
4083 IEM_MC_BEGIN(0, 0);
4084 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4085 IEM_MC_REL_JMP_S16(i16Imm);
4086 } IEM_MC_ELSE() {
4087 IEM_MC_ADVANCE_RIP();
4088 } IEM_MC_ENDIF();
4089 IEM_MC_END();
4090 }
4091 else
4092 {
4093 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4095
4096 IEM_MC_BEGIN(0, 0);
4097 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4098 IEM_MC_REL_JMP_S32(i32Imm);
4099 } IEM_MC_ELSE() {
4100 IEM_MC_ADVANCE_RIP();
4101 } IEM_MC_ENDIF();
4102 IEM_MC_END();
4103 }
4104 return VINF_SUCCESS;
4105}
4106
4107
4108/** Opcode 0x0f 0x8f. */
4109FNIEMOP_DEF(iemOp_jnle_Jv)
4110{
4111 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4112 IEMOP_HLP_MIN_386();
4113 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4114 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4115 {
4116 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4118
4119 IEM_MC_BEGIN(0, 0);
4120 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4121 IEM_MC_ADVANCE_RIP();
4122 } IEM_MC_ELSE() {
4123 IEM_MC_REL_JMP_S16(i16Imm);
4124 } IEM_MC_ENDIF();
4125 IEM_MC_END();
4126 }
4127 else
4128 {
4129 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4131
4132 IEM_MC_BEGIN(0, 0);
4133 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4134 IEM_MC_ADVANCE_RIP();
4135 } IEM_MC_ELSE() {
4136 IEM_MC_REL_JMP_S32(i32Imm);
4137 } IEM_MC_ENDIF();
4138 IEM_MC_END();
4139 }
4140 return VINF_SUCCESS;
4141}
4142
4143
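/*
 * Opcodes 0x0f 0x90 thru 0x0f 0x9f - SETcc Eb. Each handler stores 1 or 0 to
 * a byte-sized register or memory destination depending on its EFLAGS
 * condition; the ModRM reg field is not decoded (see the todo in each
 * handler).
 */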
4144/** Opcode 0x0f 0x90. */
4145FNIEMOP_DEF(iemOp_seto_Eb)
4146{
4147 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4148 IEMOP_HLP_MIN_386();
4149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4150
4151 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4152 * any way. AMD says it's "unused", whatever that means. We're
4153 * ignoring for now. */
4154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4155 {
4156 /* register target */
4157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4158 IEM_MC_BEGIN(0, 0);
4159 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4160 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4161 } IEM_MC_ELSE() {
4162 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4163 } IEM_MC_ENDIF();
4164 IEM_MC_ADVANCE_RIP();
4165 IEM_MC_END();
4166 }
4167 else
4168 {
4169 /* memory target */
4170 IEM_MC_BEGIN(0, 1);
4171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4174 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4175 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4176 } IEM_MC_ELSE() {
4177 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4178 } IEM_MC_ENDIF();
4179 IEM_MC_ADVANCE_RIP();
4180 IEM_MC_END();
4181 }
4182 return VINF_SUCCESS;
4183}
4184
4185
4186/** Opcode 0x0f 0x91. */
4187FNIEMOP_DEF(iemOp_setno_Eb)
4188{
4189 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4190 IEMOP_HLP_MIN_386();
4191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4192
4193 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4194 * any way. AMD says it's "unused", whatever that means. We're
4195 * ignoring for now. */
4196 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4197 {
4198 /* register target */
4199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4200 IEM_MC_BEGIN(0, 0);
4201 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4202 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4203 } IEM_MC_ELSE() {
4204 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4205 } IEM_MC_ENDIF();
4206 IEM_MC_ADVANCE_RIP();
4207 IEM_MC_END();
4208 }
4209 else
4210 {
4211 /* memory target */
4212 IEM_MC_BEGIN(0, 1);
4213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4216 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4217 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4218 } IEM_MC_ELSE() {
4219 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4220 } IEM_MC_ENDIF();
4221 IEM_MC_ADVANCE_RIP();
4222 IEM_MC_END();
4223 }
4224 return VINF_SUCCESS;
4225}
4226
4227
4228/** Opcode 0x0f 0x92. */
4229FNIEMOP_DEF(iemOp_setc_Eb)
4230{
4231 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4232 IEMOP_HLP_MIN_386();
4233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4234
4235 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4236 * any way. AMD says it's "unused", whatever that means. We're
4237 * ignoring for now. */
4238 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4239 {
4240 /* register target */
4241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4242 IEM_MC_BEGIN(0, 0);
4243 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4244 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4245 } IEM_MC_ELSE() {
4246 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4247 } IEM_MC_ENDIF();
4248 IEM_MC_ADVANCE_RIP();
4249 IEM_MC_END();
4250 }
4251 else
4252 {
4253 /* memory target */
4254 IEM_MC_BEGIN(0, 1);
4255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4258 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4259 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4260 } IEM_MC_ELSE() {
4261 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4262 } IEM_MC_ENDIF();
4263 IEM_MC_ADVANCE_RIP();
4264 IEM_MC_END();
4265 }
4266 return VINF_SUCCESS;
4267}
4268
4269
4270/** Opcode 0x0f 0x93. */
4271FNIEMOP_DEF(iemOp_setnc_Eb)
4272{
4273 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4274 IEMOP_HLP_MIN_386();
4275 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4276
4277 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4278 * any way. AMD says it's "unused", whatever that means. We're
4279 * ignoring for now. */
4280 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4281 {
4282 /* register target */
4283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4284 IEM_MC_BEGIN(0, 0);
4285 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4286 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4287 } IEM_MC_ELSE() {
4288 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4289 } IEM_MC_ENDIF();
4290 IEM_MC_ADVANCE_RIP();
4291 IEM_MC_END();
4292 }
4293 else
4294 {
4295 /* memory target */
4296 IEM_MC_BEGIN(0, 1);
4297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4300 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4301 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4302 } IEM_MC_ELSE() {
4303 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4304 } IEM_MC_ENDIF();
4305 IEM_MC_ADVANCE_RIP();
4306 IEM_MC_END();
4307 }
4308 return VINF_SUCCESS;
4309}
4310
4311
4312/** Opcode 0x0f 0x94. */
4313FNIEMOP_DEF(iemOp_sete_Eb)
4314{
4315 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4316 IEMOP_HLP_MIN_386();
4317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4318
4319 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4320 * any way. AMD says it's "unused", whatever that means. We're
4321 * ignoring for now. */
4322 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4323 {
4324 /* register target */
4325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4326 IEM_MC_BEGIN(0, 0);
4327 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4328 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4329 } IEM_MC_ELSE() {
4330 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4331 } IEM_MC_ENDIF();
4332 IEM_MC_ADVANCE_RIP();
4333 IEM_MC_END();
4334 }
4335 else
4336 {
4337 /* memory target */
4338 IEM_MC_BEGIN(0, 1);
4339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4342 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4343 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4344 } IEM_MC_ELSE() {
4345 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4346 } IEM_MC_ENDIF();
4347 IEM_MC_ADVANCE_RIP();
4348 IEM_MC_END();
4349 }
4350 return VINF_SUCCESS;
4351}
4352
4353
4354/** Opcode 0x0f 0x95. */
4355FNIEMOP_DEF(iemOp_setne_Eb)
4356{
4357 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4358 IEMOP_HLP_MIN_386();
4359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4360
4361 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4362 * any way. AMD says it's "unused", whatever that means. We're
4363 * ignoring for now. */
4364 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4365 {
4366 /* register target */
4367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4368 IEM_MC_BEGIN(0, 0);
4369 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4370 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4371 } IEM_MC_ELSE() {
4372 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4373 } IEM_MC_ENDIF();
4374 IEM_MC_ADVANCE_RIP();
4375 IEM_MC_END();
4376 }
4377 else
4378 {
4379 /* memory target */
4380 IEM_MC_BEGIN(0, 1);
4381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4384 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4385 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4386 } IEM_MC_ELSE() {
4387 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4388 } IEM_MC_ENDIF();
4389 IEM_MC_ADVANCE_RIP();
4390 IEM_MC_END();
4391 }
4392 return VINF_SUCCESS;
4393}
4394
4395
4396/** Opcode 0x0f 0x96. */
4397FNIEMOP_DEF(iemOp_setbe_Eb)
4398{
4399 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4400 IEMOP_HLP_MIN_386();
4401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4402
4403 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4404 * any way. AMD says it's "unused", whatever that means. We're
4405 * ignoring for now. */
4406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4407 {
4408 /* register target */
4409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4410 IEM_MC_BEGIN(0, 0);
4411 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4412 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4413 } IEM_MC_ELSE() {
4414 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4415 } IEM_MC_ENDIF();
4416 IEM_MC_ADVANCE_RIP();
4417 IEM_MC_END();
4418 }
4419 else
4420 {
4421 /* memory target */
4422 IEM_MC_BEGIN(0, 1);
4423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4426 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4427 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4428 } IEM_MC_ELSE() {
4429 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4430 } IEM_MC_ENDIF();
4431 IEM_MC_ADVANCE_RIP();
4432 IEM_MC_END();
4433 }
4434 return VINF_SUCCESS;
4435}
4436
4437
4438/** Opcode 0x0f 0x97. */
4439FNIEMOP_DEF(iemOp_setnbe_Eb)
4440{
4441 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4442 IEMOP_HLP_MIN_386();
4443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4444
4445 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4446 * any way. AMD says it's "unused", whatever that means. We're
4447 * ignoring for now. */
4448 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4449 {
4450 /* register target */
4451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4452 IEM_MC_BEGIN(0, 0);
4453 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4454 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4455 } IEM_MC_ELSE() {
4456 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4457 } IEM_MC_ENDIF();
4458 IEM_MC_ADVANCE_RIP();
4459 IEM_MC_END();
4460 }
4461 else
4462 {
4463 /* memory target */
4464 IEM_MC_BEGIN(0, 1);
4465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4468 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4469 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4470 } IEM_MC_ELSE() {
4471 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4472 } IEM_MC_ENDIF();
4473 IEM_MC_ADVANCE_RIP();
4474 IEM_MC_END();
4475 }
4476 return VINF_SUCCESS;
4477}
4478
4479
4480/** Opcode 0x0f 0x98. */
4481FNIEMOP_DEF(iemOp_sets_Eb)
4482{
4483 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4484 IEMOP_HLP_MIN_386();
4485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4486
4487 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4488 * any way. AMD says it's "unused", whatever that means. We're
4489 * ignoring for now. */
4490 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4491 {
4492 /* register target */
4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4494 IEM_MC_BEGIN(0, 0);
4495 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4496 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4497 } IEM_MC_ELSE() {
4498 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4499 } IEM_MC_ENDIF();
4500 IEM_MC_ADVANCE_RIP();
4501 IEM_MC_END();
4502 }
4503 else
4504 {
4505 /* memory target */
4506 IEM_MC_BEGIN(0, 1);
4507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4510 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4511 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4512 } IEM_MC_ELSE() {
4513 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4514 } IEM_MC_ENDIF();
4515 IEM_MC_ADVANCE_RIP();
4516 IEM_MC_END();
4517 }
4518 return VINF_SUCCESS;
4519}
4520
4521
4522/** Opcode 0x0f 0x99. */
4523FNIEMOP_DEF(iemOp_setns_Eb)
4524{
4525 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4526 IEMOP_HLP_MIN_386();
4527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4528
4529 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4530 * any way. AMD says it's "unused", whatever that means. We're
4531 * ignoring for now. */
4532 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4533 {
4534 /* register target */
4535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4536 IEM_MC_BEGIN(0, 0);
4537 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4538 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4539 } IEM_MC_ELSE() {
4540 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4541 } IEM_MC_ENDIF();
4542 IEM_MC_ADVANCE_RIP();
4543 IEM_MC_END();
4544 }
4545 else
4546 {
4547 /* memory target */
4548 IEM_MC_BEGIN(0, 1);
4549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4552 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4553 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4554 } IEM_MC_ELSE() {
4555 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4556 } IEM_MC_ENDIF();
4557 IEM_MC_ADVANCE_RIP();
4558 IEM_MC_END();
4559 }
4560 return VINF_SUCCESS;
4561}
4562
4563
4564/** Opcode 0x0f 0x9a. */
4565FNIEMOP_DEF(iemOp_setp_Eb)
4566{
4567 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4568 IEMOP_HLP_MIN_386();
4569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4570
4571 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4572 * any way. AMD says it's "unused", whatever that means. We're
4573 * ignoring for now. */
4574 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4575 {
4576 /* register target */
4577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4578 IEM_MC_BEGIN(0, 0);
4579 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4580 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4581 } IEM_MC_ELSE() {
4582 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4583 } IEM_MC_ENDIF();
4584 IEM_MC_ADVANCE_RIP();
4585 IEM_MC_END();
4586 }
4587 else
4588 {
4589 /* memory target */
4590 IEM_MC_BEGIN(0, 1);
4591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4594 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4595 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4596 } IEM_MC_ELSE() {
4597 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4598 } IEM_MC_ENDIF();
4599 IEM_MC_ADVANCE_RIP();
4600 IEM_MC_END();
4601 }
4602 return VINF_SUCCESS;
4603}
4604
4605
4606/** Opcode 0x0f 0x9b. */
4607FNIEMOP_DEF(iemOp_setnp_Eb)
4608{
4609 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4610 IEMOP_HLP_MIN_386();
4611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4612
4613 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4614 * any way. AMD says it's "unused", whatever that means. We're
4615 * ignoring for now. */
4616 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4617 {
4618 /* register target */
4619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4620 IEM_MC_BEGIN(0, 0);
4621 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4622 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4623 } IEM_MC_ELSE() {
4624 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4625 } IEM_MC_ENDIF();
4626 IEM_MC_ADVANCE_RIP();
4627 IEM_MC_END();
4628 }
4629 else
4630 {
4631 /* memory target */
4632 IEM_MC_BEGIN(0, 1);
4633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4637 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4638 } IEM_MC_ELSE() {
4639 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4640 } IEM_MC_ENDIF();
4641 IEM_MC_ADVANCE_RIP();
4642 IEM_MC_END();
4643 }
4644 return VINF_SUCCESS;
4645}
4646
4647
4648/** Opcode 0x0f 0x9c. */
4649FNIEMOP_DEF(iemOp_setl_Eb)
4650{
4651 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4652 IEMOP_HLP_MIN_386();
4653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4654
4655 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4656 * any way. AMD says it's "unused", whatever that means. We're
4657 * ignoring for now. */
4658 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4659 {
4660 /* register target */
4661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4662 IEM_MC_BEGIN(0, 0);
4663 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4664 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4665 } IEM_MC_ELSE() {
4666 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4667 } IEM_MC_ENDIF();
4668 IEM_MC_ADVANCE_RIP();
4669 IEM_MC_END();
4670 }
4671 else
4672 {
4673 /* memory target */
4674 IEM_MC_BEGIN(0, 1);
4675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4678 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4679 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4680 } IEM_MC_ELSE() {
4681 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4682 } IEM_MC_ENDIF();
4683 IEM_MC_ADVANCE_RIP();
4684 IEM_MC_END();
4685 }
4686 return VINF_SUCCESS;
4687}
4688
4689
4690/** Opcode 0x0f 0x9d. */
4691FNIEMOP_DEF(iemOp_setnl_Eb)
4692{
4693 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4694 IEMOP_HLP_MIN_386();
4695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4696
4697 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4698 * any way. AMD says it's "unused", whatever that means. We're
4699 * ignoring for now. */
4700 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4701 {
4702 /* register target */
4703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4704 IEM_MC_BEGIN(0, 0);
4705 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4706 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4707 } IEM_MC_ELSE() {
4708 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4709 } IEM_MC_ENDIF();
4710 IEM_MC_ADVANCE_RIP();
4711 IEM_MC_END();
4712 }
4713 else
4714 {
4715 /* memory target */
4716 IEM_MC_BEGIN(0, 1);
4717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4720 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4721 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4722 } IEM_MC_ELSE() {
4723 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4724 } IEM_MC_ENDIF();
4725 IEM_MC_ADVANCE_RIP();
4726 IEM_MC_END();
4727 }
4728 return VINF_SUCCESS;
4729}
4730
4731
4732/** Opcode 0x0f 0x9e. */
4733FNIEMOP_DEF(iemOp_setle_Eb)
4734{
4735 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4736 IEMOP_HLP_MIN_386();
4737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4738
4739 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4740 * any way. AMD says it's "unused", whatever that means. We're
4741 * ignoring for now. */
4742 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4743 {
4744 /* register target */
4745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4746 IEM_MC_BEGIN(0, 0);
4747 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4748 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4749 } IEM_MC_ELSE() {
4750 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4751 } IEM_MC_ENDIF();
4752 IEM_MC_ADVANCE_RIP();
4753 IEM_MC_END();
4754 }
4755 else
4756 {
4757 /* memory target */
4758 IEM_MC_BEGIN(0, 1);
4759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4762 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4763 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4764 } IEM_MC_ELSE() {
4765 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4766 } IEM_MC_ENDIF();
4767 IEM_MC_ADVANCE_RIP();
4768 IEM_MC_END();
4769 }
4770 return VINF_SUCCESS;
4771}
4772
4773
4774/** Opcode 0x0f 0x9f. */
4775FNIEMOP_DEF(iemOp_setnle_Eb)
4776{
4777 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4778 IEMOP_HLP_MIN_386();
4779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4780
4781 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4782 * any way. AMD says it's "unused", whatever that means. We're
4783 * ignoring for now. */
4784 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4785 {
4786 /* register target */
4787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4788 IEM_MC_BEGIN(0, 0);
4789 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4790 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4791 } IEM_MC_ELSE() {
4792 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4793 } IEM_MC_ENDIF();
4794 IEM_MC_ADVANCE_RIP();
4795 IEM_MC_END();
4796 }
4797 else
4798 {
4799 /* memory target */
4800 IEM_MC_BEGIN(0, 1);
4801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4805 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4806 } IEM_MC_ELSE() {
4807 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4808 } IEM_MC_ENDIF();
4809 IEM_MC_ADVANCE_RIP();
4810 IEM_MC_END();
4811 }
4812 return VINF_SUCCESS;
4813}
4814
4815
4816/**
4817 * Common 'push segment-register' helper.
4818 */
4819FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4820{
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4822    Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS can be pushed in 64-bit mode. */
4823 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4824
4825 switch (pVCpu->iem.s.enmEffOpSize)
4826 {
4827 case IEMMODE_16BIT:
4828 IEM_MC_BEGIN(0, 1);
4829 IEM_MC_LOCAL(uint16_t, u16Value);
4830 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4831 IEM_MC_PUSH_U16(u16Value);
4832 IEM_MC_ADVANCE_RIP();
4833 IEM_MC_END();
4834 break;
4835
4836 case IEMMODE_32BIT:
4837 IEM_MC_BEGIN(0, 1);
4838 IEM_MC_LOCAL(uint32_t, u32Value);
4839 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
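            /* Deliberately IEM_MC_PUSH_U32_SREG rather than IEM_MC_PUSH_U32:
               when pushing a segment register with a 32-bit operand size, at
               least some CPUs only write the low 16 bits and leave the high
               word of the stack slot untouched. */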
4840 IEM_MC_PUSH_U32_SREG(u32Value);
4841 IEM_MC_ADVANCE_RIP();
4842 IEM_MC_END();
4843 break;
4844
4845 case IEMMODE_64BIT:
4846 IEM_MC_BEGIN(0, 1);
4847 IEM_MC_LOCAL(uint64_t, u64Value);
4848 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4849 IEM_MC_PUSH_U64(u64Value);
4850 IEM_MC_ADVANCE_RIP();
4851 IEM_MC_END();
4852 break;
4853 }
4854
4855 return VINF_SUCCESS;
4856}
4857
4858
4859/** Opcode 0x0f 0xa0. */
4860FNIEMOP_DEF(iemOp_push_fs)
4861{
4862 IEMOP_MNEMONIC(push_fs, "push fs");
4863 IEMOP_HLP_MIN_386();
4864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4865 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4866}
4867
4868
4869/** Opcode 0x0f 0xa1. */
4870FNIEMOP_DEF(iemOp_pop_fs)
4871{
4872 IEMOP_MNEMONIC(pop_fs, "pop fs");
4873 IEMOP_HLP_MIN_386();
4874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4875 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4876}
4877
4878
4879/** Opcode 0x0f 0xa2. */
4880FNIEMOP_DEF(iemOp_cpuid)
4881{
4882 IEMOP_MNEMONIC(cpuid, "cpuid");
4883 IEMOP_HLP_MIN_486(); /* not all 486es. */
4884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4885 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4886}
4887
4888
4889/**
4890 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4891 * iemOp_bts_Ev_Gv.
4892 */
4893FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4894{
4895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4896 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4897
4898 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4899 {
4900 /* register destination. */
4901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4902 switch (pVCpu->iem.s.enmEffOpSize)
4903 {
4904 case IEMMODE_16BIT:
4905 IEM_MC_BEGIN(3, 0);
4906 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4907 IEM_MC_ARG(uint16_t, u16Src, 1);
4908 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4909
4910 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4911 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4912 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4913 IEM_MC_REF_EFLAGS(pEFlags);
4914 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4915
4916 IEM_MC_ADVANCE_RIP();
4917 IEM_MC_END();
4918 return VINF_SUCCESS;
4919
4920 case IEMMODE_32BIT:
4921 IEM_MC_BEGIN(3, 0);
4922 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4923 IEM_MC_ARG(uint32_t, u32Src, 1);
4924 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4925
4926 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4927 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4928 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4929 IEM_MC_REF_EFLAGS(pEFlags);
4930 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4931
4932 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4933 IEM_MC_ADVANCE_RIP();
4934 IEM_MC_END();
4935 return VINF_SUCCESS;
4936
4937 case IEMMODE_64BIT:
4938 IEM_MC_BEGIN(3, 0);
4939 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4940 IEM_MC_ARG(uint64_t, u64Src, 1);
4941 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4942
4943 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4944 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4945 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4946 IEM_MC_REF_EFLAGS(pEFlags);
4947 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4948
4949 IEM_MC_ADVANCE_RIP();
4950 IEM_MC_END();
4951 return VINF_SUCCESS;
4952
4953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4954 }
4955 }
4956 else
4957 {
4958 /* memory destination. */
4959
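        /* Of the four instructions sharing this worker, only BT leaves the
           destination untouched, so it is the only one without locked
           variants; a NULL pfnLockedU16 thus doubles as the read-only
           indicator here. */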
4960 uint32_t fAccess;
4961 if (pImpl->pfnLockedU16)
4962 fAccess = IEM_ACCESS_DATA_RW;
4963 else /* BT */
4964 fAccess = IEM_ACCESS_DATA_R;
4965
4966 /** @todo test negative bit offsets! */
4967 switch (pVCpu->iem.s.enmEffOpSize)
4968 {
4969 case IEMMODE_16BIT:
4970 IEM_MC_BEGIN(3, 2);
4971 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4972 IEM_MC_ARG(uint16_t, u16Src, 1);
4973 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4975 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4976
4977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4978 if (pImpl->pfnLockedU16)
4979 IEMOP_HLP_DONE_DECODING();
4980 else
4981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4982 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
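                /* The register form of BT/BTS/BTR/BTC can address bits outside
                   the operand: split the signed bit offset into a word index
                   (arithmetic shift right by 4, preserving the sign of
                   negative offsets) and a bit-in-word index (the low 4 bits).
                   The word index times two (shift left by 1) adjusts the
                   effective address; e.g. a bit offset of -1 selects bit 15 of
                   the word two bytes below GCPtrEffDst. The 32-bit and 64-bit
                   cases below do the same with shift counts of 5/2 and 6/3. */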
4983 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4984 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4985 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4986 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4987 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4988 IEM_MC_FETCH_EFLAGS(EFlags);
4989
4990 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4991 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4992 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4993 else
4994 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4995 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4996
4997 IEM_MC_COMMIT_EFLAGS(EFlags);
4998 IEM_MC_ADVANCE_RIP();
4999 IEM_MC_END();
5000 return VINF_SUCCESS;
5001
5002 case IEMMODE_32BIT:
5003 IEM_MC_BEGIN(3, 2);
5004 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5005 IEM_MC_ARG(uint32_t, u32Src, 1);
5006 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5008 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5009
5010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5011 if (pImpl->pfnLockedU16)
5012 IEMOP_HLP_DONE_DECODING();
5013 else
5014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5015 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5016 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5017 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5018 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5019 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5020 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5021 IEM_MC_FETCH_EFLAGS(EFlags);
5022
5023 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5024 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5025 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5026 else
5027 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5028 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5029
5030 IEM_MC_COMMIT_EFLAGS(EFlags);
5031 IEM_MC_ADVANCE_RIP();
5032 IEM_MC_END();
5033 return VINF_SUCCESS;
5034
5035 case IEMMODE_64BIT:
5036 IEM_MC_BEGIN(3, 2);
5037 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5038 IEM_MC_ARG(uint64_t, u64Src, 1);
5039 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5041 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5042
5043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5044 if (pImpl->pfnLockedU16)
5045 IEMOP_HLP_DONE_DECODING();
5046 else
5047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5048 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5049 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5050 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5051 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5052 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5053 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5054 IEM_MC_FETCH_EFLAGS(EFlags);
5055
5056 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5057 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5058 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5059 else
5060 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5061 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5062
5063 IEM_MC_COMMIT_EFLAGS(EFlags);
5064 IEM_MC_ADVANCE_RIP();
5065 IEM_MC_END();
5066 return VINF_SUCCESS;
5067
5068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5069 }
5070 }
5071}
5072
5073
5074/** Opcode 0x0f 0xa3. */
5075FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5076{
5077 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5078 IEMOP_HLP_MIN_386();
5079 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5080}
5081
5082
5083/**
5084 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5085 */
5086FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5087{
5088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5089 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5090
5091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5092 {
5093 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5095
5096 switch (pVCpu->iem.s.enmEffOpSize)
5097 {
5098 case IEMMODE_16BIT:
5099 IEM_MC_BEGIN(4, 0);
5100 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5101 IEM_MC_ARG(uint16_t, u16Src, 1);
5102 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5103 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5104
5105 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5106 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5107 IEM_MC_REF_EFLAGS(pEFlags);
5108 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5109
5110 IEM_MC_ADVANCE_RIP();
5111 IEM_MC_END();
5112 return VINF_SUCCESS;
5113
5114 case IEMMODE_32BIT:
5115 IEM_MC_BEGIN(4, 0);
5116 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5117 IEM_MC_ARG(uint32_t, u32Src, 1);
5118 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5119 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5120
5121 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5122 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5123 IEM_MC_REF_EFLAGS(pEFlags);
5124 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5125
5126 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5127 IEM_MC_ADVANCE_RIP();
5128 IEM_MC_END();
5129 return VINF_SUCCESS;
5130
5131 case IEMMODE_64BIT:
5132 IEM_MC_BEGIN(4, 0);
5133 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5134 IEM_MC_ARG(uint64_t, u64Src, 1);
5135 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5136 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5137
5138 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5139 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5140 IEM_MC_REF_EFLAGS(pEFlags);
5141 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5142
5143 IEM_MC_ADVANCE_RIP();
5144 IEM_MC_END();
5145 return VINF_SUCCESS;
5146
5147 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5148 }
5149 }
5150 else
5151 {
5152 switch (pVCpu->iem.s.enmEffOpSize)
5153 {
5154 case IEMMODE_16BIT:
5155 IEM_MC_BEGIN(4, 2);
5156 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5157 IEM_MC_ARG(uint16_t, u16Src, 1);
5158 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5161
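                /* Note the third argument: the Ib shift count still follows
                   the ModRM operand, and the effective address calculation
                   must know about the pending immediate byte so that
                   RIP-relative operands in 64-bit mode resolve past the whole
                   instruction. */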
5162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5163 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5164 IEM_MC_ASSIGN(cShiftArg, cShift);
5165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5166 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5167 IEM_MC_FETCH_EFLAGS(EFlags);
5168 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5169 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5170
5171 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5172 IEM_MC_COMMIT_EFLAGS(EFlags);
5173 IEM_MC_ADVANCE_RIP();
5174 IEM_MC_END();
5175 return VINF_SUCCESS;
5176
5177 case IEMMODE_32BIT:
5178 IEM_MC_BEGIN(4, 2);
5179 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5180 IEM_MC_ARG(uint32_t, u32Src, 1);
5181 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5182 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5184
5185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5186 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5187 IEM_MC_ASSIGN(cShiftArg, cShift);
5188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5189 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5190 IEM_MC_FETCH_EFLAGS(EFlags);
5191 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5192 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5193
5194 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5195 IEM_MC_COMMIT_EFLAGS(EFlags);
5196 IEM_MC_ADVANCE_RIP();
5197 IEM_MC_END();
5198 return VINF_SUCCESS;
5199
5200 case IEMMODE_64BIT:
5201 IEM_MC_BEGIN(4, 2);
5202 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5203 IEM_MC_ARG(uint64_t, u64Src, 1);
5204 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5205 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5207
5208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5209 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5210 IEM_MC_ASSIGN(cShiftArg, cShift);
5211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5212 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5213 IEM_MC_FETCH_EFLAGS(EFlags);
5214 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5215 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5216
5217 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5218 IEM_MC_COMMIT_EFLAGS(EFlags);
5219 IEM_MC_ADVANCE_RIP();
5220 IEM_MC_END();
5221 return VINF_SUCCESS;
5222
5223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5224 }
5225 }
5226}
5227
5228
5229/**
5230 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5231 */
5232FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5233{
5234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5235 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5236
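    /* This worker differs from the Ib variant above only in where the shift
       count comes from: it is fetched from CL at runtime instead of being
       decoded as an immediate, so no opcode bytes follow the ModRM operand
       and the memory path passes 0 for the immediate size. */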
5237 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5238 {
5239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5240
5241 switch (pVCpu->iem.s.enmEffOpSize)
5242 {
5243 case IEMMODE_16BIT:
5244 IEM_MC_BEGIN(4, 0);
5245 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5246 IEM_MC_ARG(uint16_t, u16Src, 1);
5247 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5248 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5249
5250 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5251 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5252 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5253 IEM_MC_REF_EFLAGS(pEFlags);
5254 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5255
5256 IEM_MC_ADVANCE_RIP();
5257 IEM_MC_END();
5258 return VINF_SUCCESS;
5259
5260 case IEMMODE_32BIT:
5261 IEM_MC_BEGIN(4, 0);
5262 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5263 IEM_MC_ARG(uint32_t, u32Src, 1);
5264 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5265 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5266
5267 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5268 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5269 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5270 IEM_MC_REF_EFLAGS(pEFlags);
5271 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5272
5273 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5274 IEM_MC_ADVANCE_RIP();
5275 IEM_MC_END();
5276 return VINF_SUCCESS;
5277
5278 case IEMMODE_64BIT:
5279 IEM_MC_BEGIN(4, 0);
5280 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5281 IEM_MC_ARG(uint64_t, u64Src, 1);
5282 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5283 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5284
5285 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5286 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5287 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5288 IEM_MC_REF_EFLAGS(pEFlags);
5289 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5290
5291 IEM_MC_ADVANCE_RIP();
5292 IEM_MC_END();
5293 return VINF_SUCCESS;
5294
5295 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5296 }
5297 }
5298 else
5299 {
5300 switch (pVCpu->iem.s.enmEffOpSize)
5301 {
5302 case IEMMODE_16BIT:
5303 IEM_MC_BEGIN(4, 2);
5304 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5305 IEM_MC_ARG(uint16_t, u16Src, 1);
5306 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5307 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5309
5310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5312 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5313 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5314 IEM_MC_FETCH_EFLAGS(EFlags);
5315 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5316 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5317
5318 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5319 IEM_MC_COMMIT_EFLAGS(EFlags);
5320 IEM_MC_ADVANCE_RIP();
5321 IEM_MC_END();
5322 return VINF_SUCCESS;
5323
5324 case IEMMODE_32BIT:
5325 IEM_MC_BEGIN(4, 2);
5326 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5327 IEM_MC_ARG(uint32_t, u32Src, 1);
5328 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5329 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5331
5332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5334 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5335 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5336 IEM_MC_FETCH_EFLAGS(EFlags);
5337 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5338 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5339
5340 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5341 IEM_MC_COMMIT_EFLAGS(EFlags);
5342 IEM_MC_ADVANCE_RIP();
5343 IEM_MC_END();
5344 return VINF_SUCCESS;
5345
5346 case IEMMODE_64BIT:
5347 IEM_MC_BEGIN(4, 2);
5348 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5349 IEM_MC_ARG(uint64_t, u64Src, 1);
5350 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5351 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5353
5354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5356 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5357 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5358 IEM_MC_FETCH_EFLAGS(EFlags);
5359 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5360 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5361
5362 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5363 IEM_MC_COMMIT_EFLAGS(EFlags);
5364 IEM_MC_ADVANCE_RIP();
5365 IEM_MC_END();
5366 return VINF_SUCCESS;
5367
5368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5369 }
5370 }
5371}
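
/*
 * Editor's sketch (not part of the build): the double-precision shift that the
 * pfnNormalUxx workers above implement, shown for the 32-bit SHLD case.  The
 * helper name and the reduced flag handling are illustrative assumptions, not
 * the actual g_iemAImpl_shld implementation.
 */
#if 0
static uint32_t iemShld32Ref(uint32_t uDst, uint32_t uSrc, uint8_t cShift, uint32_t *pfEFlags)
{
    cShift &= 31;                           /* the count is masked to 0..31 (0..63 for the 64-bit form) */
    if (!cShift)
        return uDst;                        /* flags are left untouched for a masked count of zero */
    uint32_t const uResult = (uDst << cShift) | (uSrc >> (32 - cShift)); /* vacated bits come from uSrc */
    uint32_t const fCarry  = (uDst >> (32 - cShift)) & 1;               /* CF = last bit shifted out of uDst */
    *pfEFlags = (*pfEFlags & ~X86_EFL_CF) | (fCarry ? X86_EFL_CF : 0);
    /* SF/ZF/PF follow the result; AF and OF are undefined (see the
       IEMOP_VERIFICATION_UNDEFINED_EFLAGS call above) and elided here. */
    return uResult;
}
#endif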
5372
5373
5375/** Opcode 0x0f 0xa4. */
5376FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5377{
5378 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5379 IEMOP_HLP_MIN_386();
5380 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5381}
5382
5383
5384/** Opcode 0x0f 0xa5. */
5385FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5386{
5387 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5388 IEMOP_HLP_MIN_386();
5389 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5390}
5391
5392
5393/** Opcode 0x0f 0xa8. */
5394FNIEMOP_DEF(iemOp_push_gs)
5395{
5396 IEMOP_MNEMONIC(push_gs, "push gs");
5397 IEMOP_HLP_MIN_386();
5398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5399 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5400}
5401
5402
5403/** Opcode 0x0f 0xa9. */
5404FNIEMOP_DEF(iemOp_pop_gs)
5405{
5406 IEMOP_MNEMONIC(pop_gs, "pop gs");
5407 IEMOP_HLP_MIN_386();
5408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5409 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5410}
5411
5412
5413/** Opcode 0x0f 0xaa. */
5414FNIEMOP_STUB(iemOp_rsm);
5415//IEMOP_HLP_MIN_386();
5416
5417
5418/** Opcode 0x0f 0xab. */
5419FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5420{
5421 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5422 IEMOP_HLP_MIN_386();
5423 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5424}
5425
5426
5427/** Opcode 0x0f 0xac. */
5428FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5429{
5430 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5431 IEMOP_HLP_MIN_386();
5432 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5433}
5434
5435
5436/** Opcode 0x0f 0xad. */
5437FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5438{
5439 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5440 IEMOP_HLP_MIN_386();
5441 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5442}
5443
5444
5445/** Opcode 0x0f 0xae mem/0. */
5446FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5447{
5448 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5449 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5450 return IEMOP_RAISE_INVALID_OPCODE();
5451
5452 IEM_MC_BEGIN(3, 1);
5453 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5454 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5455 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5458 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5459 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5460 IEM_MC_END();
5461 return VINF_SUCCESS;
5462}
5463
5464
5465/** Opcode 0x0f 0xae mem/1. */
5466FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5467{
5468 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5469 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5470 return IEMOP_RAISE_INVALID_OPCODE();
5471
5472 IEM_MC_BEGIN(3, 1);
5473 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5474 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5475 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5478 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5479 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5480 IEM_MC_END();
5481 return VINF_SUCCESS;
5482}
5483
5484
5485/** Opcode 0x0f 0xae mem/2. */
5486FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5487
5488/** Opcode 0x0f 0xae mem/3. */
5489FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5490
5491/** Opcode 0x0f 0xae mem/4. */
5492FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5493
5494/** Opcode 0x0f 0xae mem/5. */
5495FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5496
5497/** Opcode 0x0f 0xae mem/6. */
5498FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5499
5500/** Opcode 0x0f 0xae mem/7. */
5501FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5502
5503
5504/** Opcode 0x0f 0xae 11b/5. */
5505FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5506{
5507 RT_NOREF_PV(bRm);
5508 IEMOP_MNEMONIC(lfence, "lfence");
5509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5510 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5511 return IEMOP_RAISE_INVALID_OPCODE();
5512
5513 IEM_MC_BEGIN(0, 0);
5514 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5515 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5516 else
5517 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5518 IEM_MC_ADVANCE_RIP();
5519 IEM_MC_END();
5520 return VINF_SUCCESS;
5521}
5522
5523
5524/** Opcode 0x0f 0xae 11b/6. */
5525FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5526{
5527 RT_NOREF_PV(bRm);
5528 IEMOP_MNEMONIC(mfence, "mfence");
5529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5530 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5531 return IEMOP_RAISE_INVALID_OPCODE();
5532
5533 IEM_MC_BEGIN(0, 0);
5534 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5535 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5536 else
5537 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5538 IEM_MC_ADVANCE_RIP();
5539 IEM_MC_END();
5540 return VINF_SUCCESS;
5541}
5542
5543
5544/** Opcode 0x0f 0xae 11b/7. */
5545FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5546{
5547 RT_NOREF_PV(bRm);
5548 IEMOP_MNEMONIC(sfence, "sfence");
5549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5550 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5551 return IEMOP_RAISE_INVALID_OPCODE();
5552
5553 IEM_MC_BEGIN(0, 0);
5554 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5555 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5556 else
5557 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5558 IEM_MC_ADVANCE_RIP();
5559 IEM_MC_END();
5560 return VINF_SUCCESS;
5561}
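
/*
 * Editor's sketch (not part of the build): iemAImpl_alt_mem_fence stands in
 * for the SSE2 fence instructions on hosts without them.  The sketch assumes
 * GCC-style inline assembly and is illustrative only; any locked
 * read-modify-write serialises memory accesses on x86, which is the classic
 * pre-SSE2 substitute for MFENCE (and thus also covers LFENCE/SFENCE needs).
 */
#if 0
static void iemAltMemFenceRef(void)
{
    uint32_t volatile u32Fence = 0;
    uint32_t          u32Tmp   = 0;
    /* XCHG with a memory operand is implicitly locked. */
    __asm__ __volatile__("xchgl %0, %1" : "+r" (u32Tmp), "+m" (u32Fence) : : "memory");
}
#endif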
5562
5563
5564/** Opcode 0xf3 0x0f 0xae 11b/0. */
5565FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5566
5567/** Opcode 0xf3 0x0f 0xae 11b/1. */
5568FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5569
5570/** Opcode 0xf3 0x0f 0xae 11b/2. */
5571FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5572
5573/** Opcode 0xf3 0x0f 0xae 11b/3. */
5574FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5575
5576
5577/** Opcode 0x0f 0xae. */
5578FNIEMOP_DEF(iemOp_Grp15)
5579{
5580/** @todo continue here tomorrow! (see bs3-cpu-decoding-1.c32 r113507). */
5581 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5583 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5584 {
5585 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5586 {
5587 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5588 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5589 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5590 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5591 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5592 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5593 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5594 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5596 }
5597 }
5598 else
5599 {
5600 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5601 {
5602 case 0:
5603 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5604 {
5605 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5606 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5607 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5608 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5609 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5610 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5611 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5612 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5614 }
5615 break;
5616
5617 case IEM_OP_PRF_REPZ:
5618 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5619 {
5620 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5621 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5622 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5623 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5624 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5625 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5626 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5627 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5629 }
5630 break;
5631
5632 default:
5633 return IEMOP_RAISE_INVALID_OPCODE();
5634 }
5635 }
5636}
5637
5638
5639/** Opcode 0x0f 0xaf. */
5640FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5641{
5642 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5643 IEMOP_HLP_MIN_386();
5644 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5645 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5646}
5647
5648
5649/** Opcode 0x0f 0xb0. */
5650FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5651{
5652 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5653 IEMOP_HLP_MIN_486();
5654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5655
5656 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5657 {
5658 IEMOP_HLP_DONE_DECODING();
5659 IEM_MC_BEGIN(4, 0);
5660 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5661 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5662 IEM_MC_ARG(uint8_t, u8Src, 2);
5663 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5664
5665 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5666 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5667 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5668 IEM_MC_REF_EFLAGS(pEFlags);
5669 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5670 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5671 else
5672 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5673
5674 IEM_MC_ADVANCE_RIP();
5675 IEM_MC_END();
5676 }
5677 else
5678 {
5679 IEM_MC_BEGIN(4, 3);
5680 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5681 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5682 IEM_MC_ARG(uint8_t, u8Src, 2);
5683 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5685 IEM_MC_LOCAL(uint8_t, u8Al);
5686
5687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5688 IEMOP_HLP_DONE_DECODING();
5689 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5690 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5691 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5692 IEM_MC_FETCH_EFLAGS(EFlags);
5693 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5694 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5695 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5696 else
5697 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5698
5699 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5700 IEM_MC_COMMIT_EFLAGS(EFlags);
5701 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5702 IEM_MC_ADVANCE_RIP();
5703 IEM_MC_END();
5704 }
5705 return VINF_SUCCESS;
5706}
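
/*
 * Editor's sketch (not part of the build) of the CMPXCHG semantics behind
 * iemAImpl_cmpxchg_u8 and friends; the function name is illustrative and the
 * CMP-style CF/OF/SF/AF/PF updates are elided.
 */
#if 0
static void iemCmpXchgU8Ref(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
{
    if (*puDst == *puAl)
    {
        *pfEFlags |= X86_EFL_ZF;    /* match: store the source and set ZF */
        *puDst = uSrc;
    }
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;   /* miss: load the old destination into the accumulator, */
        *puAl = *puDst;             /* which is why the memory form above writes u8Al back to AL */
    }
}
#endif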
5707
5708/** Opcode 0x0f 0xb1. */
5709FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5710{
5711 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5712 IEMOP_HLP_MIN_486();
5713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5714
5715 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5716 {
5717 IEMOP_HLP_DONE_DECODING();
5718 switch (pVCpu->iem.s.enmEffOpSize)
5719 {
5720 case IEMMODE_16BIT:
5721 IEM_MC_BEGIN(4, 0);
5722 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5723 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5724 IEM_MC_ARG(uint16_t, u16Src, 2);
5725 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5726
5727 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5728 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5729 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5730 IEM_MC_REF_EFLAGS(pEFlags);
5731 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5732 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5733 else
5734 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5735
5736 IEM_MC_ADVANCE_RIP();
5737 IEM_MC_END();
5738 return VINF_SUCCESS;
5739
5740 case IEMMODE_32BIT:
5741 IEM_MC_BEGIN(4, 0);
5742 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5743 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5744 IEM_MC_ARG(uint32_t, u32Src, 2);
5745 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5746
5747 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5748 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5749 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5750 IEM_MC_REF_EFLAGS(pEFlags);
5751 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5752 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5753 else
5754 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5755
5756 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5757 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5758 IEM_MC_ADVANCE_RIP();
5759 IEM_MC_END();
5760 return VINF_SUCCESS;
5761
5762 case IEMMODE_64BIT:
5763 IEM_MC_BEGIN(4, 0);
5764 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5765 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5766#ifdef RT_ARCH_X86
5767 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5768#else
5769 IEM_MC_ARG(uint64_t, u64Src, 2);
5770#endif
5771 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5772
5773 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5774 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5775 IEM_MC_REF_EFLAGS(pEFlags);
5776#ifdef RT_ARCH_X86
5777 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5778 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5779 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5780 else
5781 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5782#else
5783 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5784 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5785 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5786 else
5787 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5788#endif
5789
5790 IEM_MC_ADVANCE_RIP();
5791 IEM_MC_END();
5792 return VINF_SUCCESS;
5793
5794 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5795 }
5796 }
5797 else
5798 {
5799 switch (pVCpu->iem.s.enmEffOpSize)
5800 {
5801 case IEMMODE_16BIT:
5802 IEM_MC_BEGIN(4, 3);
5803 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5804 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5805 IEM_MC_ARG(uint16_t, u16Src, 2);
5806 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5808 IEM_MC_LOCAL(uint16_t, u16Ax);
5809
5810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5811 IEMOP_HLP_DONE_DECODING();
5812 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5813 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5814 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5815 IEM_MC_FETCH_EFLAGS(EFlags);
5816 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5817 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5818 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5819 else
5820 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5821
5822 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5823 IEM_MC_COMMIT_EFLAGS(EFlags);
5824 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5825 IEM_MC_ADVANCE_RIP();
5826 IEM_MC_END();
5827 return VINF_SUCCESS;
5828
5829 case IEMMODE_32BIT:
5830 IEM_MC_BEGIN(4, 3);
5831 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5832 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5833 IEM_MC_ARG(uint32_t, u32Src, 2);
5834 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5836 IEM_MC_LOCAL(uint32_t, u32Eax);
5837
5838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5839 IEMOP_HLP_DONE_DECODING();
5840 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5841 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5842 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5843 IEM_MC_FETCH_EFLAGS(EFlags);
5844 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5845 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5846 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5847 else
5848 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5849
5850 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5851 IEM_MC_COMMIT_EFLAGS(EFlags);
5852 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5853 IEM_MC_ADVANCE_RIP();
5854 IEM_MC_END();
5855 return VINF_SUCCESS;
5856
5857 case IEMMODE_64BIT:
5858 IEM_MC_BEGIN(4, 3);
5859 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5860 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5861#ifdef RT_ARCH_X86
5862 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5863#else
5864 IEM_MC_ARG(uint64_t, u64Src, 2);
5865#endif
5866 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5868 IEM_MC_LOCAL(uint64_t, u64Rax);
5869
5870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5871 IEMOP_HLP_DONE_DECODING();
5872 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5873 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5874 IEM_MC_FETCH_EFLAGS(EFlags);
5875 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5876#ifdef RT_ARCH_X86
5877 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5878 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5879 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5880 else
5881 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5882#else
5883 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5884 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5885 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5886 else
5887 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5888#endif
5889
5890 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5891 IEM_MC_COMMIT_EFLAGS(EFlags);
5892 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5893 IEM_MC_ADVANCE_RIP();
5894 IEM_MC_END();
5895 return VINF_SUCCESS;
5896
5897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5898 }
5899 }
5900}
5901
5902
5903FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5904{
5905 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5906 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5907
5908 switch (pVCpu->iem.s.enmEffOpSize)
5909 {
5910 case IEMMODE_16BIT:
5911 IEM_MC_BEGIN(5, 1);
5912 IEM_MC_ARG(uint16_t, uSel, 0);
5913 IEM_MC_ARG(uint16_t, offSeg, 1);
5914 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5915 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5916 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5917 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5920 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5921 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5922 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5923 IEM_MC_END();
5924 return VINF_SUCCESS;
5925
5926 case IEMMODE_32BIT:
5927 IEM_MC_BEGIN(5, 1);
5928 IEM_MC_ARG(uint16_t, uSel, 0);
5929 IEM_MC_ARG(uint32_t, offSeg, 1);
5930 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5931 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5932 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5933 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5936 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5937 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5938 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5939 IEM_MC_END();
5940 return VINF_SUCCESS;
5941
5942 case IEMMODE_64BIT:
5943 IEM_MC_BEGIN(5, 1);
5944 IEM_MC_ARG(uint16_t, uSel, 0);
5945 IEM_MC_ARG(uint64_t, offSeg, 1);
5946 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5947 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5948 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5949 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5952                if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
5953 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5954 else
5955 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5956 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5957 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5958 IEM_MC_END();
5959 return VINF_SUCCESS;
5960
5961 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5962 }
5963}
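
/*
 * Editor's sketch (not part of the build): the memory layout of the far
 * pointer operand (Mp) decoded above, shown for the 32-bit (m16:32) case.
 * The offset comes first and the selector last, matching the
 * IEM_MC_FETCH_MEM_U16_DISP displacements of 2, 4 and 8 per operand size.
 */
#if 0
#pragma pack(1)
typedef struct FARPTR32REF
{
    uint32_t off;    /* bytes 0..3: offset, loaded into the general register */
    uint16_t uSel;   /* bytes 4..5: selector, loaded into the segment register */
} FARPTR32REF;
#pragma pack()
#endif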
5964
5965
5966/** Opcode 0x0f 0xb2. */
5967FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5968{
5969 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5970 IEMOP_HLP_MIN_386();
5971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5973 return IEMOP_RAISE_INVALID_OPCODE();
5974 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5975}
5976
5977
5978/** Opcode 0x0f 0xb3. */
5979FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5980{
5981 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5982 IEMOP_HLP_MIN_386();
5983 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5984}
5985
5986
5987/** Opcode 0x0f 0xb4. */
5988FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5989{
5990 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5991 IEMOP_HLP_MIN_386();
5992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5993 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5994 return IEMOP_RAISE_INVALID_OPCODE();
5995 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5996}
5997
5998
5999/** Opcode 0x0f 0xb5. */
6000FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6001{
6002 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6003 IEMOP_HLP_MIN_386();
6004 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6006 return IEMOP_RAISE_INVALID_OPCODE();
6007 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6008}
6009
6010
6011/** Opcode 0x0f 0xb6. */
6012FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6013{
6014 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6015 IEMOP_HLP_MIN_386();
6016
6017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6018
6019 /*
6020 * If rm is denoting a register, no more instruction bytes.
6021 */
6022 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6023 {
6024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6025 switch (pVCpu->iem.s.enmEffOpSize)
6026 {
6027 case IEMMODE_16BIT:
6028 IEM_MC_BEGIN(0, 1);
6029 IEM_MC_LOCAL(uint16_t, u16Value);
6030 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6031 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6032 IEM_MC_ADVANCE_RIP();
6033 IEM_MC_END();
6034 return VINF_SUCCESS;
6035
6036 case IEMMODE_32BIT:
6037 IEM_MC_BEGIN(0, 1);
6038 IEM_MC_LOCAL(uint32_t, u32Value);
6039 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6040 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6041 IEM_MC_ADVANCE_RIP();
6042 IEM_MC_END();
6043 return VINF_SUCCESS;
6044
6045 case IEMMODE_64BIT:
6046 IEM_MC_BEGIN(0, 1);
6047 IEM_MC_LOCAL(uint64_t, u64Value);
6048 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6049 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6050 IEM_MC_ADVANCE_RIP();
6051 IEM_MC_END();
6052 return VINF_SUCCESS;
6053
6054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6055 }
6056 }
6057 else
6058 {
6059 /*
6060 * We're loading a register from memory.
6061 */
6062 switch (pVCpu->iem.s.enmEffOpSize)
6063 {
6064 case IEMMODE_16BIT:
6065 IEM_MC_BEGIN(0, 2);
6066 IEM_MC_LOCAL(uint16_t, u16Value);
6067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6070 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6071 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6072 IEM_MC_ADVANCE_RIP();
6073 IEM_MC_END();
6074 return VINF_SUCCESS;
6075
6076 case IEMMODE_32BIT:
6077 IEM_MC_BEGIN(0, 2);
6078 IEM_MC_LOCAL(uint32_t, u32Value);
6079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6082 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6083 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6084 IEM_MC_ADVANCE_RIP();
6085 IEM_MC_END();
6086 return VINF_SUCCESS;
6087
6088 case IEMMODE_64BIT:
6089 IEM_MC_BEGIN(0, 2);
6090 IEM_MC_LOCAL(uint64_t, u64Value);
6091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6094 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6095 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6096 IEM_MC_ADVANCE_RIP();
6097 IEM_MC_END();
6098 return VINF_SUCCESS;
6099
6100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6101 }
6102 }
6103}
6104
6105
6106/** Opcode 0x0f 0xb7. */
6107FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6108{
6109 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6110 IEMOP_HLP_MIN_386();
6111
6112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6113
6114 /** @todo Not entirely sure how the operand size prefix is handled here,
6115 * assuming that it will be ignored. Would be nice to have a few
6116     *        tests for this. */
6117 /*
6118 * If rm is denoting a register, no more instruction bytes.
6119 */
6120 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6121 {
6122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6123 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6124 {
6125 IEM_MC_BEGIN(0, 1);
6126 IEM_MC_LOCAL(uint32_t, u32Value);
6127 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6128 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6129 IEM_MC_ADVANCE_RIP();
6130 IEM_MC_END();
6131 }
6132 else
6133 {
6134 IEM_MC_BEGIN(0, 1);
6135 IEM_MC_LOCAL(uint64_t, u64Value);
6136 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6137 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6138 IEM_MC_ADVANCE_RIP();
6139 IEM_MC_END();
6140 }
6141 }
6142 else
6143 {
6144 /*
6145 * We're loading a register from memory.
6146 */
6147 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6148 {
6149 IEM_MC_BEGIN(0, 2);
6150 IEM_MC_LOCAL(uint32_t, u32Value);
6151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6154 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6155 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6156 IEM_MC_ADVANCE_RIP();
6157 IEM_MC_END();
6158 }
6159 else
6160 {
6161 IEM_MC_BEGIN(0, 2);
6162 IEM_MC_LOCAL(uint64_t, u64Value);
6163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6166 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6167 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6168 IEM_MC_ADVANCE_RIP();
6169 IEM_MC_END();
6170 }
6171 }
6172 return VINF_SUCCESS;
6173}
6174
6175
6176/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6177FNIEMOP_UD_STUB(iemOp_jmpe);
6178/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6179FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6180
6181
6182/** Opcode 0x0f 0xb9. */
6183FNIEMOP_DEF(iemOp_Grp10)
6184{
6185 Log(("iemOp_Grp10 -> #UD\n"));
6186 return IEMOP_RAISE_INVALID_OPCODE();
6187}
6188
6189
6190/** Opcode 0x0f 0xba. */
6191FNIEMOP_DEF(iemOp_Grp8)
6192{
6193 IEMOP_HLP_MIN_386();
6194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6195 PCIEMOPBINSIZES pImpl;
6196 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6197 {
6198 case 0: case 1: case 2: case 3:
6199 return IEMOP_RAISE_INVALID_OPCODE();
6200 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6201 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6202 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6203 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6205 }
6206 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6207
6208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6209 {
6210 /* register destination. */
6211 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6213
6214 switch (pVCpu->iem.s.enmEffOpSize)
6215 {
6216 case IEMMODE_16BIT:
6217 IEM_MC_BEGIN(3, 0);
6218 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6219 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6220 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6221
6222 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6223 IEM_MC_REF_EFLAGS(pEFlags);
6224 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6225
6226 IEM_MC_ADVANCE_RIP();
6227 IEM_MC_END();
6228 return VINF_SUCCESS;
6229
6230 case IEMMODE_32BIT:
6231 IEM_MC_BEGIN(3, 0);
6232 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6233 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6234 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6235
6236 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6237 IEM_MC_REF_EFLAGS(pEFlags);
6238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6239
6240 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6241 IEM_MC_ADVANCE_RIP();
6242 IEM_MC_END();
6243 return VINF_SUCCESS;
6244
6245 case IEMMODE_64BIT:
6246 IEM_MC_BEGIN(3, 0);
6247 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6248 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6249 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6250
6251 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6252 IEM_MC_REF_EFLAGS(pEFlags);
6253 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6254
6255 IEM_MC_ADVANCE_RIP();
6256 IEM_MC_END();
6257 return VINF_SUCCESS;
6258
6259 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6260 }
6261 }
6262 else
6263 {
6264 /* memory destination. */
6265
6266 uint32_t fAccess;
6267 if (pImpl->pfnLockedU16)
6268 fAccess = IEM_ACCESS_DATA_RW;
6269 else /* BT */
6270 fAccess = IEM_ACCESS_DATA_R;
6271
6272 /** @todo test negative bit offsets! */
6273 switch (pVCpu->iem.s.enmEffOpSize)
6274 {
6275 case IEMMODE_16BIT:
6276 IEM_MC_BEGIN(3, 1);
6277 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6278 IEM_MC_ARG(uint16_t, u16Src, 1);
6279 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6281
6282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6283 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6284 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6285 if (pImpl->pfnLockedU16)
6286 IEMOP_HLP_DONE_DECODING();
6287 else
6288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6289 IEM_MC_FETCH_EFLAGS(EFlags);
6290 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6291 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6293 else
6294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6295 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6296
6297 IEM_MC_COMMIT_EFLAGS(EFlags);
6298 IEM_MC_ADVANCE_RIP();
6299 IEM_MC_END();
6300 return VINF_SUCCESS;
6301
6302 case IEMMODE_32BIT:
6303 IEM_MC_BEGIN(3, 1);
6304 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6305 IEM_MC_ARG(uint32_t, u32Src, 1);
6306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6308
6309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6310 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6311 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6312 if (pImpl->pfnLockedU16)
6313 IEMOP_HLP_DONE_DECODING();
6314 else
6315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6316 IEM_MC_FETCH_EFLAGS(EFlags);
6317 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6318 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6319 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6320 else
6321 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6322 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6323
6324 IEM_MC_COMMIT_EFLAGS(EFlags);
6325 IEM_MC_ADVANCE_RIP();
6326 IEM_MC_END();
6327 return VINF_SUCCESS;
6328
6329 case IEMMODE_64BIT:
6330 IEM_MC_BEGIN(3, 1);
6331 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6332 IEM_MC_ARG(uint64_t, u64Src, 1);
6333 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6335
6336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6337 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6338 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6339 if (pImpl->pfnLockedU16)
6340 IEMOP_HLP_DONE_DECODING();
6341 else
6342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6343 IEM_MC_FETCH_EFLAGS(EFlags);
6344 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6345 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6347 else
6348 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6349 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6350
6351 IEM_MC_COMMIT_EFLAGS(EFlags);
6352 IEM_MC_ADVANCE_RIP();
6353 IEM_MC_END();
6354 return VINF_SUCCESS;
6355
6356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6357 }
6358    }
6360}
6361
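
/*
 * Editor's sketch (not part of the build) of the immediate bit-test semantics
 * dispatched above, shown for BTS on a 32-bit operand; the helper name is
 * illustrative.  The immediate is masked to the operand width
 * (u8Bit & 0x0f/0x1f/0x3f), so unlike the Ev,Gv forms it can never address
 * outside the selected operand.  BT only reads, which is why the memory path
 * maps with IEM_ACCESS_DATA_R when pfnLockedU16 is NULL.
 */
#if 0
static void iemBtsU32Ref(uint32_t *puDst, uint8_t u8Bit, uint32_t *pfEFlags)
{
    uint32_t const fMask = RT_BIT_32(u8Bit & 0x1f);
    *pfEFlags = (*pfEFlags & ~X86_EFL_CF) | (*puDst & fMask ? X86_EFL_CF : 0); /* CF = selected bit */
    *puDst |= fMask;    /* bts sets it; btr would clear, btc would flip, bt leaves it alone */
}
#endif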
6362
6363/** Opcode 0x0f 0xbb. */
6364FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6365{
6366 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6367 IEMOP_HLP_MIN_386();
6368 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6369}
6370
6371
6372/** Opcode 0x0f 0xbc. */
6373FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6374{
6375 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6376 IEMOP_HLP_MIN_386();
6377 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6378 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6379}
6380
6381
6382/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6383FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6384
6385
6386/** Opcode 0x0f 0xbd. */
6387FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6388{
6389 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6390 IEMOP_HLP_MIN_386();
6391 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6392 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6393}
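
/*
 * Editor's sketch (not part of the build) of the bit scan semantics behind
 * g_iemAImpl_bsf/bsr, shown for the 32-bit BSF case with an illustrative
 * name.  A zero source sets ZF and (formally undefined, but on most CPUs)
 * leaves the destination unmodified; otherwise ZF is cleared and the index
 * of the lowest (bsf) or highest (bsr) set bit is stored.
 */
#if 0
static void iemBsfU32Ref(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
{
    if (!uSrc)
        *pfEFlags |= X86_EFL_ZF;
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;
        uint32_t iBit = 0;
        while (!(uSrc & 1))         /* scan from bit 0; bsr would scan from bit 31 down */
        {
            uSrc >>= 1;
            iBit++;
        }
        *puDst = iBit;
    }
}
#endif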
6394
6395
6396/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6397FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6398
6399
6400/** Opcode 0x0f 0xbe. */
6401FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6402{
6403 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6404 IEMOP_HLP_MIN_386();
6405
6406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6407
6408 /*
6409 * If rm is denoting a register, no more instruction bytes.
6410 */
6411 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6412 {
6413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6414 switch (pVCpu->iem.s.enmEffOpSize)
6415 {
6416 case IEMMODE_16BIT:
6417 IEM_MC_BEGIN(0, 1);
6418 IEM_MC_LOCAL(uint16_t, u16Value);
6419 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6420 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6421 IEM_MC_ADVANCE_RIP();
6422 IEM_MC_END();
6423 return VINF_SUCCESS;
6424
6425 case IEMMODE_32BIT:
6426 IEM_MC_BEGIN(0, 1);
6427 IEM_MC_LOCAL(uint32_t, u32Value);
6428 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6429 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6430 IEM_MC_ADVANCE_RIP();
6431 IEM_MC_END();
6432 return VINF_SUCCESS;
6433
6434 case IEMMODE_64BIT:
6435 IEM_MC_BEGIN(0, 1);
6436 IEM_MC_LOCAL(uint64_t, u64Value);
6437 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6438 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6439 IEM_MC_ADVANCE_RIP();
6440 IEM_MC_END();
6441 return VINF_SUCCESS;
6442
6443 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6444 }
6445 }
6446 else
6447 {
6448 /*
6449 * We're loading a register from memory.
6450 */
6451 switch (pVCpu->iem.s.enmEffOpSize)
6452 {
6453 case IEMMODE_16BIT:
6454 IEM_MC_BEGIN(0, 2);
6455 IEM_MC_LOCAL(uint16_t, u16Value);
6456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6459 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6460 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6461 IEM_MC_ADVANCE_RIP();
6462 IEM_MC_END();
6463 return VINF_SUCCESS;
6464
6465 case IEMMODE_32BIT:
6466 IEM_MC_BEGIN(0, 2);
6467 IEM_MC_LOCAL(uint32_t, u32Value);
6468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6471 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6472 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6473 IEM_MC_ADVANCE_RIP();
6474 IEM_MC_END();
6475 return VINF_SUCCESS;
6476
6477 case IEMMODE_64BIT:
6478 IEM_MC_BEGIN(0, 2);
6479 IEM_MC_LOCAL(uint64_t, u64Value);
6480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6483 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6484 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6485 IEM_MC_ADVANCE_RIP();
6486 IEM_MC_END();
6487 return VINF_SUCCESS;
6488
6489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6490 }
6491 }
6492}
6493
6494
6495/** Opcode 0x0f 0xbf. */
6496FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6497{
6498 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6499 IEMOP_HLP_MIN_386();
6500
6501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6502
6503 /** @todo Not entirely sure how the operand size prefix is handled here,
6504 * assuming that it will be ignored. Would be nice to have a few
6505     *        tests for this. */
6506 /*
6507 * If rm is denoting a register, no more instruction bytes.
6508 */
6509 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6510 {
6511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6512 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6513 {
6514 IEM_MC_BEGIN(0, 1);
6515 IEM_MC_LOCAL(uint32_t, u32Value);
6516 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6517 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6518 IEM_MC_ADVANCE_RIP();
6519 IEM_MC_END();
6520 }
6521 else
6522 {
6523 IEM_MC_BEGIN(0, 1);
6524 IEM_MC_LOCAL(uint64_t, u64Value);
6525 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6526 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6527 IEM_MC_ADVANCE_RIP();
6528 IEM_MC_END();
6529 }
6530 }
6531 else
6532 {
6533 /*
6534 * We're loading a register from memory.
6535 */
6536 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6537 {
6538 IEM_MC_BEGIN(0, 2);
6539 IEM_MC_LOCAL(uint32_t, u32Value);
6540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6543 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6544 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6545 IEM_MC_ADVANCE_RIP();
6546 IEM_MC_END();
6547 }
6548 else
6549 {
6550 IEM_MC_BEGIN(0, 2);
6551 IEM_MC_LOCAL(uint64_t, u64Value);
6552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6555 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6556 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6557 IEM_MC_ADVANCE_RIP();
6558 IEM_MC_END();
6559 }
6560 }
6561 return VINF_SUCCESS;
6562}
6563
6564
6565/** Opcode 0x0f 0xc0. */
6566FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6567{
6568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6569 IEMOP_HLP_MIN_486();
6570 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6571
6572 /*
6573 * If rm is denoting a register, no more instruction bytes.
6574 */
6575 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6576 {
6577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6578
6579 IEM_MC_BEGIN(3, 0);
6580 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6581 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6582 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6583
6584 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6585 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6586 IEM_MC_REF_EFLAGS(pEFlags);
6587 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6588
6589 IEM_MC_ADVANCE_RIP();
6590 IEM_MC_END();
6591 }
6592 else
6593 {
6594 /*
6595 * We're accessing memory.
6596 */
6597 IEM_MC_BEGIN(3, 3);
6598 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6599 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6600 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6601 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6603
6604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6605 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6606 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6607 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6608 IEM_MC_FETCH_EFLAGS(EFlags);
6609 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6610 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6611 else
6612 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6613
6614 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6615 IEM_MC_COMMIT_EFLAGS(EFlags);
6616 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6617 IEM_MC_ADVANCE_RIP();
6618 IEM_MC_END();
6619 return VINF_SUCCESS;
6620 }
6621 return VINF_SUCCESS;
6622}
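
/*
 * Editor's sketch (not part of the build) of the XADD semantics behind
 * iemAImpl_xadd_u8; the name is illustrative and the ADD-style flag updates
 * are elided.  The exchange explains why the memory form above works on a
 * local copy (u8RegCopy) and stores it back to the register afterwards.
 */
#if 0
static void iemXaddU8Ref(uint8_t *puDst, uint8_t *puReg, uint32_t *pfEFlags)
{
    uint8_t const uOldDst = *puDst;
    *puDst = uOldDst + *puReg;      /* the destination receives the sum (EFlags as per ADD) */
    *puReg = uOldDst;               /* the source register receives the old destination */
    RT_NOREF(pfEFlags);
}
#endif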
6623
6624
6625/** Opcode 0x0f 0xc1. */
6626FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6627{
6628 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6629 IEMOP_HLP_MIN_486();
6630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6631
6632 /*
6633 * If rm is denoting a register, no more instruction bytes.
6634 */
6635 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6636 {
6637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6638
6639 switch (pVCpu->iem.s.enmEffOpSize)
6640 {
6641 case IEMMODE_16BIT:
6642 IEM_MC_BEGIN(3, 0);
6643 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6644 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6645 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6646
6647 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6648 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6649 IEM_MC_REF_EFLAGS(pEFlags);
6650 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6651
6652 IEM_MC_ADVANCE_RIP();
6653 IEM_MC_END();
6654 return VINF_SUCCESS;
6655
6656 case IEMMODE_32BIT:
6657 IEM_MC_BEGIN(3, 0);
6658 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6659 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6660 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6661
6662 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6663 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6664 IEM_MC_REF_EFLAGS(pEFlags);
6665 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6666
6667 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6668 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6669 IEM_MC_ADVANCE_RIP();
6670 IEM_MC_END();
6671 return VINF_SUCCESS;
6672
6673 case IEMMODE_64BIT:
6674 IEM_MC_BEGIN(3, 0);
6675 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6676 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6677 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6678
6679 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6680 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6681 IEM_MC_REF_EFLAGS(pEFlags);
6682 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6683
6684 IEM_MC_ADVANCE_RIP();
6685 IEM_MC_END();
6686 return VINF_SUCCESS;
6687
6688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6689 }
6690 }
6691 else
6692 {
6693 /*
6694 * We're accessing memory.
6695 */
6696 switch (pVCpu->iem.s.enmEffOpSize)
6697 {
6698 case IEMMODE_16BIT:
6699 IEM_MC_BEGIN(3, 3);
6700 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6701 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6702 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6703 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6705
6706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6707 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6708 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6709 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6710 IEM_MC_FETCH_EFLAGS(EFlags);
6711 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6712 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6713 else
6714 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6715
6716 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6717 IEM_MC_COMMIT_EFLAGS(EFlags);
6718 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6719 IEM_MC_ADVANCE_RIP();
6720 IEM_MC_END();
6721 return VINF_SUCCESS;
6722
6723 case IEMMODE_32BIT:
6724 IEM_MC_BEGIN(3, 3);
6725 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6726 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6727 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6728 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6730
6731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6732 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6733 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6734 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6735 IEM_MC_FETCH_EFLAGS(EFlags);
6736 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6737 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6738 else
6739 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6740
6741 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6742 IEM_MC_COMMIT_EFLAGS(EFlags);
6743 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6744 IEM_MC_ADVANCE_RIP();
6745 IEM_MC_END();
6746 return VINF_SUCCESS;
6747
6748 case IEMMODE_64BIT:
6749 IEM_MC_BEGIN(3, 3);
6750 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6751 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6752 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6753 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6755
6756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6757 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6758 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6759 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6760 IEM_MC_FETCH_EFLAGS(EFlags);
6761 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6762 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6763 else
6764 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6765
6766 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6767 IEM_MC_COMMIT_EFLAGS(EFlags);
6768 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6769 IEM_MC_ADVANCE_RIP();
6770 IEM_MC_END();
6771 return VINF_SUCCESS;
6772
6773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6774 }
6775 }
6776}
6777
6778
6779/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
6780FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
6781/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
6782FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
6783/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
6784FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
6785/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
6786FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
6787
6788
6789/** Opcode 0x0f 0xc3. */
6790FNIEMOP_DEF(iemOp_movnti_My_Gy)
6791{
6792 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6793
6794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6795
6796 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6797 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6798 {
6799 switch (pVCpu->iem.s.enmEffOpSize)
6800 {
6801 case IEMMODE_32BIT:
6802 IEM_MC_BEGIN(0, 2);
6803 IEM_MC_LOCAL(uint32_t, u32Value);
6804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6805
6806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6808 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6809 return IEMOP_RAISE_INVALID_OPCODE();
6810
6811 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6812 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6813 IEM_MC_ADVANCE_RIP();
6814 IEM_MC_END();
6815 break;
6816
6817 case IEMMODE_64BIT:
6818 IEM_MC_BEGIN(0, 2);
6819 IEM_MC_LOCAL(uint64_t, u64Value);
6820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6821
6822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6824 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6825 return IEMOP_RAISE_INVALID_OPCODE();
6826
6827 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6828 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6829 IEM_MC_ADVANCE_RIP();
6830 IEM_MC_END();
6831 break;
6832
6833 case IEMMODE_16BIT:
6834 /** @todo check this form. */
6835 return IEMOP_RAISE_INVALID_OPCODE();
6836 }
6837 }
6838 else
6839 return IEMOP_RAISE_INVALID_OPCODE();
6840 return VINF_SUCCESS;
6841}
6842/* Opcode 0x66 0x0f 0xc3 - invalid */
6843/* Opcode 0xf3 0x0f 0xc3 - invalid */
6844/* Opcode 0xf2 0x0f 0xc3 - invalid */
6845
6846/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
6847FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
6848/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6849FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
6850/* Opcode 0xf3 0x0f 0xc4 - invalid */
6851/* Opcode 0xf2 0x0f 0xc4 - invalid */
6852
6853/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
6854FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
6855/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
6856FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
6857/* Opcode 0xf3 0x0f 0xc5 - invalid */
6858/* Opcode 0xf2 0x0f 0xc5 - invalid */
6859
6860/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
6861FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
6862/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6863FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
6864/* Opcode 0xf3 0x0f 0xc6 - invalid */
6865/* Opcode 0xf2 0x0f 0xc6 - invalid */
6866
6867
6868/** Opcode 0x0f 0xc7 !11/1. */
6869FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6870{
6871 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6872
6873 IEM_MC_BEGIN(4, 3);
6874 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6875 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6876 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6877 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6878 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6879 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6881
6882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6883 IEMOP_HLP_DONE_DECODING();
6884 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6885
6886 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6887 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6888 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6889
6890 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6891 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6892 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6893
6894 IEM_MC_FETCH_EFLAGS(EFlags);
6895 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6896 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6897 else
6898 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6899
6900 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6901 IEM_MC_COMMIT_EFLAGS(EFlags);
6902 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6903 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6904 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6905 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6906 IEM_MC_ENDIF();
6907 IEM_MC_ADVANCE_RIP();
6908
6909 IEM_MC_END();
6910 return VINF_SUCCESS;
6911}
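/* For reference, a rough sketch of the architectural operation performed by
   iemAImpl_cmpxchg8b above (locked when a LOCK prefix is present):
       if (EDX:EAX == [mem64]) { ZF = 1; [mem64] = ECX:EBX; }
       else                    { ZF = 0; EDX:EAX = [mem64]; } */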
6912
6913
6914/** Opcode REX.W 0x0f 0xc7 !11/1. */
6915FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6916{
6917 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6918 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6919 {
6920#if 0
6921 RT_NOREF(bRm);
6922 IEMOP_BITCH_ABOUT_STUB();
6923 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6924#else
6925 IEM_MC_BEGIN(4, 3);
6926 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6927 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6928 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6929 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6930 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6931 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6933
6934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6935 IEMOP_HLP_DONE_DECODING();
6936 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6937 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6938
6939 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6940 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6941 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6942
6943 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6944 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6945 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6946
6947 IEM_MC_FETCH_EFLAGS(EFlags);
6948# ifdef RT_ARCH_AMD64
6949 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6950 {
6951 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6952 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6953 else
6954 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6955 }
6956 else
6957# endif
6958 {
6959                /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
6960                   accesses and is not at all atomic, which works fine in a UNI-CPU guest
6961                   configuration (ignoring DMA).  If guest SMP is active we have no choice
6962                   but to use a rendezvous callback here.  Sigh. */
6963 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6964 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6965 else
6966 {
6967 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6968 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6969 }
6970 }
6971
6972 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6973 IEM_MC_COMMIT_EFLAGS(EFlags);
6974 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6975 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6976 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6977 IEM_MC_ENDIF();
6978 IEM_MC_ADVANCE_RIP();
6979
6980 IEM_MC_END();
6981 return VINF_SUCCESS;
6982#endif
6983 }
6984 Log(("cmpxchg16b -> #UD\n"));
6985 return IEMOP_RAISE_INVALID_OPCODE();
6986}
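/* For reference, cmpxchg16b is the 128-bit widening of the cmpxchg8b sketch
   above:
       if (RDX:RAX == [mem128]) { ZF = 1; [mem128] = RCX:RBX; }
       else                     { ZF = 0; RDX:RAX = [mem128]; }
   with the extra requirement, enforced by IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED
   above, that the memory operand be 16-byte aligned (#GP(0) otherwise). */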
6987
6988
6989/** Opcode 0x0f 0xc7 11/6. */
6990FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6991
6992/** Opcode 0x0f 0xc7 !11/6. */
6993FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6994
6995/** Opcode 0x66 0x0f 0xc7 !11/6. */
6996FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6997
6998/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6999FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7000
7001/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7002FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7003
7004
7005/** Opcode 0x0f 0xc7. */
7006FNIEMOP_DEF(iemOp_Grp9)
7007{
7008 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7010 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7011 {
7012 case 0: case 2: case 3: case 4: case 5:
7013 return IEMOP_RAISE_INVALID_OPCODE();
7014 case 1:
7015 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7016 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7017 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7018 return IEMOP_RAISE_INVALID_OPCODE();
7019 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7020 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7021 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7022 case 6:
7023 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7024 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7025 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7026 {
7027 case 0:
7028 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7029 case IEM_OP_PRF_SIZE_OP:
7030 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7031 case IEM_OP_PRF_REPZ:
7032 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7033 default:
7034 return IEMOP_RAISE_INVALID_OPCODE();
7035 }
7036 case 7:
7037 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7038 {
7039 case 0:
7040 case IEM_OP_PRF_REPZ:
7041 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7042 default:
7043 return IEMOP_RAISE_INVALID_OPCODE();
7044 }
7045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7046 }
7047}
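/* Decoding note: group 9 dispatches purely on the ModR/M reg field, the mod
   field and the 0x66/0xf3 prefixes, as the switch above shows; REX.W then
   selects between cmpxchg8b and cmpxchg16b for /1. */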
7048
7049
7050/**
7051 * Common 'bswap register' helper.
7052 */
7053FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7054{
7055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7056 switch (pVCpu->iem.s.enmEffOpSize)
7057 {
7058 case IEMMODE_16BIT:
7059 IEM_MC_BEGIN(1, 0);
7060 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7061 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7062 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7063 IEM_MC_ADVANCE_RIP();
7064 IEM_MC_END();
7065 return VINF_SUCCESS;
7066
7067 case IEMMODE_32BIT:
7068 IEM_MC_BEGIN(1, 0);
7069 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7070 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7071 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7072 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7073 IEM_MC_ADVANCE_RIP();
7074 IEM_MC_END();
7075 return VINF_SUCCESS;
7076
7077 case IEMMODE_64BIT:
7078 IEM_MC_BEGIN(1, 0);
7079 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7080 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7081 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7082 IEM_MC_ADVANCE_RIP();
7083 IEM_MC_END();
7084 return VINF_SUCCESS;
7085
7086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7087 }
7088}
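/* Example (informational): with EAX = 0x12345678, 'bswap eax' yields
   EAX = 0x78563412.  The 16-bit form is undefined according to the Intel
   manuals, which is why the 16-bit case above deliberately leaves the high
   dword alone and lets iemAImpl_bswap_u16 decide the exact result. */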
7089
7090
7091/** Opcode 0x0f 0xc8. */
7092FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7093{
7094 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7095    /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7096       prefix; it appears REX.B is the correct prefix.  For a parallel
7097       case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7098 IEMOP_HLP_MIN_486();
7099 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7100}
7101
7102
7103/** Opcode 0x0f 0xc9. */
7104FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7105{
7106 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7107 IEMOP_HLP_MIN_486();
7108 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7109}
7110
7111
7112/** Opcode 0x0f 0xca. */
7113FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7114{
7115    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7116 IEMOP_HLP_MIN_486();
7117 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7118}
7119
7120
7121/** Opcode 0x0f 0xcb. */
7122FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7123{
7124    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7125 IEMOP_HLP_MIN_486();
7126 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7127}
7128
7129
7130/** Opcode 0x0f 0xcc. */
7131FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7132{
7133 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7134 IEMOP_HLP_MIN_486();
7135 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7136}
7137
7138
7139/** Opcode 0x0f 0xcd. */
7140FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7141{
7142 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7143 IEMOP_HLP_MIN_486();
7144 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7145}
7146
7147
7148/** Opcode 0x0f 0xce. */
7149FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7150{
7151 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7152 IEMOP_HLP_MIN_486();
7153 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7154}
7155
7156
7157/** Opcode 0x0f 0xcf. */
7158FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7159{
7160 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7161 IEMOP_HLP_MIN_486();
7162 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7163}
7164
7165
7166/* Opcode 0x0f 0xd0 - invalid */
7167/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7168FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7169/* Opcode 0xf3 0x0f 0xd0 - invalid */
7170/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7171FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7172
7173/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7174FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7175/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7176FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7177/* Opcode 0xf3 0x0f 0xd1 - invalid */
7178/* Opcode 0xf2 0x0f 0xd1 - invalid */
7179
7180/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7181FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7182/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7183FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7184/* Opcode 0xf3 0x0f 0xd2 - invalid */
7185/* Opcode 0xf2 0x0f 0xd2 - invalid */
7186
7187/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7188FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7189/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7190FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7191/* Opcode 0xf3 0x0f 0xd3 - invalid */
7192/* Opcode 0xf2 0x0f 0xd3 - invalid */
7193
7194/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7195FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7196/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7197FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7198/* Opcode 0xf3 0x0f 0xd4 - invalid */
7199/* Opcode 0xf2 0x0f 0xd4 - invalid */
7200
7201/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7202FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7203/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7204FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7205/* Opcode 0xf3 0x0f 0xd5 - invalid */
7206/* Opcode 0xf2 0x0f 0xd5 - invalid */
7207
7208/* Opcode 0x0f 0xd6 - invalid */
7209/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7210FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7211/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7212FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7213/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7214FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7215#if 0
7216FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7217{
7218    /* Docs say register only. */
7219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7220
7221 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7222 {
7223 case IEM_OP_PRF_SIZE_OP: /* SSE */
7224 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7225 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7226 IEM_MC_BEGIN(2, 0);
7227 IEM_MC_ARG(uint64_t *, pDst, 0);
7228 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7229 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7230 IEM_MC_PREPARE_SSE_USAGE();
7231 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7232 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7233 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7234 IEM_MC_ADVANCE_RIP();
7235 IEM_MC_END();
7236 return VINF_SUCCESS;
7237
7238 case 0: /* MMX */
7239            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7240 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7241 IEM_MC_BEGIN(2, 0);
7242 IEM_MC_ARG(uint64_t *, pDst, 0);
7243 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7244 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7245 IEM_MC_PREPARE_FPU_USAGE();
7246 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7247 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7248 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7249 IEM_MC_ADVANCE_RIP();
7250 IEM_MC_END();
7251 return VINF_SUCCESS;
7252
7253 default:
7254 return IEMOP_RAISE_INVALID_OPCODE();
7255 }
7256}
7257#endif
7258
7259
7260/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7261FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7262{
7263    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7264    /** @todo testcase: Check that the instruction implicitly clears the high
7265     *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
7266     *        and opcode modifications are made to work with the whole width (not
7267     *        just 128). */
7268    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7269    /* Docs say register only. */
7270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7271 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7272 {
7273 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7274 IEM_MC_BEGIN(2, 0);
7275 IEM_MC_ARG(uint64_t *, pDst, 0);
7276 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7277 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7278 IEM_MC_PREPARE_FPU_USAGE();
7279 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7280 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7281 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7282 IEM_MC_ADVANCE_RIP();
7283 IEM_MC_END();
7284 return VINF_SUCCESS;
7285 }
7286 return IEMOP_RAISE_INVALID_OPCODE();
7287}
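/* Example (informational): pmovmskb gathers the most significant bit of each
   source byte into the low bits of the destination, e.g. with
   MM1 = 0x8000000000000080, 'pmovmskb eax, mm1' yields EAX = 0x81. */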
7288
7289/** Opcode 0x66 0x0f 0xd7 - */
7290FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7291{
7292    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7293    /** @todo testcase: Check that the instruction implicitly clears the high
7294     *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
7295     *        and opcode modifications are made to work with the whole width (not
7296     *        just 128). */
7297    IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7298    /* Docs say register only. */
7299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7300 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7301 {
7302 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7303 IEM_MC_BEGIN(2, 0);
7304 IEM_MC_ARG(uint64_t *, pDst, 0);
7305 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7306 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7307 IEM_MC_PREPARE_SSE_USAGE();
7308 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7309 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7310 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7311 IEM_MC_ADVANCE_RIP();
7312 IEM_MC_END();
7313 return VINF_SUCCESS;
7314 }
7315 return IEMOP_RAISE_INVALID_OPCODE();
7316}
7317
7318/* Opcode 0xf3 0x0f 0xd7 - invalid */
7319/* Opcode 0xf2 0x0f 0xd7 - invalid */
7320
7321
7322/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7323FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7324/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7325FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7326/* Opcode 0xf3 0x0f 0xd8 - invalid */
7327/* Opcode 0xf2 0x0f 0xd8 - invalid */
7328
7329/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7330FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7331/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7332FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7333/* Opcode 0xf3 0x0f 0xd9 - invalid */
7334/* Opcode 0xf2 0x0f 0xd9 - invalid */
7335
7336/** Opcode 0x0f 0xda - pminub Pq, Qq */
7337FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7338/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7339FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7340/* Opcode 0xf3 0x0f 0xda - invalid */
7341/* Opcode 0xf2 0x0f 0xda - invalid */
7342
7343/** Opcode 0x0f 0xdb - pand Pq, Qq */
7344FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7345/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7346FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7347/* Opcode 0xf3 0x0f 0xdb - invalid */
7348/* Opcode 0xf2 0x0f 0xdb - invalid */
7349
7350/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7351FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7352/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7353FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7354/* Opcode 0xf3 0x0f 0xdc - invalid */
7355/* Opcode 0xf2 0x0f 0xdc - invalid */
7356
7357/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7358FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7359/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7360FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7361/* Opcode 0xf3 0x0f 0xdd - invalid */
7362/* Opcode 0xf2 0x0f 0xdd - invalid */
7363
7364/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7365FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7366/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7367FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7368/* Opcode 0xf3 0x0f 0xde - invalid */
7369/* Opcode 0xf2 0x0f 0xde - invalid */
7370
7371/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7372FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7373/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7374FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7375/* Opcode 0xf3 0x0f 0xdf - invalid */
7376/* Opcode 0xf2 0x0f 0xdf - invalid */
7377
7378/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7379FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7380/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7381FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7382/* Opcode 0xf3 0x0f 0xe0 - invalid */
7383/* Opcode 0xf2 0x0f 0xe0 - invalid */
7384
7385/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7386FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7387/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7388FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7389/* Opcode 0xf3 0x0f 0xe1 - invalid */
7390/* Opcode 0xf2 0x0f 0xe1 - invalid */
7391
7392/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7393FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7394/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7395FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7396/* Opcode 0xf3 0x0f 0xe2 - invalid */
7397/* Opcode 0xf2 0x0f 0xe2 - invalid */
7398
7399/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7400FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7401/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7402FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7403/* Opcode 0xf3 0x0f 0xe3 - invalid */
7404/* Opcode 0xf2 0x0f 0xe3 - invalid */
7405
7406/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7407FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7408/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7409FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7410/* Opcode 0xf3 0x0f 0xe4 - invalid */
7411/* Opcode 0xf2 0x0f 0xe4 - invalid */
7412
7413/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7414FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7415/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7416FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7417/* Opcode 0xf3 0x0f 0xe5 - invalid */
7418/* Opcode 0xf2 0x0f 0xe5 - invalid */
7419
7420/* Opcode 0x0f 0xe6 - invalid */
7421/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7422FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7423/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7424FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7425/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7426FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7427
7428
7429/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7430FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7431{
7432 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7434 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7435 {
7436 /* Register, memory. */
7437 IEM_MC_BEGIN(0, 2);
7438 IEM_MC_LOCAL(uint64_t, uSrc);
7439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7440
7441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7443 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7444 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7445
7446 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7447 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7448
7449 IEM_MC_ADVANCE_RIP();
7450 IEM_MC_END();
7451 return VINF_SUCCESS;
7452 }
7453 /* The register, register encoding is invalid. */
7454 return IEMOP_RAISE_INVALID_OPCODE();
7455}
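/* As with movnti, the non-temporal hint of movntq has no architectural side
   effects, so the plain 64-bit store above suffices for emulation. */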
7456
7457/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7458FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7459{
7460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7461 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7462 {
7463 /* Register, memory. */
7464 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7465 IEM_MC_BEGIN(0, 2);
7466 IEM_MC_LOCAL(uint128_t, uSrc);
7467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7468
7469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7471 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7472 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7473
7474 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7475 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7476
7477 IEM_MC_ADVANCE_RIP();
7478 IEM_MC_END();
7479 return VINF_SUCCESS;
7480 }
7481
7482 /* The register, register encoding is invalid. */
7483 return IEMOP_RAISE_INVALID_OPCODE();
7484}
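/* Note: movntdq requires a 16-byte aligned destination; the
   IEM_MC_STORE_MEM_U128_ALIGN_SSE call above presumably supplies that
   alignment check (#GP(0) on violation). */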
7485
7486/* Opcode 0xf3 0x0f 0xe7 - invalid */
7487/* Opcode 0xf2 0x0f 0xe7 - invalid */
7488
7489
7490/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7491FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7492/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7493FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7494/* Opcode 0xf3 0x0f 0xe8 - invalid */
7495/* Opcode 0xf2 0x0f 0xe8 - invalid */
7496
7497/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7498FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7499/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7500FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7501/* Opcode 0xf3 0x0f 0xe9 - invalid */
7502/* Opcode 0xf2 0x0f 0xe9 - invalid */
7503
7504/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7505FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7506/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7507FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7508/* Opcode 0xf3 0x0f 0xea - invalid */
7509/* Opcode 0xf2 0x0f 0xea - invalid */
7510
7511/** Opcode 0x0f 0xeb - por Pq, Qq */
7512FNIEMOP_STUB(iemOp_por_Pq_Qq);
7513/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7514FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7515/* Opcode 0xf3 0x0f 0xeb - invalid */
7516/* Opcode 0xf2 0x0f 0xeb - invalid */
7517
7518/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7519FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7520/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7521FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7522/* Opcode 0xf3 0x0f 0xec - invalid */
7523/* Opcode 0xf2 0x0f 0xec - invalid */
7524
7525/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7526FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7527/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7528FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7529/* Opcode 0xf3 0x0f 0xed - invalid */
7530/* Opcode 0xf2 0x0f 0xed - invalid */
7531
7532/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7533FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7534/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7535FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7536/* Opcode 0xf3 0x0f 0xee - invalid */
7537/* Opcode 0xf2 0x0f 0xee - invalid */
7538
7539
7540/** Opcode 0x0f 0xef - pxor Pq, Qq */
7541FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7542{
7543 IEMOP_MNEMONIC(pxor, "pxor");
7544 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7545}
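/* Example (informational): 'pxor mm0, mm0' is the canonical MMX
   register-zeroing idiom and ends up in the common full/full worker
   referenced above. */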
7546
7547/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7548FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7549{
7550 IEMOP_MNEMONIC(vpxor, "vpxor");
7551 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7552}
7553
7554/* Opcode 0xf3 0x0f 0xef - invalid */
7555/* Opcode 0xf2 0x0f 0xef - invalid */
7556
7557/* Opcode 0x0f 0xf0 - invalid */
7558/* Opcode 0x66 0x0f 0xf0 - invalid */
7559/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7560FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7561
7562/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7563FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7564/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7565FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7566/* Opcode 0xf2 0x0f 0xf1 - invalid */
7567
7568/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7569FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7570/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7571FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7572/* Opcode 0xf2 0x0f 0xf2 - invalid */
7573
7574/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7575FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7576/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7577FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7578/* Opcode 0xf2 0x0f 0xf3 - invalid */
7579
7580/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7581FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7582/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7583FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7584/* Opcode 0xf2 0x0f 0xf4 - invalid */
7585
7586/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7587FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7588/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7589FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7590/* Opcode 0xf2 0x0f 0xf5 - invalid */
7591
7592/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7593FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7594/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7595FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7596/* Opcode 0xf2 0x0f 0xf6 - invalid */
7597
7598/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7599FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7600/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7601FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7602/* Opcode 0xf2 0x0f 0xf7 - invalid */
7603
7604/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7605FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7606/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7607FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7608/* Opcode 0xf2 0x0f 0xf8 - invalid */
7609
7610/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7611FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7612/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7613FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7614/* Opcode 0xf2 0x0f 0xf9 - invalid */
7615
7616/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7617FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7618/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7619FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7620/* Opcode 0xf2 0x0f 0xfa - invalid */
7621
7622/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7623FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7624/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7625FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7626/* Opcode 0xf2 0x0f 0xfb - invalid */
7627
7628/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7629FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7630/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7631FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7632/* Opcode 0xf2 0x0f 0xfc - invalid */
7633
7634/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7635FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7636/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7637FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7638/* Opcode 0xf2 0x0f 0xfd - invalid */
7639
7640/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7641FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7642/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7643FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7644/* Opcode 0xf2 0x0f 0xfe - invalid */
7645
7646
7647/** Opcode **** 0x0f 0xff - UD0 */
7648FNIEMOP_DEF(iemOp_ud0)
7649{
7650 IEMOP_MNEMONIC(ud0, "ud0");
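    /* On Intel CPUs ud0 consumes a ModR/M byte (including any memory operand
       decoding), while AMD raises #UD on the opcode byte alone; hence the
       vendor check below. */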
7651 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7652 {
7653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7654#ifndef TST_IEM_CHECK_MC
7655 RTGCPTR GCPtrEff;
7656 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7657 if (rcStrict != VINF_SUCCESS)
7658 return rcStrict;
7659#endif
7660 IEMOP_HLP_DONE_DECODING();
7661 }
7662 return IEMOP_RAISE_INVALID_OPCODE();
7663}
7664
7665
7666
7667/**
7668 * Two byte opcode map, first byte 0x0f.
7669 *
7670 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7671 * check if it needs updating as well when making changes.
7672 */
7673IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7674{
7675 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7676 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7677 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7678 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7679 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7680 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7681 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7682 /* 0x06 */ IEMOP_X4(iemOp_clts),
7683 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7684 /* 0x08 */ IEMOP_X4(iemOp_invd),
7685 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7686 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7687 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7688 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7689 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7690 /* 0x0e */ IEMOP_X4(iemOp_femms),
7691 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7692
7693 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7694 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7695 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7696 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7697 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7698 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7699 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7700 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7701 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7702 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7703 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7704 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7705 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7706 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7707 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7708 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7709
7710 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7711 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7712 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7713 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7714 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7715 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7716 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7717 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7718 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7719 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7720 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7721 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7722 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7723 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7724 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7725 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7726
7727 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7728 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7729 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7730 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7731 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7732 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7733 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7734 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7735 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7736 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7737 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7738 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7739 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7740 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7741 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7742 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7743
7744 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7745 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7746 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7747 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7748 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7749 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7750 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7751 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7752 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7753 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7754 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7755 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7756 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7757 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7758 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7759 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7760
7761 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7762 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7763 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7764 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7765 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7766 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7767 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7768 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7769 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7770 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7771 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7772 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7773 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7774 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7775 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7776 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7777
7778 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7779 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7780 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7781 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7782 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7783 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7784 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7785 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7786 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7787 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7788 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7789 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7790 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7791 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7792 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7793 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7794
7795 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7796 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7797 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7798 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7799 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7800 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7801 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7802 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7803
7804 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7805 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7806 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7807 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7808 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7809 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7810 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7811 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7812
7813 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7814 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7815 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7816 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7817 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7818 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7819 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7820 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7821 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7822 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7823 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7824 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7825 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7826 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7827 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7828 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7829
7830 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7831 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7832 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7833 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7834 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7835 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7836 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7837 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7838 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7839 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7840 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7841 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7842 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7843 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7844 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7845 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7846
7847 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7848 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7849 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7850 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7851 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7852 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7853 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7854 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7855 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7856 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7857 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7858 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7859 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7860 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7861 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7862 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7863
7864 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7865 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7866 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7867 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7868 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7869 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7870 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7871 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7872 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7873 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7874 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7875 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7876 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7877 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7878 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7879 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7880
7881 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7882 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7883 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7884 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7885 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7886 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7887 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7888 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7889 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7890 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7891 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7892 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7893 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7894 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7895 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7896 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7897
7898 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7899 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7900 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7901 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7902 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7903 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7904 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7905 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7906 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7907 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7908 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7909 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7910 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7911 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7912 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7913 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7914
7915 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7916 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7917 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7918 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7919 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7920 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7921 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7922 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7923 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7924 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7925 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7926 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7927 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7928 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7929 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7930 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7931
7932 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7933 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7934 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7935 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7936 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7937 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7938 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7939 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7940 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7941 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7942 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7943 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7944 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7945 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7946 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7947 /* 0xff */ IEMOP_X4(iemOp_ud0),
7948};
7949AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
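/* The map is presumably indexed as bOpcode * 4 + prefix index, with prefix
   index 0 = no prefix, 1 = 0x66, 2 = 0xf3 and 3 = 0xf2 (see the column
   comment at the top of the table); hence the 256 * 4 = 1024 entries. */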
7950
7951
7952/**
7953 * VEX opcode map \#1.
7954 *
7955 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
7956 *          it needs updating too when making changes.
7957 */
7958IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
7959{
7960 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7961 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7962 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7963 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7964 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7965 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7966 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7967 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7968 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7969 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7970 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7971 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7972 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
7973 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7974 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7975 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7976 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7977
7978 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7979 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7980 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7981 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7982 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7983 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7984 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7985 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7986 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7987 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7988 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7989 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7990 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7991 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7992 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7993 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7994
7995 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7996 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7997 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7998 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7999 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
8000 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
8001 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
8002 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
8003 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8004 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8005 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8006 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8007 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8008 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8009 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8010 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8011
8012 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
8013 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
8014 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
8015 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
8016 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
8017 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
8018 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
8019 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
8020 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8021 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8022 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8023 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8024 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8025 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8026 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8027 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8028
8029 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
8030 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
8031 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
8032 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
8033 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
8034 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
8035 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
8036 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
8037 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
8038 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
8039 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
8040 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
8041 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
8042 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
8043 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
8044 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
8045
8046 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8047 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8048 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8049 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8050 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8051 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8052 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8053 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8054 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8055 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8056 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8057 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8058 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8059 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8060 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8061 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8062
8063 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8064 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8065 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8066 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8067 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8068 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8069 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8070 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8071 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8072 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8073 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8074 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8075 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8076 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8077 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8078 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8079
8080 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8081 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8082 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8083 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8084 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8085 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8086 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8087 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8088 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
8089 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
8090 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
8091 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
8092 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8093 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8094 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8095 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8096
8097 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
8098 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
8099 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
8100 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
8101 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
8102 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
8103 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
8104 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
8105 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
8106 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
8107 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
8108 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
8109 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
8110 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
8111 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
8112 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
8113
8114 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
8115 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
8116 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
8117 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
8118 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
8119 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
8120 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
8121 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
8122 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
8123 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
8124 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
8125 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
8126 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
8127 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
8128 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
8129 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
8130
8131 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8132 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8133 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8134 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8135 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8136 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8137 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8138 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8139 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8140 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8141 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
8142 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
8143 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
8144 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
8145 /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
8146 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
8147
8148 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8149 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8150 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8151 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8152 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8153 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8154 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8155 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8156 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8157 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8158 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
8159 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
8160 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
8161 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
8162 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
8163 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
8164
8165 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8166 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8167 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8168 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8169 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8170 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8171 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8172 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8173 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8174 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8175 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
8176 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
8177 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
8178 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
8179 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
8180 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
8181
8182 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8183 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8184 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8185 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8186 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8187 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8188 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8189 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8190 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8191 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8192 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8193 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8194 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8195 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8196 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8197 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8198
8199 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8200 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8201 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8202 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8203 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8204 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8205 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8206 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8207 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8208 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8209 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8210 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8211 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8212 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8213 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8214 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8215
8216 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8217 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8218 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8219 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8220 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8221 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8222 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8223 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8224 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8225 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8226 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8227 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8228 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8229 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8230 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8231 /* 0xff */ IEMOP_X4(iemOp_ud0),
8232};
8233AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8234/** @} */
8235
8236
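/*
 * Editor's sketch (illustrative, not part of the original file): how a
 * four-entries-per-opcode dispatch table such as g_apfnTwoByteMap above is
 * typically consumed.  Each opcode row carries four handlers, one per
 * mandatory prefix (none, 0x66, 0xF3, 0xF2), which is why the table holds
 * 256 * 4 = 1024 entries (see the AssertCompile above) and why IEMOP_X4()
 * repeats one handler into all four prefix slots.  The helper name and the
 * idxPrefix parameter below are assumptions made for illustration; only the
 * indexing scheme is inferred from the table layout.
 */
#if 0 /* illustration only -- not compiled */
DECLINLINE(PFNIEMOP) iemSketchLookupTwoByte(uint8_t bOpcode, uint8_t idxPrefix)
{
    Assert(idxPrefix < 4); /* 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2 */
    return g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif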