VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 66313

Last change on this file was 66313, checked in by vboxsync, 8 years ago

bs3-cpu-generated-1,IEM: Introducing @opcodesub to more effectively deal with mod=3 vs mod!=3 encoding different instructions; hacked stuff to test unused mod encodings.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66313 2017-03-28 19:28:08Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */


/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for group 6 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}

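/*
 * Illustration only (a minimal sketch, not part of the original decoder): how
 * the X86_MODRM_* masks and shifts used by the group dispatchers here carve
 * up a ModRM byte.  The layout is architectural: mod = bits 7:6, reg =
 * bits 5:3, rm = bits 2:0; in 64-bit mode REX.R / REX.B (uRexReg / uRexB in
 * this file) extend the reg and rm fields to four bits.
 */
#if 0 /* example sketch, never compiled */
static void iemExampleDecodeModRm(uint8_t bRm)
{
    uint8_t const iMod = (bRm >> 6) & 3; /* 3 selects the register form, anything else a memory operand. */
    uint8_t const iReg = (bRm >> 3) & 7; /* a register - or, as in groups 6 and 7, an opcode extension. */
    uint8_t const iRm  = bRm & 7;        /* register or addressing-form selector. */
    NOREF(iMod); NOREF(iReg); NOREF(iRm);
}
#endif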

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}

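/*
 * Note on the IEM_MC_OR_LOCAL_U16 masking in iemOp_Grp7_smsw above: the OR
 * values model what older CPUs return in the reserved MSW bits.  A pre-386
 * target reports bits 4..15 as set (0xfff0), a 386 target reports bits 5..15
 * as set while passing CR0.ET through (0xffe0), and later targets simply
 * return the low word of CR0 unmodified.
 */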

/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored; everything is 16-bit and only
       the lower four bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

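/*
 * For the register forms dispatched above, ModRM.rm acts as a second-level
 * opcode, so each instruction has a fixed three-byte encoding.  A few worked
 * examples (ModRM = C0h | reg<<3 | rm, mod=3 throughout):
 *
 *      0F 01 C1  (reg=0, rm=1)  ->  vmcall
 *      0F 01 C8  (reg=1, rm=0)  ->  monitor
 *      0F 01 D0  (reg=2, rm=0)  ->  xgetbv
 *      0F 01 F8  (reg=7, rm=0)  ->  swapgs
 */
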
/** Common worker for lar and lsl (opcodes 0x0f 0x02 and 0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
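
/*
 * Note for iemOpCommonLarLsl_Gv_Ew above: the source operand is a selector,
 * so even with a 32/64-bit destination only a 16-bit memory read should be
 * performed - see the @todo testcase remark, which still wants this verified
 * against real hardware.
 */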



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
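
/*
 * Encoding note: 3DNow! instructions are formally 0F 0F /r ib, i.e. the
 * function (sub-opcode) byte trails the ModRM and displacement bytes.
 * Dispatching on the byte fetched straight after 0F 0F, as iemOp_3Dnow does
 * above, is a shortcut that only stays harmless while every worker is still
 * a stub.
 */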


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
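
/*
 * The @optest lines in the doc comment above are machine readable: each gives
 * an initial operand assignment and the expected outcome, so "op1=1 op2=2 ->
 * op1=2" reads "with op1 initially 1 and op2 set to 2, op1 must be 2
 * afterwards".  They are presumably consumed by the bs3-cpu-generated-1
 * testcase generator named in the commit message.
 */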


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
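
/*
 * The @opcodesub tag used in the doc comments above is what the commit
 * message introduces: it splits a single opcode byte into separate
 * instruction forms keyed on ModRM.mod, "11 mr/reg" being the register form
 * and "!11 mr/reg" the memory form - for 66h 0F 12 the memory form is movlpd
 * while the register form is undefined and raises #UD.
 */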


/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
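
/*
 * Example of the LOCK-prefixed CR8 encoding handled above (an AMD extension
 * for code outside 64-bit mode, gated on the fMovCr8In32Bit feature flag):
 *
 *      F0 0F 20 C0  ->  lock mov eax, cr0  ->  decoded as mov eax, cr8
 */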


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1626
1627
1628/** Opcode 0x0f 0x23. */
1629FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1630{
1631 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1632 IEMOP_HLP_MIN_386();
1633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1635 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1636 return IEMOP_RAISE_INVALID_OPCODE();
1637 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1638 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1639 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1640}
1641
1642
1643/** Opcode 0x0f 0x24. */
1644FNIEMOP_DEF(iemOp_mov_Rd_Td)
1645{
1646 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1647 /** @todo works on 386 and 486. */
1648 /* The RM byte is not considered, see testcase. */
1649 return IEMOP_RAISE_INVALID_OPCODE();
1650}
1651
1652
1653/** Opcode 0x0f 0x26. */
1654FNIEMOP_DEF(iemOp_mov_Td_Rd)
1655{
1656 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1657 /** @todo works on 386 and 486. */
1658 /* The RM byte is not considered, see testcase. */
1659 return IEMOP_RAISE_INVALID_OPCODE();
1660}
1661
1662
1663/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1664FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1665{
1666 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1668 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1669 {
1670 /*
1671 * Register, register.
1672 */
1673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1674 IEM_MC_BEGIN(0, 0);
1675 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1676 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1677 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1678 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1679 IEM_MC_ADVANCE_RIP();
1680 IEM_MC_END();
1681 }
1682 else
1683 {
1684 /*
1685 * Register, memory.
1686 */
1687 IEM_MC_BEGIN(0, 2);
1688 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1690
1691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1693 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1694 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1695
1696 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1697 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1698
1699 IEM_MC_ADVANCE_RIP();
1700 IEM_MC_END();
1701 }
1702 return VINF_SUCCESS;
1703}
1704
1705/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1706FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1707{
1708 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1710 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1711 {
1712 /*
1713 * Register, register.
1714 */
1715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1716 IEM_MC_BEGIN(0, 0);
1717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1719 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1720 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1721 IEM_MC_ADVANCE_RIP();
1722 IEM_MC_END();
1723 }
1724 else
1725 {
1726 /*
1727 * Register, memory.
1728 */
1729 IEM_MC_BEGIN(0, 2);
1730 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1732
1733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1735 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1736 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1737
1738 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1739 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1740
1741 IEM_MC_ADVANCE_RIP();
1742 IEM_MC_END();
1743 }
1744 return VINF_SUCCESS;
1745}
1746
1747/* Opcode 0xf3 0x0f 0x28 - invalid */
1748/* Opcode 0xf2 0x0f 0x28 - invalid */
1749
1750/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
1751FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1752{
1753 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1755 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1756 {
1757 /*
1758 * Register, register.
1759 */
1760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1761 IEM_MC_BEGIN(0, 0);
1762 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1763 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1764 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1765 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1766 IEM_MC_ADVANCE_RIP();
1767 IEM_MC_END();
1768 }
1769 else
1770 {
1771 /*
1772 * Memory, register.
1773 */
1774 IEM_MC_BEGIN(0, 2);
1775 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1777
1778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1780 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1781 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1782
1783 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1784 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1785
1786 IEM_MC_ADVANCE_RIP();
1787 IEM_MC_END();
1788 }
1789 return VINF_SUCCESS;
1790}
1791
1792/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
1793FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1794{
1795 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1797 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1798 {
1799 /*
1800 * Register, register.
1801 */
1802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1803 IEM_MC_BEGIN(0, 0);
1804 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1805 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1806 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1807 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1808 IEM_MC_ADVANCE_RIP();
1809 IEM_MC_END();
1810 }
1811 else
1812 {
1813 /*
1814 * Memory, register.
1815 */
1816 IEM_MC_BEGIN(0, 2);
1817 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1819
1820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1822 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1823 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1824
1825 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1826 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1827
1828 IEM_MC_ADVANCE_RIP();
1829 IEM_MC_END();
1830 }
1831 return VINF_SUCCESS;
1832}
1833
1834/* Opcode 0xf3 0x0f 0x29 - invalid */
1835/* Opcode 0xf2 0x0f 0x29 - invalid */
1836
1837
1838/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1839FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1840/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1841FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1842/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1843FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
1844/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1845FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
1846
1847
1848/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
1849FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1850{
1851 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1853 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1854 {
1855 /*
1856         * Memory, register.
1857 */
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1868 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 /* The register, register encoding is invalid. */
1874 else
1875 return IEMOP_RAISE_INVALID_OPCODE();
1876 return VINF_SUCCESS;
1877}
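/* Note on the mod=3 rejection above: the movnt* instructions are non-temporal
 * store hints and are only defined with a memory destination, so the
 * register, register encoding decodes to #UD. */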
1878
1879/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
1880FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1881{
1882    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1884 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1885 {
1886 /*
1887         * Memory, register.
1888 */
1889 IEM_MC_BEGIN(0, 2);
1890 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1892
1893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1895 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1896 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1897
1898 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1899 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1900
1901 IEM_MC_ADVANCE_RIP();
1902 IEM_MC_END();
1903 }
1904 /* The register, register encoding is invalid. */
1905 else
1906 return IEMOP_RAISE_INVALID_OPCODE();
1907 return VINF_SUCCESS;
1908}
1909/* Opcode 0xf3 0x0f 0x2b - invalid */
1910/* Opcode 0xf2 0x0f 0x2b - invalid */
1911
1912
1913/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1914FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1915/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1916FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1917/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1918FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1919/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1920FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1921
1922/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1923FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1924/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1925FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1926/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1927FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1928/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1929FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1930
1931/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1932FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1933/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1934FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1935/* Opcode 0xf3 0x0f 0x2e - invalid */
1936/* Opcode 0xf2 0x0f 0x2e - invalid */
1937
1938/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
1939FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1940/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
1941FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1942/* Opcode 0xf3 0x0f 0x2f - invalid */
1943/* Opcode 0xf2 0x0f 0x2f - invalid */
1944
1945/** Opcode 0x0f 0x30. */
1946FNIEMOP_DEF(iemOp_wrmsr)
1947{
1948 IEMOP_MNEMONIC(wrmsr, "wrmsr");
1949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1950 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
1951}
1952
1953
1954/** Opcode 0x0f 0x31. */
1955FNIEMOP_DEF(iemOp_rdtsc)
1956{
1957 IEMOP_MNEMONIC(rdtsc, "rdtsc");
1958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1959 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
1960}
1961
1962
1963/** Opcode 0x0f 0x32. */
1964FNIEMOP_DEF(iemOp_rdmsr)
1965{
1966 IEMOP_MNEMONIC(rdmsr, "rdmsr");
1967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1968 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
1969}
1970
1971
1972/** Opcode 0x0f 0x33. */
1973FNIEMOP_STUB(iemOp_rdpmc);
1974/** Opcode 0x0f 0x34. */
1975FNIEMOP_STUB(iemOp_sysenter);
1976/** Opcode 0x0f 0x35. */
1977FNIEMOP_STUB(iemOp_sysexit);
1978/** Opcode 0x0f 0x37. */
1979FNIEMOP_STUB(iemOp_getsec);
1980/** Opcode 0x0f 0x38. */
1981FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1982/** Opcode 0x0f 0x3a. */
1983FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1984
1985
1986/**
1987 * Implements a conditional move.
1988 *
1989 * Wish there was an obvious way to do this where we could share and reduce
1990 * code bloat.
1991 *
1992 * @param a_Cnd The conditional "microcode" operation.
1993 */
1994#define CMOV_X(a_Cnd) \
1995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1996 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1997 { \
1998 switch (pVCpu->iem.s.enmEffOpSize) \
1999 { \
2000 case IEMMODE_16BIT: \
2001 IEM_MC_BEGIN(0, 1); \
2002 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2003 a_Cnd { \
2004 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2005 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2006 } IEM_MC_ENDIF(); \
2007 IEM_MC_ADVANCE_RIP(); \
2008 IEM_MC_END(); \
2009 return VINF_SUCCESS; \
2010 \
2011 case IEMMODE_32BIT: \
2012 IEM_MC_BEGIN(0, 1); \
2013 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2014 a_Cnd { \
2015 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2016 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2017 } IEM_MC_ELSE() { \
2018 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2019 } IEM_MC_ENDIF(); \
2020 IEM_MC_ADVANCE_RIP(); \
2021 IEM_MC_END(); \
2022 return VINF_SUCCESS; \
2023 \
2024 case IEMMODE_64BIT: \
2025 IEM_MC_BEGIN(0, 1); \
2026 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2027 a_Cnd { \
2028 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2029 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2030 } IEM_MC_ENDIF(); \
2031 IEM_MC_ADVANCE_RIP(); \
2032 IEM_MC_END(); \
2033 return VINF_SUCCESS; \
2034 \
2035 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2036 } \
2037 } \
2038 else \
2039 { \
2040 switch (pVCpu->iem.s.enmEffOpSize) \
2041 { \
2042 case IEMMODE_16BIT: \
2043 IEM_MC_BEGIN(0, 2); \
2044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2045 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2047 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2048 a_Cnd { \
2049 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2050 } IEM_MC_ENDIF(); \
2051 IEM_MC_ADVANCE_RIP(); \
2052 IEM_MC_END(); \
2053 return VINF_SUCCESS; \
2054 \
2055 case IEMMODE_32BIT: \
2056 IEM_MC_BEGIN(0, 2); \
2057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2058 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2060 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2061 a_Cnd { \
2062 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2063 } IEM_MC_ELSE() { \
2064 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2065 } IEM_MC_ENDIF(); \
2066 IEM_MC_ADVANCE_RIP(); \
2067 IEM_MC_END(); \
2068 return VINF_SUCCESS; \
2069 \
2070 case IEMMODE_64BIT: \
2071 IEM_MC_BEGIN(0, 2); \
2072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2073 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2075 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2076 a_Cnd { \
2077 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2078 } IEM_MC_ENDIF(); \
2079 IEM_MC_ADVANCE_RIP(); \
2080 IEM_MC_END(); \
2081 return VINF_SUCCESS; \
2082 \
2083 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2084 } \
2085 } do {} while (0)
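/* What CMOV_X boils down to for the 32-bit register, register case, sketched
 * as plain C (illustration only; the IEM_MC_* microcode above is what runs):
 *
 *      if (fCondition)
 *          uDst = uSrc32;              // move and zero-extend to 64 bits
 *      else
 *          uDst &= UINT32_MAX;         // high half is cleared all the same
 *
 * In 64-bit mode a 32-bit CMOV always writes its destination; only the value
 * transferred is conditional. That is why the 32-bit cases carry the
 * IEM_MC_ELSE / IEM_MC_CLEAR_HIGH_GREG_U64 branch while the 16-bit and
 * 64-bit cases do not.
 */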
2086
2087
2088
2089/** Opcode 0x0f 0x40. */
2090FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2091{
2092 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2093 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2094}
2095
2096
2097/** Opcode 0x0f 0x41. */
2098FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2099{
2100 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2101 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2102}
2103
2104
2105/** Opcode 0x0f 0x42. */
2106FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2107{
2108 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2109 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2110}
2111
2112
2113/** Opcode 0x0f 0x43. */
2114FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2115{
2116 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2117 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2118}
2119
2120
2121/** Opcode 0x0f 0x44. */
2122FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2123{
2124 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2125 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2126}
2127
2128
2129/** Opcode 0x0f 0x45. */
2130FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2131{
2132 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2133 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2134}
2135
2136
2137/** Opcode 0x0f 0x46. */
2138FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2139{
2140 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2141 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2142}
2143
2144
2145/** Opcode 0x0f 0x47. */
2146FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2147{
2148 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2149 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2150}
2151
2152
2153/** Opcode 0x0f 0x48. */
2154FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2155{
2156 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2157 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2158}
2159
2160
2161/** Opcode 0x0f 0x49. */
2162FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2163{
2164 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2165 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2166}
2167
2168
2169/** Opcode 0x0f 0x4a. */
2170FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2171{
2172 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2173 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2174}
2175
2176
2177/** Opcode 0x0f 0x4b. */
2178FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2179{
2180 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2181 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2182}
2183
2184
2185/** Opcode 0x0f 0x4c. */
2186FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2187{
2188 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2189 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2190}
2191
2192
2193/** Opcode 0x0f 0x4d. */
2194FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2195{
2196 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2197 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2198}
2199
2200
2201/** Opcode 0x0f 0x4e. */
2202FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2203{
2204 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2205 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2206}
2207
2208
2209/** Opcode 0x0f 0x4f. */
2210FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2211{
2212 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2213 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2214}
2215
2216#undef CMOV_X
2217
2218/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2219FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2220/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2221FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2222/* Opcode 0xf3 0x0f 0x50 - invalid */
2223/* Opcode 0xf2 0x0f 0x50 - invalid */
2224
2225/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2226FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2227/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2228FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2229/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2230FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2231/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2232FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2233
2234/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2235FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2236/* Opcode 0x66 0x0f 0x52 - invalid */
2237/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2238FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2239/* Opcode 0xf2 0x0f 0x52 - invalid */
2240
2241/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2242FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2243/* Opcode 0x66 0x0f 0x53 - invalid */
2244/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2245FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2246/* Opcode 0xf2 0x0f 0x53 - invalid */
2247
2248/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2249FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2250/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2251FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2252/* Opcode 0xf3 0x0f 0x54 - invalid */
2253/* Opcode 0xf2 0x0f 0x54 - invalid */
2254
2255/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2256FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2257/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2258FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2259/* Opcode 0xf3 0x0f 0x55 - invalid */
2260/* Opcode 0xf2 0x0f 0x55 - invalid */
2261
2262/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2263FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2264/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2265FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2266/* Opcode 0xf3 0x0f 0x56 - invalid */
2267/* Opcode 0xf2 0x0f 0x56 - invalid */
2268
2269/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2270FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2271/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2272FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2273/* Opcode 0xf3 0x0f 0x57 - invalid */
2274/* Opcode 0xf2 0x0f 0x57 - invalid */
2275
2276/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2277FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2278/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2279FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2280/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2281FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2282/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2283FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2284
2285/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2286FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2287/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2288FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2289/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2290FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2291/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2292FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2293
2294/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2295FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2296/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2297FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2298/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2299FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2300/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2301FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2302
2303/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2304FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2305/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2306FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2307/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2308FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2309/* Opcode 0xf2 0x0f 0x5b - invalid */
2310
2311/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2312FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2313/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2314FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2315/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2316FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2317/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2318FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2319
2320/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2321FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2322/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2323FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2324/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2325FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2326/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2327FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2328
2329/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2330FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2331/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2332FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2333/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2334FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2335/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2336FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2337
2338/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2339FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2340/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2341FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2342/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2343FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2344/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2345FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2346
2347/**
2348 * Common worker for SSE2 instructions on the forms:
2349 *      pxxxx xmm1, xmm2/mem128
2350 *
2351 * The 2nd operand is the lower half of a register, which in the memory case
2352 * means a 64-bit access that must be 128-bit aligned; only the low 64 bits
2353 * of the 128-bit operand are actually read.
2354 *
2355 * Exceptions type 4.
2356 */
2357FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2358{
2359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2360 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2361 {
2362 /*
2363 * Register, register.
2364 */
2365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2366 IEM_MC_BEGIN(2, 0);
2367 IEM_MC_ARG(uint128_t *, pDst, 0);
2368 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2369 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2370 IEM_MC_PREPARE_SSE_USAGE();
2371 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2372 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2373 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2374 IEM_MC_ADVANCE_RIP();
2375 IEM_MC_END();
2376 }
2377 else
2378 {
2379 /*
2380 * Register, memory.
2381 */
2382 IEM_MC_BEGIN(2, 2);
2383 IEM_MC_ARG(uint128_t *, pDst, 0);
2384 IEM_MC_LOCAL(uint64_t, uSrc);
2385 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2387
2388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2390 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2391 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2392
2393 IEM_MC_PREPARE_SSE_USAGE();
2394 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2395 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2396
2397 IEM_MC_ADVANCE_RIP();
2398 IEM_MC_END();
2399 }
2400 return VINF_SUCCESS;
2401}
2402
2403
2404/**
2405 * Common worker for MMX instructions on the forms:
2406 *      pxxxx mm1, mm2/mem32
2407 *
2408 * The 2nd operand is the lower half of a register, which in the memory case
2409 * means a plain 32-bit memory access.
2410 *
2411 * Exceptions type 4.
2412 */
2413FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2415{
2416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2417 if (!pImpl->pfnU64)
2418 return IEMOP_RAISE_INVALID_OPCODE();
2419 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2420 {
2421 /*
2422 * Register, register.
2423 */
2424 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2425 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2427 IEM_MC_BEGIN(2, 0);
2428 IEM_MC_ARG(uint64_t *, pDst, 0);
2429 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2430 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2431 IEM_MC_PREPARE_FPU_USAGE();
2432 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2433 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2434 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2435 IEM_MC_ADVANCE_RIP();
2436 IEM_MC_END();
2437 }
2438 else
2439 {
2440 /*
2441 * Register, memory.
2442 */
2443 IEM_MC_BEGIN(2, 2);
2444 IEM_MC_ARG(uint64_t *, pDst, 0);
2445 IEM_MC_LOCAL(uint32_t, uSrc);
2446 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2448
2449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2451 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2452 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2453
2454 IEM_MC_PREPARE_FPU_USAGE();
2455 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2456 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2457
2458 IEM_MC_ADVANCE_RIP();
2459 IEM_MC_END();
2460 }
2461 return VINF_SUCCESS;
2462}
2463
2464
2465/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2466FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2467{
2468 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2469 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2470}
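/* The low-low byte interleave done by g_iemAImpl_punpcklbw, sketched as C
 * (assumed helper semantics, shown for illustration only):
 *
 *      uint8_t const abDst[4] = { mm1 bytes 0..3 };
 *      uint8_t const abSrc[4] = { mm2/mem32 bytes 0..3 };
 *      for (unsigned i = 0; i < 4; i++)
 *      {
 *          uDst.au8[2 * i]     = abDst[i];
 *          uDst.au8[2 * i + 1] = abSrc[i];
 *      }
 *
 * Only the low dword of the source is consumed, which is why the memory form
 * of the MMX worker gets away with a 32-bit fetch.
 */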
2471
2472/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2473FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2474{
2475 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2476    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2477}
2478
2479/* Opcode 0xf3 0x0f 0x60 - invalid */
2480
2481
2482/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2483FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2484{
2485    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
2486 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2487}
2488
2489/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2490FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2491{
2492 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2493 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2494}
2495
2496/* Opcode 0xf3 0x0f 0x61 - invalid */
2497
2498
2499/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2500FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2501{
2502 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2503 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2504}
2505
2506/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2507FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2508{
2509 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2510 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2511}
2512
2513/* Opcode 0xf3 0x0f 0x62 - invalid */
2514
2515
2516
2517/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2518FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2519/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2520FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2521/* Opcode 0xf3 0x0f 0x63 - invalid */
2522
2523/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2524FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2525/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2526FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2527/* Opcode 0xf3 0x0f 0x64 - invalid */
2528
2529/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2530FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2531/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2532FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2533/* Opcode 0xf3 0x0f 0x65 - invalid */
2534
2535/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2536FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2537/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2538FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2539/* Opcode 0xf3 0x0f 0x66 - invalid */
2540
2541/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2542FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2543/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2544FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2545/* Opcode 0xf3 0x0f 0x67 - invalid */
2546
2547
2548/**
2549 * Common worker for MMX instructions on the form:
2550 * pxxxx mm1, mm2/mem64
2551 *
2552 * The 2nd operand is the upper half of a register, which in the memory
2553 * case means a plain 64-bit memory access; the helper then uses the upper
2554 * half of the fetched qword.
2555 *
2556 * Exceptions type 4.
2557 */
2558FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2559{
2560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2561 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2563 {
2564 /*
2565 * Register, register.
2566 */
2567 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2568 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2570 IEM_MC_BEGIN(2, 0);
2571 IEM_MC_ARG(uint64_t *, pDst, 0);
2572 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2573 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2574 IEM_MC_PREPARE_FPU_USAGE();
2575 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2576 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2577 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 }
2581 else
2582 {
2583 /*
2584 * Register, memory.
2585 */
2586 IEM_MC_BEGIN(2, 2);
2587 IEM_MC_ARG(uint64_t *, pDst, 0);
2588 IEM_MC_LOCAL(uint64_t, uSrc);
2589 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2591
2592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2594 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2595 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2596
2597 IEM_MC_PREPARE_FPU_USAGE();
2598 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2599 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2600
2601 IEM_MC_ADVANCE_RIP();
2602 IEM_MC_END();
2603 }
2604 return VINF_SUCCESS;
2605}
2606
2607
2608/**
2609 * Common worker for SSE2 instructions on the form:
2610 * pxxxx xmm1, xmm2/mem128
2611 *
2612 * The 2nd operand is the upper half of a register, which in the memory
2613 * case means a 128-bit aligned access; the implementation may read the
2614 * full 128 bits or only the upper 64.
2615 *
2616 * Exceptions type 4.
2617 */
2618FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2619{
2620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2622 {
2623 /*
2624 * Register, register.
2625 */
2626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2627 IEM_MC_BEGIN(2, 0);
2628 IEM_MC_ARG(uint128_t *, pDst, 0);
2629 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2630 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2631 IEM_MC_PREPARE_SSE_USAGE();
2632 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2633 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2634 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2635 IEM_MC_ADVANCE_RIP();
2636 IEM_MC_END();
2637 }
2638 else
2639 {
2640 /*
2641 * Register, memory.
2642 */
2643 IEM_MC_BEGIN(2, 2);
2644 IEM_MC_ARG(uint128_t *, pDst, 0);
2645 IEM_MC_LOCAL(uint128_t, uSrc);
2646 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2648
2649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2651 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2652        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2653
2654 IEM_MC_PREPARE_SSE_USAGE();
2655 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2656 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2657
2658 IEM_MC_ADVANCE_RIP();
2659 IEM_MC_END();
2660 }
2661 return VINF_SUCCESS;
2662}
2663
2664
2665/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2666FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2667{
2668 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2669 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2670}
2671
2672/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2673FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2674{
2675 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2676 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2677}
2678/* Opcode 0xf3 0x0f 0x68 - invalid */
2679
2680
2681/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2682FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2683{
2684 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2685 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2686}
2687
2688/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2689FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2690{
2691 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2692 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2693
2694}
2695/* Opcode 0xf3 0x0f 0x69 - invalid */
2696
2697
2698/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2699FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2700{
2701 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2702 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2703}
2704
2705/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2706FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2707{
2708 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2709 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2710}
2711/* Opcode 0xf3 0x0f 0x6a - invalid */
2712
2713
2714/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2715FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2716/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2717FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2718/* Opcode 0xf3 0x0f 0x6b - invalid */
2719
2720
2721/* Opcode 0x0f 0x6c - invalid */
2722
2723/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2724FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2725{
2726 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2727 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2728}
2729
2730/* Opcode 0xf3 0x0f 0x6c - invalid */
2731/* Opcode 0xf2 0x0f 0x6c - invalid */
2732
2733
2734/* Opcode 0x0f 0x6d - invalid */
2735
2736/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2737FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2738{
2739    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2740 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2741}
2742
2743/* Opcode 0xf3 0x0f 0x6d - invalid */
2744
2745
2746/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2747FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2748{
2749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2750 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2751 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2752 else
2753 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2754 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2755 {
2756 /* MMX, greg */
2757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2758 IEM_MC_BEGIN(0, 1);
2759 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2760 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2761 IEM_MC_LOCAL(uint64_t, u64Tmp);
2762 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2763 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2764 else
2765 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2766 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2767 IEM_MC_ADVANCE_RIP();
2768 IEM_MC_END();
2769 }
2770 else
2771 {
2772 /* MMX, [mem] */
2773 IEM_MC_BEGIN(0, 2);
2774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2775 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2776        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2778 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2779 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2780 {
2781 IEM_MC_LOCAL(uint64_t, u64Tmp);
2782 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2783 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2784 }
2785 else
2786 {
2787 IEM_MC_LOCAL(uint32_t, u32Tmp);
2788 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2789 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2790 }
2791 IEM_MC_ADVANCE_RIP();
2792 IEM_MC_END();
2793 }
2794 return VINF_SUCCESS;
2795}
2796
2797/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2798FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2799{
2800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2801 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2802 IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
2803 else
2804 IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
2805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2806 {
2807 /* XMM, greg*/
2808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2809 IEM_MC_BEGIN(0, 1);
2810 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2811 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2812 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2813 {
2814 IEM_MC_LOCAL(uint64_t, u64Tmp);
2815 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2816 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2817 }
2818 else
2819 {
2820 IEM_MC_LOCAL(uint32_t, u32Tmp);
2821 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2822 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2823 }
2824 IEM_MC_ADVANCE_RIP();
2825 IEM_MC_END();
2826 }
2827 else
2828 {
2829 /* XMM, [mem] */
2830 IEM_MC_BEGIN(0, 2);
2831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2832 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2833        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2836 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2837 {
2838 IEM_MC_LOCAL(uint64_t, u64Tmp);
2839 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2840 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2841 }
2842 else
2843 {
2844 IEM_MC_LOCAL(uint32_t, u32Tmp);
2845 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2846 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2847 }
2848 IEM_MC_ADVANCE_RIP();
2849 IEM_MC_END();
2850 }
2851 return VINF_SUCCESS;
2852}
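/* Encoding illustration for the two mnemonics above:
 *
 *      66 48 0F 6E C0      ; movq xmm0, rax  (REX.W set -> 64-bit GPR fetch)
 *      66 0F 6E C0         ; movd xmm0, eax  (32-bit fetch)
 *
 * Either way the value is zero-extended all the way to bit 127 of the
 * destination via IEM_MC_STORE_XREG_U64_ZX_U128 / _U32_ZX_U128.
 */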
2853
2854/* Opcode 0xf3 0x0f 0x6e - invalid */
2855
2856
2857/** Opcode 0x0f 0x6f - movq Pq, Qq */
2858FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2859{
2860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2861 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2862 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2863 {
2864 /*
2865 * Register, register.
2866 */
2867 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2868 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2870 IEM_MC_BEGIN(0, 1);
2871 IEM_MC_LOCAL(uint64_t, u64Tmp);
2872 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2873 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2874 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2875 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2876 IEM_MC_ADVANCE_RIP();
2877 IEM_MC_END();
2878 }
2879 else
2880 {
2881 /*
2882 * Register, memory.
2883 */
2884 IEM_MC_BEGIN(0, 2);
2885 IEM_MC_LOCAL(uint64_t, u64Tmp);
2886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2887
2888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2890 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2891 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2892 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2893 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2894
2895 IEM_MC_ADVANCE_RIP();
2896 IEM_MC_END();
2897 }
2898 return VINF_SUCCESS;
2899}
2900
2901/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2902FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2903{
2904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2905 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2906 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2907 {
2908 /*
2909 * Register, register.
2910 */
2911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2912 IEM_MC_BEGIN(0, 0);
2913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2914 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2915 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2916 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2917 IEM_MC_ADVANCE_RIP();
2918 IEM_MC_END();
2919 }
2920 else
2921 {
2922 /*
2923 * Register, memory.
2924 */
2925 IEM_MC_BEGIN(0, 2);
2926 IEM_MC_LOCAL(uint128_t, u128Tmp);
2927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2928
2929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2931 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2932 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2933 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2934 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2935
2936 IEM_MC_ADVANCE_RIP();
2937 IEM_MC_END();
2938 }
2939 return VINF_SUCCESS;
2940}
2941
2942/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2943FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2944{
2945 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2946 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2947 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2948 {
2949 /*
2950 * Register, register.
2951 */
2952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2953 IEM_MC_BEGIN(0, 0);
2954 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2955 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2956 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2957 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2958 IEM_MC_ADVANCE_RIP();
2959 IEM_MC_END();
2960 }
2961 else
2962 {
2963 /*
2964 * Register, memory.
2965 */
2966 IEM_MC_BEGIN(0, 2);
2967 IEM_MC_LOCAL(uint128_t, u128Tmp);
2968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2969
2970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2972 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2973 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2974 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2975 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2976
2977 IEM_MC_ADVANCE_RIP();
2978 IEM_MC_END();
2979 }
2980 return VINF_SUCCESS;
2981}
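/* The only material difference from the movdqa path above is the memory
 * fetch: IEM_MC_FETCH_MEM_U128 here vs. IEM_MC_FETCH_MEM_U128_ALIGN_SSE
 * there, i.e. movdqu accepts any source address while movdqa raises #GP(0)
 * on a misaligned 16-byte operand. */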
2982
2983
2984/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2985FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2986{
2987 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2988 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2989 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2990 {
2991 /*
2992 * Register, register.
2993 */
2994 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2996
2997 IEM_MC_BEGIN(3, 0);
2998 IEM_MC_ARG(uint64_t *, pDst, 0);
2999 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3000 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3001 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3002 IEM_MC_PREPARE_FPU_USAGE();
3003 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3004 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3005 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3006 IEM_MC_ADVANCE_RIP();
3007 IEM_MC_END();
3008 }
3009 else
3010 {
3011 /*
3012 * Register, memory.
3013 */
3014 IEM_MC_BEGIN(3, 2);
3015 IEM_MC_ARG(uint64_t *, pDst, 0);
3016 IEM_MC_LOCAL(uint64_t, uSrc);
3017 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3019
3020        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* the imm8 still follows the ModRM bytes */
3021 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3022 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3024 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3025
3026 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3027 IEM_MC_PREPARE_FPU_USAGE();
3028 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3029 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3030
3031 IEM_MC_ADVANCE_RIP();
3032 IEM_MC_END();
3033 }
3034 return VINF_SUCCESS;
3035}
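/* The word shuffle performed by iemAImpl_pshufw, sketched as C (assumed
 * helper semantics, for illustration):
 *
 *      for (unsigned i = 0; i < 4; i++)
 *          uDst.au16[i] = uSrc.au16[(bEvil >> (2 * i)) & 3];
 *
 * e.g. an immediate of 0x1b (binary 00 01 10 11) reverses the four words.
 */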
3036
3037/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
3038FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3039{
3040 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
3041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3042 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3043 {
3044 /*
3045 * Register, register.
3046 */
3047 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3049
3050 IEM_MC_BEGIN(3, 0);
3051 IEM_MC_ARG(uint128_t *, pDst, 0);
3052 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3053 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3054 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3055 IEM_MC_PREPARE_SSE_USAGE();
3056 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3057 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3058 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3059 IEM_MC_ADVANCE_RIP();
3060 IEM_MC_END();
3061 }
3062 else
3063 {
3064 /*
3065 * Register, memory.
3066 */
3067 IEM_MC_BEGIN(3, 2);
3068 IEM_MC_ARG(uint128_t *, pDst, 0);
3069 IEM_MC_LOCAL(uint128_t, uSrc);
3070 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3072
3073        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3074 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3075 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3077 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3078
3079 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3080 IEM_MC_PREPARE_SSE_USAGE();
3081 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3082 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3083
3084 IEM_MC_ADVANCE_RIP();
3085 IEM_MC_END();
3086 }
3087 return VINF_SUCCESS;
3088}
3089
3090/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
3091FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3092{
3093 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
3094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3095 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3096 {
3097 /*
3098 * Register, register.
3099 */
3100 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3102
3103 IEM_MC_BEGIN(3, 0);
3104 IEM_MC_ARG(uint128_t *, pDst, 0);
3105 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3106 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3107 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3108 IEM_MC_PREPARE_SSE_USAGE();
3109 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3110 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3111 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3112 IEM_MC_ADVANCE_RIP();
3113 IEM_MC_END();
3114 }
3115 else
3116 {
3117 /*
3118 * Register, memory.
3119 */
3120 IEM_MC_BEGIN(3, 2);
3121 IEM_MC_ARG(uint128_t *, pDst, 0);
3122 IEM_MC_LOCAL(uint128_t, uSrc);
3123 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3125
3126        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3127 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3128 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3130 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3131
3132 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3133 IEM_MC_PREPARE_SSE_USAGE();
3134 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3135 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3136
3137 IEM_MC_ADVANCE_RIP();
3138 IEM_MC_END();
3139 }
3140 return VINF_SUCCESS;
3141}
3142
3143/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
3144FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3145{
3146 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
3147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3148 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3149 {
3150 /*
3151 * Register, register.
3152 */
3153 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3155
3156 IEM_MC_BEGIN(3, 0);
3157 IEM_MC_ARG(uint128_t *, pDst, 0);
3158 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3159 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3160 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3161 IEM_MC_PREPARE_SSE_USAGE();
3162 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3163 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3164 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3165 IEM_MC_ADVANCE_RIP();
3166 IEM_MC_END();
3167 }
3168 else
3169 {
3170 /*
3171 * Register, memory.
3172 */
3173 IEM_MC_BEGIN(3, 2);
3174 IEM_MC_ARG(uint128_t *, pDst, 0);
3175 IEM_MC_LOCAL(uint128_t, uSrc);
3176 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3178
3179        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3180 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3181 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3183 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3184
3185 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3186 IEM_MC_PREPARE_SSE_USAGE();
3187 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3188 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3189
3190 IEM_MC_ADVANCE_RIP();
3191 IEM_MC_END();
3192 }
3193 return VINF_SUCCESS;
3194}
3195
3196
3197/** Opcode 0x0f 0x71 11/2. */
3198FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3199
3200/** Opcode 0x66 0x0f 0x71 11/2. */
3201FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3202
3203/** Opcode 0x0f 0x71 11/4. */
3204FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3205
3206/** Opcode 0x66 0x0f 0x71 11/4. */
3207FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3208
3209/** Opcode 0x0f 0x71 11/6. */
3210FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3211
3212/** Opcode 0x66 0x0f 0x71 11/6. */
3213FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3214
3215
3216/**
3217 * Group 12 jump table for register variant.
3218 */
3219IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3220{
3221 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3222 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3223 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3224 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3225 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3226 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3227 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3228 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3229};
3230AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3231
3232
3233/** Opcode 0x0f 0x71. */
3234FNIEMOP_DEF(iemOp_Grp12)
3235{
3236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3237 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3238 /* register, register */
3239 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3240 + pVCpu->iem.s.idxPrefix], bRm);
3241 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3242}
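/* Dispatch illustration for the table above: the index is reg * 4 + idxPrefix,
 * the four columns per row corresponding to no prefix, 0x66, 0xf3 and 0xf2.
 * So for
 *
 *      66 0F 71 E3 04      ; bRm = 0xE3 -> mod=3, reg=4, rm=3
 *
 * the index is 4*4 + 1 = 17, selecting iemOp_Grp12_vpsraw_Hx_Ux_Ib,
 * i.e. psraw xmm3, 4.
 */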
3243
3244
3245/** Opcode 0x0f 0x72 11/2. */
3246FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3247
3248/** Opcode 0x66 0x0f 0x72 11/2. */
3249FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3250
3251/** Opcode 0x0f 0x72 11/4. */
3252FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3253
3254/** Opcode 0x66 0x0f 0x72 11/4. */
3255FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3256
3257/** Opcode 0x0f 0x72 11/6. */
3258FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3259
3260/** Opcode 0x66 0x0f 0x72 11/6. */
3261FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3262
3263
3264/**
3265 * Group 13 jump table for register variant.
3266 */
3267IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3268{
3269 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3270 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3271 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3272 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3273 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3274 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3275 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3276 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3277};
3278AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3279
3280/** Opcode 0x0f 0x72. */
3281FNIEMOP_DEF(iemOp_Grp13)
3282{
3283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3284 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3285 /* register, register */
3286 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3287 + pVCpu->iem.s.idxPrefix], bRm);
3288 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3289}
3290
3291
3292/** Opcode 0x0f 0x73 11/2. */
3293FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3294
3295/** Opcode 0x66 0x0f 0x73 11/2. */
3296FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3297
3298/** Opcode 0x66 0x0f 0x73 11/3. */
3299FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3300
3301/** Opcode 0x0f 0x73 11/6. */
3302FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3303
3304/** Opcode 0x66 0x0f 0x73 11/6. */
3305FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3306
3307/** Opcode 0x66 0x0f 0x73 11/7. */
3308FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3309
3310/**
3311 * Group 14 jump table for register variant.
3312 */
3313IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3314{
3315 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3316 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3317 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3318 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3319 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3320 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3321 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3322 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3323};
3324AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3325
3326
3327/** Opcode 0x0f 0x73. */
3328FNIEMOP_DEF(iemOp_Grp14)
3329{
3330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3332 /* register, register */
3333 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3334 + pVCpu->iem.s.idxPrefix], bRm);
3335 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3336}
3337
3338
3339/**
3340 * Common worker for MMX instructions on the form:
3341 * pxxx mm1, mm2/mem64
3342 */
3343FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3344{
3345 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3346 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3347 {
3348 /*
3349 * Register, register.
3350 */
3351 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3352 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3354 IEM_MC_BEGIN(2, 0);
3355 IEM_MC_ARG(uint64_t *, pDst, 0);
3356 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3357 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3358 IEM_MC_PREPARE_FPU_USAGE();
3359 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3360 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3361 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3362 IEM_MC_ADVANCE_RIP();
3363 IEM_MC_END();
3364 }
3365 else
3366 {
3367 /*
3368 * Register, memory.
3369 */
3370 IEM_MC_BEGIN(2, 2);
3371 IEM_MC_ARG(uint64_t *, pDst, 0);
3372 IEM_MC_LOCAL(uint64_t, uSrc);
3373 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3375
3376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3378 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3379 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3380
3381 IEM_MC_PREPARE_FPU_USAGE();
3382 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3383 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3384
3385 IEM_MC_ADVANCE_RIP();
3386 IEM_MC_END();
3387 }
3388 return VINF_SUCCESS;
3389}
3390
3391
3392/**
3393 * Common worker for SSE2 instructions on the forms:
3394 * pxxx xmm1, xmm2/mem128
3395 *
3396 * Proper alignment of the 128-bit operand is enforced.
3397 * Exceptions type 4. SSE2 cpuid checks.
3398 */
3399FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3400{
3401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3402 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3403 {
3404 /*
3405 * Register, register.
3406 */
3407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3408 IEM_MC_BEGIN(2, 0);
3409 IEM_MC_ARG(uint128_t *, pDst, 0);
3410 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3411 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3412 IEM_MC_PREPARE_SSE_USAGE();
3413 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3414 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3415 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3416 IEM_MC_ADVANCE_RIP();
3417 IEM_MC_END();
3418 }
3419 else
3420 {
3421 /*
3422 * Register, memory.
3423 */
3424 IEM_MC_BEGIN(2, 2);
3425 IEM_MC_ARG(uint128_t *, pDst, 0);
3426 IEM_MC_LOCAL(uint128_t, uSrc);
3427 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3429
3430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3432 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3433 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3434
3435 IEM_MC_PREPARE_SSE_USAGE();
3436 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3437 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3438
3439 IEM_MC_ADVANCE_RIP();
3440 IEM_MC_END();
3441 }
3442 return VINF_SUCCESS;
3443}
3444
3445
3446/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3447FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3448{
3449 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3450 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3451}
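/* What g_iemAImpl_pcmpeqb computes, sketched as C (assumed helper semantics,
 * for illustration; 8 bytes for the MMX form, 16 for the SSE2 form below):
 *
 *      for (unsigned i = 0; i < cbOp; i++)
 *          puDst->au8[i] = puDst->au8[i] == puSrc->au8[i] ? 0xff : 0x00;
 */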
3452
3453/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3454FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3455{
3456 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3457 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3458}
3459
3460/* Opcode 0xf3 0x0f 0x74 - invalid */
3461/* Opcode 0xf2 0x0f 0x74 - invalid */
3462
3463
3464/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3465FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3466{
3467 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3468 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3469}
3470
3471/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3472FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3473{
3474 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3475 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3476}
3477
3478/* Opcode 0xf3 0x0f 0x75 - invalid */
3479/* Opcode 0xf2 0x0f 0x75 - invalid */
3480
3481
3482/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3483FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3484{
3485 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3486 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3487}
3488
3489/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3490FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3491{
3492 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3493 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3494}
3495
3496/* Opcode 0xf3 0x0f 0x76 - invalid */
3497/* Opcode 0xf2 0x0f 0x76 - invalid */
3498
3499
3500/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3501FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3502/* Opcode 0x66 0x0f 0x77 - invalid */
3503/* Opcode 0xf3 0x0f 0x77 - invalid */
3504/* Opcode 0xf2 0x0f 0x77 - invalid */
3505
3506/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3507FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3508/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3509FNIEMOP_STUB(iemOp_AmdGrp17);
3510/* Opcode 0xf3 0x0f 0x78 - invalid */
3511/* Opcode 0xf2 0x0f 0x78 - invalid */
3512
3513/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3514FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3515/* Opcode 0x66 0x0f 0x79 - invalid */
3516/* Opcode 0xf3 0x0f 0x79 - invalid */
3517/* Opcode 0xf2 0x0f 0x79 - invalid */
3518
3519/* Opcode 0x0f 0x7a - invalid */
3520/* Opcode 0x66 0x0f 0x7a - invalid */
3521/* Opcode 0xf3 0x0f 0x7a - invalid */
3522/* Opcode 0xf2 0x0f 0x7a - invalid */
3523
3524/* Opcode 0x0f 0x7b - invalid */
3525/* Opcode 0x66 0x0f 0x7b - invalid */
3526/* Opcode 0xf3 0x0f 0x7b - invalid */
3527/* Opcode 0xf2 0x0f 0x7b - invalid */
3528
3529/* Opcode 0x0f 0x7c - invalid */
3530/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3531FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3532/* Opcode 0xf3 0x0f 0x7c - invalid */
3533/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3534FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3535
3536/* Opcode 0x0f 0x7d - invalid */
3537/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3538FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3539/* Opcode 0xf3 0x0f 0x7d - invalid */
3540/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3541FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3542
3543
3544/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3545FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3546{
3547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3548 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3549 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3550 else
3551 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3552 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3553 {
3554 /* greg, MMX */
3555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3556 IEM_MC_BEGIN(0, 1);
3557 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3558 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3559 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3560 {
3561 IEM_MC_LOCAL(uint64_t, u64Tmp);
3562 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3563 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3564 }
3565 else
3566 {
3567 IEM_MC_LOCAL(uint32_t, u32Tmp);
3568 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3569 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3570 }
3571 IEM_MC_ADVANCE_RIP();
3572 IEM_MC_END();
3573 }
3574 else
3575 {
3576 /* [mem], MMX */
3577 IEM_MC_BEGIN(0, 2);
3578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3579 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
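        /* No immediate operand follows the ModR/M bytes here, so cbImm must be 0 for the effective address calculation (it matters for RIP-relative addressing). */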
3580        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3582 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3583 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3584 {
3585 IEM_MC_LOCAL(uint64_t, u64Tmp);
3586 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3587 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3588 }
3589 else
3590 {
3591 IEM_MC_LOCAL(uint32_t, u32Tmp);
3592 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3593 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3594 }
3595 IEM_MC_ADVANCE_RIP();
3596 IEM_MC_END();
3597 }
3598 return VINF_SUCCESS;
3599}
3600
3601/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3602FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3603{
3604 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3605 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3606 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3607 else
3608 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3609 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3610 {
3611 /* greg, XMM */
3612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3613 IEM_MC_BEGIN(0, 1);
3614 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3615 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3616 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3617 {
3618 IEM_MC_LOCAL(uint64_t, u64Tmp);
3619 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3620 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3621 }
3622 else
3623 {
3624 IEM_MC_LOCAL(uint32_t, u32Tmp);
3625 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3626 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3627 }
3628 IEM_MC_ADVANCE_RIP();
3629 IEM_MC_END();
3630 }
3631 else
3632 {
3633 /* [mem], XMM */
3634 IEM_MC_BEGIN(0, 2);
3635 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3636 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
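        /* Again no immediate operand, hence cbImm = 0 (see movd/movq Ey,Pd above). */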
3637        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3639 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3640 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3641 {
3642 IEM_MC_LOCAL(uint64_t, u64Tmp);
3643 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3644 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3645 }
3646 else
3647 {
3648 IEM_MC_LOCAL(uint32_t, u32Tmp);
3649 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3650 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3651 }
3652 IEM_MC_ADVANCE_RIP();
3653 IEM_MC_END();
3654 }
3655 return VINF_SUCCESS;
3656}
3657
3658/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3659FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3660/* Opcode 0xf2 0x0f 0x7e - invalid */
3661
3662
3663/** Opcode 0x0f 0x7f - movq Qq, Pq */
3664FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3665{
3666 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3668 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3669 {
3670 /*
3671 * Register, register.
3672 */
3673 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3674 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3676 IEM_MC_BEGIN(0, 1);
3677 IEM_MC_LOCAL(uint64_t, u64Tmp);
3678 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3679 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3680 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3681 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3682 IEM_MC_ADVANCE_RIP();
3683 IEM_MC_END();
3684 }
3685 else
3686 {
3687 /*
3688 * Register, memory.
3689 */
3690 IEM_MC_BEGIN(0, 2);
3691 IEM_MC_LOCAL(uint64_t, u64Tmp);
3692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3693
3694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3696 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3697 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3698
3699 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3700 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3701
3702 IEM_MC_ADVANCE_RIP();
3703 IEM_MC_END();
3704 }
3705 return VINF_SUCCESS;
3706}
3707
3708/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3709FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3710{
3711 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3713 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3714 {
3715 /*
3716 * Register, register.
3717 */
3718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3719 IEM_MC_BEGIN(0, 0);
3720 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3721 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3722 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3723 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3724 IEM_MC_ADVANCE_RIP();
3725 IEM_MC_END();
3726 }
3727 else
3728 {
3729 /*
3730 * Register, memory.
3731 */
3732 IEM_MC_BEGIN(0, 2);
3733 IEM_MC_LOCAL(uint128_t, u128Tmp);
3734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3735
3736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3738 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3739 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3740
3741 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
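        /* movdqa requires 16-byte alignment: the aligned store raises #GP(0) on a misaligned address, unlike the plain store used by the movdqu form below. */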
3742 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3743
3744 IEM_MC_ADVANCE_RIP();
3745 IEM_MC_END();
3746 }
3747 return VINF_SUCCESS;
3748}
3749
3750/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3751FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3752{
3753    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3754    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3755 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3756 {
3757 /*
3758 * Register, register.
3759 */
3760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3761 IEM_MC_BEGIN(0, 0);
3762 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3763 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3764 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3765 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3766 IEM_MC_ADVANCE_RIP();
3767 IEM_MC_END();
3768 }
3769 else
3770 {
3771 /*
3772 * Register, memory.
3773 */
3774 IEM_MC_BEGIN(0, 2);
3775 IEM_MC_LOCAL(uint128_t, u128Tmp);
3776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3777
3778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3780 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3781 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3782
3783 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3784 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3785
3786 IEM_MC_ADVANCE_RIP();
3787 IEM_MC_END();
3788 }
3789 return VINF_SUCCESS;
3790}
3791
3792/* Opcode 0xf2 0x0f 0x7f - invalid */
3793
3794
3795
3796/** Opcode 0x0f 0x80. */
3797FNIEMOP_DEF(iemOp_jo_Jv)
3798{
3799 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3800 IEMOP_HLP_MIN_386();
3801 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
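    /* In 64-bit mode the operand size defaults to 64-bit, so everything but an explicit 16-bit operand size takes the sign-extended 32-bit displacement path below. The Jcc handlers through opcode 0x8f all repeat this structure, differing only in the EFLAGS test. */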
3802 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3803 {
3804 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3806
3807 IEM_MC_BEGIN(0, 0);
3808 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3809 IEM_MC_REL_JMP_S16(i16Imm);
3810 } IEM_MC_ELSE() {
3811 IEM_MC_ADVANCE_RIP();
3812 } IEM_MC_ENDIF();
3813 IEM_MC_END();
3814 }
3815 else
3816 {
3817 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3819
3820 IEM_MC_BEGIN(0, 0);
3821 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3822 IEM_MC_REL_JMP_S32(i32Imm);
3823 } IEM_MC_ELSE() {
3824 IEM_MC_ADVANCE_RIP();
3825 } IEM_MC_ENDIF();
3826 IEM_MC_END();
3827 }
3828 return VINF_SUCCESS;
3829}
3830
3831
3832/** Opcode 0x0f 0x81. */
3833FNIEMOP_DEF(iemOp_jno_Jv)
3834{
3835 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3836 IEMOP_HLP_MIN_386();
3837 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3838 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3839 {
3840 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3842
3843 IEM_MC_BEGIN(0, 0);
3844 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3845 IEM_MC_ADVANCE_RIP();
3846 } IEM_MC_ELSE() {
3847 IEM_MC_REL_JMP_S16(i16Imm);
3848 } IEM_MC_ENDIF();
3849 IEM_MC_END();
3850 }
3851 else
3852 {
3853 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3855
3856 IEM_MC_BEGIN(0, 0);
3857 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3858 IEM_MC_ADVANCE_RIP();
3859 } IEM_MC_ELSE() {
3860 IEM_MC_REL_JMP_S32(i32Imm);
3861 } IEM_MC_ENDIF();
3862 IEM_MC_END();
3863 }
3864 return VINF_SUCCESS;
3865}
3866
3867
3868/** Opcode 0x0f 0x82. */
3869FNIEMOP_DEF(iemOp_jc_Jv)
3870{
3871 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3872 IEMOP_HLP_MIN_386();
3873 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3874 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3875 {
3876 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3878
3879 IEM_MC_BEGIN(0, 0);
3880 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3881 IEM_MC_REL_JMP_S16(i16Imm);
3882 } IEM_MC_ELSE() {
3883 IEM_MC_ADVANCE_RIP();
3884 } IEM_MC_ENDIF();
3885 IEM_MC_END();
3886 }
3887 else
3888 {
3889 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3891
3892 IEM_MC_BEGIN(0, 0);
3893 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3894 IEM_MC_REL_JMP_S32(i32Imm);
3895 } IEM_MC_ELSE() {
3896 IEM_MC_ADVANCE_RIP();
3897 } IEM_MC_ENDIF();
3898 IEM_MC_END();
3899 }
3900 return VINF_SUCCESS;
3901}
3902
3903
3904/** Opcode 0x0f 0x83. */
3905FNIEMOP_DEF(iemOp_jnc_Jv)
3906{
3907 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3908 IEMOP_HLP_MIN_386();
3909 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3910 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3911 {
3912 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3914
3915 IEM_MC_BEGIN(0, 0);
3916 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3917 IEM_MC_ADVANCE_RIP();
3918 } IEM_MC_ELSE() {
3919 IEM_MC_REL_JMP_S16(i16Imm);
3920 } IEM_MC_ENDIF();
3921 IEM_MC_END();
3922 }
3923 else
3924 {
3925 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3927
3928 IEM_MC_BEGIN(0, 0);
3929 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3930 IEM_MC_ADVANCE_RIP();
3931 } IEM_MC_ELSE() {
3932 IEM_MC_REL_JMP_S32(i32Imm);
3933 } IEM_MC_ENDIF();
3934 IEM_MC_END();
3935 }
3936 return VINF_SUCCESS;
3937}
3938
3939
3940/** Opcode 0x0f 0x84. */
3941FNIEMOP_DEF(iemOp_je_Jv)
3942{
3943 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3944 IEMOP_HLP_MIN_386();
3945 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3946 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3947 {
3948 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3950
3951 IEM_MC_BEGIN(0, 0);
3952 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3953 IEM_MC_REL_JMP_S16(i16Imm);
3954 } IEM_MC_ELSE() {
3955 IEM_MC_ADVANCE_RIP();
3956 } IEM_MC_ENDIF();
3957 IEM_MC_END();
3958 }
3959 else
3960 {
3961 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3963
3964 IEM_MC_BEGIN(0, 0);
3965 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3966 IEM_MC_REL_JMP_S32(i32Imm);
3967 } IEM_MC_ELSE() {
3968 IEM_MC_ADVANCE_RIP();
3969 } IEM_MC_ENDIF();
3970 IEM_MC_END();
3971 }
3972 return VINF_SUCCESS;
3973}
3974
3975
3976/** Opcode 0x0f 0x85. */
3977FNIEMOP_DEF(iemOp_jne_Jv)
3978{
3979 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3980 IEMOP_HLP_MIN_386();
3981 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3982 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3983 {
3984 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3986
3987 IEM_MC_BEGIN(0, 0);
3988 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3989 IEM_MC_ADVANCE_RIP();
3990 } IEM_MC_ELSE() {
3991 IEM_MC_REL_JMP_S16(i16Imm);
3992 } IEM_MC_ENDIF();
3993 IEM_MC_END();
3994 }
3995 else
3996 {
3997 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3999
4000 IEM_MC_BEGIN(0, 0);
4001 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4002 IEM_MC_ADVANCE_RIP();
4003 } IEM_MC_ELSE() {
4004 IEM_MC_REL_JMP_S32(i32Imm);
4005 } IEM_MC_ENDIF();
4006 IEM_MC_END();
4007 }
4008 return VINF_SUCCESS;
4009}
4010
4011
4012/** Opcode 0x0f 0x86. */
4013FNIEMOP_DEF(iemOp_jbe_Jv)
4014{
4015 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4016 IEMOP_HLP_MIN_386();
4017 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4018 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4019 {
4020 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4022
4023 IEM_MC_BEGIN(0, 0);
4024 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4025 IEM_MC_REL_JMP_S16(i16Imm);
4026 } IEM_MC_ELSE() {
4027 IEM_MC_ADVANCE_RIP();
4028 } IEM_MC_ENDIF();
4029 IEM_MC_END();
4030 }
4031 else
4032 {
4033 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4035
4036 IEM_MC_BEGIN(0, 0);
4037 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4038 IEM_MC_REL_JMP_S32(i32Imm);
4039 } IEM_MC_ELSE() {
4040 IEM_MC_ADVANCE_RIP();
4041 } IEM_MC_ENDIF();
4042 IEM_MC_END();
4043 }
4044 return VINF_SUCCESS;
4045}
4046
4047
4048/** Opcode 0x0f 0x87. */
4049FNIEMOP_DEF(iemOp_jnbe_Jv)
4050{
4051 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4052 IEMOP_HLP_MIN_386();
4053 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4054 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4055 {
4056 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4058
4059 IEM_MC_BEGIN(0, 0);
4060 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4061 IEM_MC_ADVANCE_RIP();
4062 } IEM_MC_ELSE() {
4063 IEM_MC_REL_JMP_S16(i16Imm);
4064 } IEM_MC_ENDIF();
4065 IEM_MC_END();
4066 }
4067 else
4068 {
4069 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4071
4072 IEM_MC_BEGIN(0, 0);
4073 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4074 IEM_MC_ADVANCE_RIP();
4075 } IEM_MC_ELSE() {
4076 IEM_MC_REL_JMP_S32(i32Imm);
4077 } IEM_MC_ENDIF();
4078 IEM_MC_END();
4079 }
4080 return VINF_SUCCESS;
4081}
4082
4083
4084/** Opcode 0x0f 0x88. */
4085FNIEMOP_DEF(iemOp_js_Jv)
4086{
4087 IEMOP_MNEMONIC(js_Jv, "js Jv");
4088 IEMOP_HLP_MIN_386();
4089 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4090 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4091 {
4092 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4094
4095 IEM_MC_BEGIN(0, 0);
4096 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4097 IEM_MC_REL_JMP_S16(i16Imm);
4098 } IEM_MC_ELSE() {
4099 IEM_MC_ADVANCE_RIP();
4100 } IEM_MC_ENDIF();
4101 IEM_MC_END();
4102 }
4103 else
4104 {
4105 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4107
4108 IEM_MC_BEGIN(0, 0);
4109 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4110 IEM_MC_REL_JMP_S32(i32Imm);
4111 } IEM_MC_ELSE() {
4112 IEM_MC_ADVANCE_RIP();
4113 } IEM_MC_ENDIF();
4114 IEM_MC_END();
4115 }
4116 return VINF_SUCCESS;
4117}
4118
4119
4120/** Opcode 0x0f 0x89. */
4121FNIEMOP_DEF(iemOp_jns_Jv)
4122{
4123 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4124 IEMOP_HLP_MIN_386();
4125 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4126 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4127 {
4128 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4130
4131 IEM_MC_BEGIN(0, 0);
4132 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4133 IEM_MC_ADVANCE_RIP();
4134 } IEM_MC_ELSE() {
4135 IEM_MC_REL_JMP_S16(i16Imm);
4136 } IEM_MC_ENDIF();
4137 IEM_MC_END();
4138 }
4139 else
4140 {
4141 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4143
4144 IEM_MC_BEGIN(0, 0);
4145 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4146 IEM_MC_ADVANCE_RIP();
4147 } IEM_MC_ELSE() {
4148 IEM_MC_REL_JMP_S32(i32Imm);
4149 } IEM_MC_ENDIF();
4150 IEM_MC_END();
4151 }
4152 return VINF_SUCCESS;
4153}
4154
4155
4156/** Opcode 0x0f 0x8a. */
4157FNIEMOP_DEF(iemOp_jp_Jv)
4158{
4159 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4160 IEMOP_HLP_MIN_386();
4161 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4162 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4163 {
4164 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4166
4167 IEM_MC_BEGIN(0, 0);
4168 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4169 IEM_MC_REL_JMP_S16(i16Imm);
4170 } IEM_MC_ELSE() {
4171 IEM_MC_ADVANCE_RIP();
4172 } IEM_MC_ENDIF();
4173 IEM_MC_END();
4174 }
4175 else
4176 {
4177 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4179
4180 IEM_MC_BEGIN(0, 0);
4181 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4182 IEM_MC_REL_JMP_S32(i32Imm);
4183 } IEM_MC_ELSE() {
4184 IEM_MC_ADVANCE_RIP();
4185 } IEM_MC_ENDIF();
4186 IEM_MC_END();
4187 }
4188 return VINF_SUCCESS;
4189}
4190
4191
4192/** Opcode 0x0f 0x8b. */
4193FNIEMOP_DEF(iemOp_jnp_Jv)
4194{
4195 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4196 IEMOP_HLP_MIN_386();
4197 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4198 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4199 {
4200 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4202
4203 IEM_MC_BEGIN(0, 0);
4204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4205 IEM_MC_ADVANCE_RIP();
4206 } IEM_MC_ELSE() {
4207 IEM_MC_REL_JMP_S16(i16Imm);
4208 } IEM_MC_ENDIF();
4209 IEM_MC_END();
4210 }
4211 else
4212 {
4213 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4215
4216 IEM_MC_BEGIN(0, 0);
4217 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4218 IEM_MC_ADVANCE_RIP();
4219 } IEM_MC_ELSE() {
4220 IEM_MC_REL_JMP_S32(i32Imm);
4221 } IEM_MC_ENDIF();
4222 IEM_MC_END();
4223 }
4224 return VINF_SUCCESS;
4225}
4226
4227
4228/** Opcode 0x0f 0x8c. */
4229FNIEMOP_DEF(iemOp_jl_Jv)
4230{
4231 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4232 IEMOP_HLP_MIN_386();
4233 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4234 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4235 {
4236 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4238
4239 IEM_MC_BEGIN(0, 0);
4240 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4241 IEM_MC_REL_JMP_S16(i16Imm);
4242 } IEM_MC_ELSE() {
4243 IEM_MC_ADVANCE_RIP();
4244 } IEM_MC_ENDIF();
4245 IEM_MC_END();
4246 }
4247 else
4248 {
4249 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4251
4252 IEM_MC_BEGIN(0, 0);
4253 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4254 IEM_MC_REL_JMP_S32(i32Imm);
4255 } IEM_MC_ELSE() {
4256 IEM_MC_ADVANCE_RIP();
4257 } IEM_MC_ENDIF();
4258 IEM_MC_END();
4259 }
4260 return VINF_SUCCESS;
4261}
4262
4263
4264/** Opcode 0x0f 0x8d. */
4265FNIEMOP_DEF(iemOp_jnl_Jv)
4266{
4267 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4268 IEMOP_HLP_MIN_386();
4269 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4270 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4271 {
4272 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4274
4275 IEM_MC_BEGIN(0, 0);
4276 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4277 IEM_MC_ADVANCE_RIP();
4278 } IEM_MC_ELSE() {
4279 IEM_MC_REL_JMP_S16(i16Imm);
4280 } IEM_MC_ENDIF();
4281 IEM_MC_END();
4282 }
4283 else
4284 {
4285 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4287
4288 IEM_MC_BEGIN(0, 0);
4289 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4290 IEM_MC_ADVANCE_RIP();
4291 } IEM_MC_ELSE() {
4292 IEM_MC_REL_JMP_S32(i32Imm);
4293 } IEM_MC_ENDIF();
4294 IEM_MC_END();
4295 }
4296 return VINF_SUCCESS;
4297}
4298
4299
4300/** Opcode 0x0f 0x8e. */
4301FNIEMOP_DEF(iemOp_jle_Jv)
4302{
4303 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4304 IEMOP_HLP_MIN_386();
4305 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4306 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4307 {
4308 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4310
4311 IEM_MC_BEGIN(0, 0);
4312 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4313 IEM_MC_REL_JMP_S16(i16Imm);
4314 } IEM_MC_ELSE() {
4315 IEM_MC_ADVANCE_RIP();
4316 } IEM_MC_ENDIF();
4317 IEM_MC_END();
4318 }
4319 else
4320 {
4321 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4323
4324 IEM_MC_BEGIN(0, 0);
4325 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4326 IEM_MC_REL_JMP_S32(i32Imm);
4327 } IEM_MC_ELSE() {
4328 IEM_MC_ADVANCE_RIP();
4329 } IEM_MC_ENDIF();
4330 IEM_MC_END();
4331 }
4332 return VINF_SUCCESS;
4333}
4334
4335
4336/** Opcode 0x0f 0x8f. */
4337FNIEMOP_DEF(iemOp_jnle_Jv)
4338{
4339 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4340 IEMOP_HLP_MIN_386();
4341 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4342 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4343 {
4344 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4346
4347 IEM_MC_BEGIN(0, 0);
4348 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4349 IEM_MC_ADVANCE_RIP();
4350 } IEM_MC_ELSE() {
4351 IEM_MC_REL_JMP_S16(i16Imm);
4352 } IEM_MC_ENDIF();
4353 IEM_MC_END();
4354 }
4355 else
4356 {
4357 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4359
4360 IEM_MC_BEGIN(0, 0);
4361 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4362 IEM_MC_ADVANCE_RIP();
4363 } IEM_MC_ELSE() {
4364 IEM_MC_REL_JMP_S32(i32Imm);
4365 } IEM_MC_ENDIF();
4366 IEM_MC_END();
4367 }
4368 return VINF_SUCCESS;
4369}
4370
4371
4372/** Opcode 0x0f 0x90. */
4373FNIEMOP_DEF(iemOp_seto_Eb)
4374{
4375 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4376 IEMOP_HLP_MIN_386();
4377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
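    /* The setcc family (opcodes 0x90 thru 0x9f) shares this structure: store a byte of 1 or 0 to the register or memory target, depending on the EFLAGS condition tested. */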
4378
4379 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4380 * any way. AMD says it's "unused", whatever that means. We're
4381 * ignoring for now. */
4382 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4383 {
4384 /* register target */
4385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4386 IEM_MC_BEGIN(0, 0);
4387 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4388 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4389 } IEM_MC_ELSE() {
4390 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4391 } IEM_MC_ENDIF();
4392 IEM_MC_ADVANCE_RIP();
4393 IEM_MC_END();
4394 }
4395 else
4396 {
4397 /* memory target */
4398 IEM_MC_BEGIN(0, 1);
4399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4402 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4403 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4404 } IEM_MC_ELSE() {
4405 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4406 } IEM_MC_ENDIF();
4407 IEM_MC_ADVANCE_RIP();
4408 IEM_MC_END();
4409 }
4410 return VINF_SUCCESS;
4411}
4412
4413
4414/** Opcode 0x0f 0x91. */
4415FNIEMOP_DEF(iemOp_setno_Eb)
4416{
4417 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4418 IEMOP_HLP_MIN_386();
4419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4420
4421 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4422 * any way. AMD says it's "unused", whatever that means. We're
4423 * ignoring for now. */
4424 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4425 {
4426 /* register target */
4427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4428 IEM_MC_BEGIN(0, 0);
4429 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4430 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4431 } IEM_MC_ELSE() {
4432 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4433 } IEM_MC_ENDIF();
4434 IEM_MC_ADVANCE_RIP();
4435 IEM_MC_END();
4436 }
4437 else
4438 {
4439 /* memory target */
4440 IEM_MC_BEGIN(0, 1);
4441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4444 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4445 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4446 } IEM_MC_ELSE() {
4447 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4448 } IEM_MC_ENDIF();
4449 IEM_MC_ADVANCE_RIP();
4450 IEM_MC_END();
4451 }
4452 return VINF_SUCCESS;
4453}
4454
4455
4456/** Opcode 0x0f 0x92. */
4457FNIEMOP_DEF(iemOp_setc_Eb)
4458{
4459 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4460 IEMOP_HLP_MIN_386();
4461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4462
4463 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4464 * any way. AMD says it's "unused", whatever that means. We're
4465 * ignoring for now. */
4466 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4467 {
4468 /* register target */
4469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4470 IEM_MC_BEGIN(0, 0);
4471 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4472 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4473 } IEM_MC_ELSE() {
4474 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4475 } IEM_MC_ENDIF();
4476 IEM_MC_ADVANCE_RIP();
4477 IEM_MC_END();
4478 }
4479 else
4480 {
4481 /* memory target */
4482 IEM_MC_BEGIN(0, 1);
4483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4486 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4487 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4488 } IEM_MC_ELSE() {
4489 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4490 } IEM_MC_ENDIF();
4491 IEM_MC_ADVANCE_RIP();
4492 IEM_MC_END();
4493 }
4494 return VINF_SUCCESS;
4495}
4496
4497
4498/** Opcode 0x0f 0x93. */
4499FNIEMOP_DEF(iemOp_setnc_Eb)
4500{
4501 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4502 IEMOP_HLP_MIN_386();
4503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4504
4505 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4506 * any way. AMD says it's "unused", whatever that means. We're
4507 * ignoring for now. */
4508 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4509 {
4510 /* register target */
4511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4512 IEM_MC_BEGIN(0, 0);
4513 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4514 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4515 } IEM_MC_ELSE() {
4516 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4517 } IEM_MC_ENDIF();
4518 IEM_MC_ADVANCE_RIP();
4519 IEM_MC_END();
4520 }
4521 else
4522 {
4523 /* memory target */
4524 IEM_MC_BEGIN(0, 1);
4525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4528 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4529 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4530 } IEM_MC_ELSE() {
4531 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4532 } IEM_MC_ENDIF();
4533 IEM_MC_ADVANCE_RIP();
4534 IEM_MC_END();
4535 }
4536 return VINF_SUCCESS;
4537}
4538
4539
4540/** Opcode 0x0f 0x94. */
4541FNIEMOP_DEF(iemOp_sete_Eb)
4542{
4543 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4544 IEMOP_HLP_MIN_386();
4545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4546
4547 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4548 * any way. AMD says it's "unused", whatever that means. We're
4549 * ignoring for now. */
4550 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4551 {
4552 /* register target */
4553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4554 IEM_MC_BEGIN(0, 0);
4555 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4556 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4557 } IEM_MC_ELSE() {
4558 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4559 } IEM_MC_ENDIF();
4560 IEM_MC_ADVANCE_RIP();
4561 IEM_MC_END();
4562 }
4563 else
4564 {
4565 /* memory target */
4566 IEM_MC_BEGIN(0, 1);
4567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4570 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4571 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4572 } IEM_MC_ELSE() {
4573 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4574 } IEM_MC_ENDIF();
4575 IEM_MC_ADVANCE_RIP();
4576 IEM_MC_END();
4577 }
4578 return VINF_SUCCESS;
4579}
4580
4581
4582/** Opcode 0x0f 0x95. */
4583FNIEMOP_DEF(iemOp_setne_Eb)
4584{
4585 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4586 IEMOP_HLP_MIN_386();
4587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4588
4589 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4590 * any way. AMD says it's "unused", whatever that means. We're
4591 * ignoring for now. */
4592 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4593 {
4594 /* register target */
4595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4596 IEM_MC_BEGIN(0, 0);
4597 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4598 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4599 } IEM_MC_ELSE() {
4600 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4601 } IEM_MC_ENDIF();
4602 IEM_MC_ADVANCE_RIP();
4603 IEM_MC_END();
4604 }
4605 else
4606 {
4607 /* memory target */
4608 IEM_MC_BEGIN(0, 1);
4609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4612 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4613 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4614 } IEM_MC_ELSE() {
4615 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4616 } IEM_MC_ENDIF();
4617 IEM_MC_ADVANCE_RIP();
4618 IEM_MC_END();
4619 }
4620 return VINF_SUCCESS;
4621}
4622
4623
4624/** Opcode 0x0f 0x96. */
4625FNIEMOP_DEF(iemOp_setbe_Eb)
4626{
4627 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4628 IEMOP_HLP_MIN_386();
4629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4630
4631 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4632 * any way. AMD says it's "unused", whatever that means. We're
4633 * ignoring for now. */
4634 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4635 {
4636 /* register target */
4637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4638 IEM_MC_BEGIN(0, 0);
4639 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4640 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4641 } IEM_MC_ELSE() {
4642 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4643 } IEM_MC_ENDIF();
4644 IEM_MC_ADVANCE_RIP();
4645 IEM_MC_END();
4646 }
4647 else
4648 {
4649 /* memory target */
4650 IEM_MC_BEGIN(0, 1);
4651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4654 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4655 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4656 } IEM_MC_ELSE() {
4657 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4658 } IEM_MC_ENDIF();
4659 IEM_MC_ADVANCE_RIP();
4660 IEM_MC_END();
4661 }
4662 return VINF_SUCCESS;
4663}
4664
4665
4666/** Opcode 0x0f 0x97. */
4667FNIEMOP_DEF(iemOp_setnbe_Eb)
4668{
4669 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4670 IEMOP_HLP_MIN_386();
4671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4672
4673 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4674 * any way. AMD says it's "unused", whatever that means. We're
4675 * ignoring for now. */
4676 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4677 {
4678 /* register target */
4679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4680 IEM_MC_BEGIN(0, 0);
4681 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4682 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4683 } IEM_MC_ELSE() {
4684 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4685 } IEM_MC_ENDIF();
4686 IEM_MC_ADVANCE_RIP();
4687 IEM_MC_END();
4688 }
4689 else
4690 {
4691 /* memory target */
4692 IEM_MC_BEGIN(0, 1);
4693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4696 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4697 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4698 } IEM_MC_ELSE() {
4699 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4700 } IEM_MC_ENDIF();
4701 IEM_MC_ADVANCE_RIP();
4702 IEM_MC_END();
4703 }
4704 return VINF_SUCCESS;
4705}
4706
4707
4708/** Opcode 0x0f 0x98. */
4709FNIEMOP_DEF(iemOp_sets_Eb)
4710{
4711 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4712 IEMOP_HLP_MIN_386();
4713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4714
4715 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4716 * any way. AMD says it's "unused", whatever that means. We're
4717 * ignoring for now. */
4718 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4719 {
4720 /* register target */
4721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4722 IEM_MC_BEGIN(0, 0);
4723 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4724 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4725 } IEM_MC_ELSE() {
4726 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4727 } IEM_MC_ENDIF();
4728 IEM_MC_ADVANCE_RIP();
4729 IEM_MC_END();
4730 }
4731 else
4732 {
4733 /* memory target */
4734 IEM_MC_BEGIN(0, 1);
4735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4738 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4739 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4740 } IEM_MC_ELSE() {
4741 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4742 } IEM_MC_ENDIF();
4743 IEM_MC_ADVANCE_RIP();
4744 IEM_MC_END();
4745 }
4746 return VINF_SUCCESS;
4747}
4748
4749
4750/** Opcode 0x0f 0x99. */
4751FNIEMOP_DEF(iemOp_setns_Eb)
4752{
4753 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4754 IEMOP_HLP_MIN_386();
4755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4756
4757 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4758 * any way. AMD says it's "unused", whatever that means. We're
4759 * ignoring for now. */
4760 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4761 {
4762 /* register target */
4763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4764 IEM_MC_BEGIN(0, 0);
4765 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4766 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4767 } IEM_MC_ELSE() {
4768 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4769 } IEM_MC_ENDIF();
4770 IEM_MC_ADVANCE_RIP();
4771 IEM_MC_END();
4772 }
4773 else
4774 {
4775 /* memory target */
4776 IEM_MC_BEGIN(0, 1);
4777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4780 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4781 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4782 } IEM_MC_ELSE() {
4783 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4784 } IEM_MC_ENDIF();
4785 IEM_MC_ADVANCE_RIP();
4786 IEM_MC_END();
4787 }
4788 return VINF_SUCCESS;
4789}
4790
4791
4792/** Opcode 0x0f 0x9a. */
4793FNIEMOP_DEF(iemOp_setp_Eb)
4794{
4795 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4796 IEMOP_HLP_MIN_386();
4797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4798
4799 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4800 * any way. AMD says it's "unused", whatever that means. We're
4801 * ignoring for now. */
4802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4803 {
4804 /* register target */
4805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4806 IEM_MC_BEGIN(0, 0);
4807 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4808 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4809 } IEM_MC_ELSE() {
4810 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4811 } IEM_MC_ENDIF();
4812 IEM_MC_ADVANCE_RIP();
4813 IEM_MC_END();
4814 }
4815 else
4816 {
4817 /* memory target */
4818 IEM_MC_BEGIN(0, 1);
4819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4822 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4823 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4824 } IEM_MC_ELSE() {
4825 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4826 } IEM_MC_ENDIF();
4827 IEM_MC_ADVANCE_RIP();
4828 IEM_MC_END();
4829 }
4830 return VINF_SUCCESS;
4831}
4832
4833
4834/** Opcode 0x0f 0x9b. */
4835FNIEMOP_DEF(iemOp_setnp_Eb)
4836{
4837 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4838 IEMOP_HLP_MIN_386();
4839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4840
4841 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4842 * any way. AMD says it's "unused", whatever that means. We're
4843 * ignoring for now. */
4844 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4845 {
4846 /* register target */
4847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4848 IEM_MC_BEGIN(0, 0);
4849 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4850 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4851 } IEM_MC_ELSE() {
4852 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4853 } IEM_MC_ENDIF();
4854 IEM_MC_ADVANCE_RIP();
4855 IEM_MC_END();
4856 }
4857 else
4858 {
4859 /* memory target */
4860 IEM_MC_BEGIN(0, 1);
4861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4864 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4865 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4866 } IEM_MC_ELSE() {
4867 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4868 } IEM_MC_ENDIF();
4869 IEM_MC_ADVANCE_RIP();
4870 IEM_MC_END();
4871 }
4872 return VINF_SUCCESS;
4873}
4874
4875
4876/** Opcode 0x0f 0x9c. */
4877FNIEMOP_DEF(iemOp_setl_Eb)
4878{
4879 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4880 IEMOP_HLP_MIN_386();
4881 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4882
4883 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4884 * any way. AMD says it's "unused", whatever that means. We're
4885 * ignoring for now. */
4886 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4887 {
4888 /* register target */
4889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4890 IEM_MC_BEGIN(0, 0);
4891 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4892 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4893 } IEM_MC_ELSE() {
4894 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4895 } IEM_MC_ENDIF();
4896 IEM_MC_ADVANCE_RIP();
4897 IEM_MC_END();
4898 }
4899 else
4900 {
4901 /* memory target */
4902 IEM_MC_BEGIN(0, 1);
4903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4906 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4907 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4908 } IEM_MC_ELSE() {
4909 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4910 } IEM_MC_ENDIF();
4911 IEM_MC_ADVANCE_RIP();
4912 IEM_MC_END();
4913 }
4914 return VINF_SUCCESS;
4915}
4916
4917
4918/** Opcode 0x0f 0x9d. */
4919FNIEMOP_DEF(iemOp_setnl_Eb)
4920{
4921 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4922 IEMOP_HLP_MIN_386();
4923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4924
4925 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4926 * any way. AMD says it's "unused", whatever that means. We're
4927 * ignoring for now. */
4928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4929 {
4930 /* register target */
4931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4932 IEM_MC_BEGIN(0, 0);
4933 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4934 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4935 } IEM_MC_ELSE() {
4936 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4937 } IEM_MC_ENDIF();
4938 IEM_MC_ADVANCE_RIP();
4939 IEM_MC_END();
4940 }
4941 else
4942 {
4943 /* memory target */
4944 IEM_MC_BEGIN(0, 1);
4945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4948 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4949 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4950 } IEM_MC_ELSE() {
4951 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4952 } IEM_MC_ENDIF();
4953 IEM_MC_ADVANCE_RIP();
4954 IEM_MC_END();
4955 }
4956 return VINF_SUCCESS;
4957}
4958
4959
4960/** Opcode 0x0f 0x9e. */
4961FNIEMOP_DEF(iemOp_setle_Eb)
4962{
4963 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4964 IEMOP_HLP_MIN_386();
4965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4966
4967 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4968 * any way. AMD says it's "unused", whatever that means. We're
4969 * ignoring for now. */
4970 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4971 {
4972 /* register target */
4973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4974 IEM_MC_BEGIN(0, 0);
4975 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4976 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4977 } IEM_MC_ELSE() {
4978 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4979 } IEM_MC_ENDIF();
4980 IEM_MC_ADVANCE_RIP();
4981 IEM_MC_END();
4982 }
4983 else
4984 {
4985 /* memory target */
4986 IEM_MC_BEGIN(0, 1);
4987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4990 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4991 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4992 } IEM_MC_ELSE() {
4993 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4994 } IEM_MC_ENDIF();
4995 IEM_MC_ADVANCE_RIP();
4996 IEM_MC_END();
4997 }
4998 return VINF_SUCCESS;
4999}
5000
5001
5002/** Opcode 0x0f 0x9f. */
5003FNIEMOP_DEF(iemOp_setnle_Eb)
5004{
5005 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5006 IEMOP_HLP_MIN_386();
5007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5008
5009 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5010 * any way. AMD says it's "unused", whatever that means. We're
5011 * ignoring for now. */
5012 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5013 {
5014 /* register target */
5015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5016 IEM_MC_BEGIN(0, 0);
5017 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5018 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5019 } IEM_MC_ELSE() {
5020 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5021 } IEM_MC_ENDIF();
5022 IEM_MC_ADVANCE_RIP();
5023 IEM_MC_END();
5024 }
5025 else
5026 {
5027 /* memory target */
5028 IEM_MC_BEGIN(0, 1);
5029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5032 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5033 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5034 } IEM_MC_ELSE() {
5035 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5036 } IEM_MC_ENDIF();
5037 IEM_MC_ADVANCE_RIP();
5038 IEM_MC_END();
5039 }
5040 return VINF_SUCCESS;
5041}
5042
5043
5044/**
5045 * Common 'push segment-register' helper.
5046 */
5047FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5048{
5049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5050 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5051 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5052
5053 switch (pVCpu->iem.s.enmEffOpSize)
5054 {
5055 case IEMMODE_16BIT:
5056 IEM_MC_BEGIN(0, 1);
5057 IEM_MC_LOCAL(uint16_t, u16Value);
5058 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5059 IEM_MC_PUSH_U16(u16Value);
5060 IEM_MC_ADVANCE_RIP();
5061 IEM_MC_END();
5062 break;
5063
5064 case IEMMODE_32BIT:
5065 IEM_MC_BEGIN(0, 1);
5066 IEM_MC_LOCAL(uint32_t, u32Value);
5067 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
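            /* A dedicated SREG push operation is used because some CPUs only perform a 16-bit write for a 32-bit 'push sreg', leaving the upper half of the stack slot untouched. */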
5068 IEM_MC_PUSH_U32_SREG(u32Value);
5069 IEM_MC_ADVANCE_RIP();
5070 IEM_MC_END();
5071 break;
5072
5073 case IEMMODE_64BIT:
5074 IEM_MC_BEGIN(0, 1);
5075 IEM_MC_LOCAL(uint64_t, u64Value);
5076 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5077 IEM_MC_PUSH_U64(u64Value);
5078 IEM_MC_ADVANCE_RIP();
5079 IEM_MC_END();
5080 break;
5081 }
5082
5083 return VINF_SUCCESS;
5084}
5085
5086
5087/** Opcode 0x0f 0xa0. */
5088FNIEMOP_DEF(iemOp_push_fs)
5089{
5090 IEMOP_MNEMONIC(push_fs, "push fs");
5091 IEMOP_HLP_MIN_386();
5092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5093 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5094}
5095
5096
5097/** Opcode 0x0f 0xa1. */
5098FNIEMOP_DEF(iemOp_pop_fs)
5099{
5100 IEMOP_MNEMONIC(pop_fs, "pop fs");
5101 IEMOP_HLP_MIN_386();
5102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5103 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5104}
5105
5106
5107/** Opcode 0x0f 0xa2. */
5108FNIEMOP_DEF(iemOp_cpuid)
5109{
5110 IEMOP_MNEMONIC(cpuid, "cpuid");
5111 IEMOP_HLP_MIN_486(); /* not all 486es. */
5112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5113 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5114}
5115
5116
5117/**
5118 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5119 * iemOp_bts_Ev_Gv.
5120 */
5121FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5122{
5123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5124 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5125
5126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5127 {
5128 /* register destination. */
5129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5130 switch (pVCpu->iem.s.enmEffOpSize)
5131 {
5132 case IEMMODE_16BIT:
5133 IEM_MC_BEGIN(3, 0);
5134 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5135 IEM_MC_ARG(uint16_t, u16Src, 1);
5136 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5137
5138 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
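                /* The register form truncates the bit offset modulo the operand width (16 bits here; the 32- and 64-bit cases below mirror this). */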
5139 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5140 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5141 IEM_MC_REF_EFLAGS(pEFlags);
5142 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5143
5144 IEM_MC_ADVANCE_RIP();
5145 IEM_MC_END();
5146 return VINF_SUCCESS;
5147
5148 case IEMMODE_32BIT:
5149 IEM_MC_BEGIN(3, 0);
5150 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5151 IEM_MC_ARG(uint32_t, u32Src, 1);
5152 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5153
5154 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5155 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5156 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5157 IEM_MC_REF_EFLAGS(pEFlags);
5158 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5159
5160 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5161 IEM_MC_ADVANCE_RIP();
5162 IEM_MC_END();
5163 return VINF_SUCCESS;
5164
5165 case IEMMODE_64BIT:
5166 IEM_MC_BEGIN(3, 0);
5167 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5168 IEM_MC_ARG(uint64_t, u64Src, 1);
5169 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5170
5171 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5172 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5173 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5174 IEM_MC_REF_EFLAGS(pEFlags);
5175 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5176
5177 IEM_MC_ADVANCE_RIP();
5178 IEM_MC_END();
5179 return VINF_SUCCESS;
5180
5181 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5182 }
5183 }
5184 else
5185 {
5186 /* memory destination. */
5187
5188 uint32_t fAccess;
5189 if (pImpl->pfnLockedU16)
5190 fAccess = IEM_ACCESS_DATA_RW;
5191 else /* BT */
5192 fAccess = IEM_ACCESS_DATA_R;
5193
5194 /** @todo test negative bit offsets! */
5195 switch (pVCpu->iem.s.enmEffOpSize)
5196 {
5197 case IEMMODE_16BIT:
5198 IEM_MC_BEGIN(3, 2);
5199 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5200 IEM_MC_ARG(uint16_t, u16Src, 1);
5201 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5203 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5204
5205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5206 if (pImpl->pfnLockedU16)
5207 IEMOP_HLP_DONE_DECODING();
5208 else
5209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5210 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
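                /* In the memory form the bit offset is a signed value that may address
                 * bits outside the operand: the low 4 bits select the bit within a word,
                 * while the arithmetically shifted remainder, scaled to bytes (SAR by 4,
                 * SHL by 1), adjusts the effective address. E.g. a bit offset of -1
                 * addresses bit 15 of the word two bytes below GCPtrEffDst. */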
5211 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5212 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5213 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5214 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5215 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5216 IEM_MC_FETCH_EFLAGS(EFlags);
5217
5218 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5219 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5220 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5221 else
5222 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5223 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5224
5225 IEM_MC_COMMIT_EFLAGS(EFlags);
5226 IEM_MC_ADVANCE_RIP();
5227 IEM_MC_END();
5228 return VINF_SUCCESS;
5229
5230 case IEMMODE_32BIT:
5231 IEM_MC_BEGIN(3, 2);
5232 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5233 IEM_MC_ARG(uint32_t, u32Src, 1);
5234 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5236 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5237
5238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5239 if (pImpl->pfnLockedU16)
5240 IEMOP_HLP_DONE_DECODING();
5241 else
5242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5243 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5244 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5245 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5246 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5247 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5248 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5249 IEM_MC_FETCH_EFLAGS(EFlags);
5250
5251 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5252 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5253 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5254 else
5255 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5256 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5257
5258 IEM_MC_COMMIT_EFLAGS(EFlags);
5259 IEM_MC_ADVANCE_RIP();
5260 IEM_MC_END();
5261 return VINF_SUCCESS;
5262
5263 case IEMMODE_64BIT:
5264 IEM_MC_BEGIN(3, 2);
5265 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5266 IEM_MC_ARG(uint64_t, u64Src, 1);
5267 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5268 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5269 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5270
5271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5272 if (pImpl->pfnLockedU16)
5273 IEMOP_HLP_DONE_DECODING();
5274 else
5275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5276 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5277 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5278 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5279 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5280 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5281 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5282 IEM_MC_FETCH_EFLAGS(EFlags);
5283
5284 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5285 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5286 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5287 else
5288 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5289 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5290
5291 IEM_MC_COMMIT_EFLAGS(EFlags);
5292 IEM_MC_ADVANCE_RIP();
5293 IEM_MC_END();
5294 return VINF_SUCCESS;
5295
5296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5297 }
5298 }
5299}
5300
5301
5302/** Opcode 0x0f 0xa3. */
5303FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5304{
5305 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5306 IEMOP_HLP_MIN_386();
5307 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5308}
5309
5310
5311/**
5312 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5313 */
5314FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5315{
5316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5317 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5318
5319 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5320 {
5321 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5323
5324 switch (pVCpu->iem.s.enmEffOpSize)
5325 {
5326 case IEMMODE_16BIT:
5327 IEM_MC_BEGIN(4, 0);
5328 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5329 IEM_MC_ARG(uint16_t, u16Src, 1);
5330 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5331 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5332
5333 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5334 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5335 IEM_MC_REF_EFLAGS(pEFlags);
5336 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5337
5338 IEM_MC_ADVANCE_RIP();
5339 IEM_MC_END();
5340 return VINF_SUCCESS;
5341
5342 case IEMMODE_32BIT:
5343 IEM_MC_BEGIN(4, 0);
5344 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5345 IEM_MC_ARG(uint32_t, u32Src, 1);
5346 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5347 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5348
5349 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5350 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5351 IEM_MC_REF_EFLAGS(pEFlags);
5352 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5353
5354 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5355 IEM_MC_ADVANCE_RIP();
5356 IEM_MC_END();
5357 return VINF_SUCCESS;
5358
5359 case IEMMODE_64BIT:
5360 IEM_MC_BEGIN(4, 0);
5361 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5362 IEM_MC_ARG(uint64_t, u64Src, 1);
5363 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5364 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5365
5366 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5367 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5368 IEM_MC_REF_EFLAGS(pEFlags);
5369 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5370
5371 IEM_MC_ADVANCE_RIP();
5372 IEM_MC_END();
5373 return VINF_SUCCESS;
5374
5375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5376 }
5377 }
5378 else
5379 {
5380 switch (pVCpu->iem.s.enmEffOpSize)
5381 {
5382 case IEMMODE_16BIT:
5383 IEM_MC_BEGIN(4, 2);
5384 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5385 IEM_MC_ARG(uint16_t, u16Src, 1);
5386 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5387 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5389
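                /* The effective address is calculated before the shift immediate
                   is fetched; the cbImm = 1 argument tells the calculation that
                   one more opcode byte follows, so that 64-bit RIP-relative
                   addressing stays correct. */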
5390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5391 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5392 IEM_MC_ASSIGN(cShiftArg, cShift);
5393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5394 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5395 IEM_MC_FETCH_EFLAGS(EFlags);
5396 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5397 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5398
5399 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5400 IEM_MC_COMMIT_EFLAGS(EFlags);
5401 IEM_MC_ADVANCE_RIP();
5402 IEM_MC_END();
5403 return VINF_SUCCESS;
5404
5405 case IEMMODE_32BIT:
5406 IEM_MC_BEGIN(4, 2);
5407 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5408 IEM_MC_ARG(uint32_t, u32Src, 1);
5409 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5410 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5412
5413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5414 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5415 IEM_MC_ASSIGN(cShiftArg, cShift);
5416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5417 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5418 IEM_MC_FETCH_EFLAGS(EFlags);
5419 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5420 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5421
5422 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5423 IEM_MC_COMMIT_EFLAGS(EFlags);
5424 IEM_MC_ADVANCE_RIP();
5425 IEM_MC_END();
5426 return VINF_SUCCESS;
5427
5428 case IEMMODE_64BIT:
5429 IEM_MC_BEGIN(4, 2);
5430 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5431 IEM_MC_ARG(uint64_t, u64Src, 1);
5432 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5433 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5435
5436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5437 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5438 IEM_MC_ASSIGN(cShiftArg, cShift);
5439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5440 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5441 IEM_MC_FETCH_EFLAGS(EFlags);
5442 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5443 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5444
5445 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5446 IEM_MC_COMMIT_EFLAGS(EFlags);
5447 IEM_MC_ADVANCE_RIP();
5448 IEM_MC_END();
5449 return VINF_SUCCESS;
5450
5451 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5452 }
5453 }
5454}
5455
5456
5457/**
5458 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5459 */
5460FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5461{
5462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5463 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
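    /* This mirrors the Ib worker above, except the shift count is read from CL
       at execution time, so no immediate byte follows the ModR/M operand (hence
       cbImm = 0 in the effective address calculation below). */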
5464
5465 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5466 {
5467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5468
5469 switch (pVCpu->iem.s.enmEffOpSize)
5470 {
5471 case IEMMODE_16BIT:
5472 IEM_MC_BEGIN(4, 0);
5473 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5474 IEM_MC_ARG(uint16_t, u16Src, 1);
5475 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5476 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5477
5478 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5479 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5480 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5481 IEM_MC_REF_EFLAGS(pEFlags);
5482 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5483
5484 IEM_MC_ADVANCE_RIP();
5485 IEM_MC_END();
5486 return VINF_SUCCESS;
5487
5488 case IEMMODE_32BIT:
5489 IEM_MC_BEGIN(4, 0);
5490 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5491 IEM_MC_ARG(uint32_t, u32Src, 1);
5492 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5493 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5494
5495 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5496 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5497 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5498 IEM_MC_REF_EFLAGS(pEFlags);
5499 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5500
5501 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5502 IEM_MC_ADVANCE_RIP();
5503 IEM_MC_END();
5504 return VINF_SUCCESS;
5505
5506 case IEMMODE_64BIT:
5507 IEM_MC_BEGIN(4, 0);
5508 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5509 IEM_MC_ARG(uint64_t, u64Src, 1);
5510 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5511 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5512
5513 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5514 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5515 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5516 IEM_MC_REF_EFLAGS(pEFlags);
5517 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5518
5519 IEM_MC_ADVANCE_RIP();
5520 IEM_MC_END();
5521 return VINF_SUCCESS;
5522
5523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5524 }
5525 }
5526 else
5527 {
5528 switch (pVCpu->iem.s.enmEffOpSize)
5529 {
5530 case IEMMODE_16BIT:
5531 IEM_MC_BEGIN(4, 2);
5532 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5533 IEM_MC_ARG(uint16_t, u16Src, 1);
5534 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5535 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5537
5538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5540 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5541 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5542 IEM_MC_FETCH_EFLAGS(EFlags);
5543 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5544 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5545
5546 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5547 IEM_MC_COMMIT_EFLAGS(EFlags);
5548 IEM_MC_ADVANCE_RIP();
5549 IEM_MC_END();
5550 return VINF_SUCCESS;
5551
5552 case IEMMODE_32BIT:
5553 IEM_MC_BEGIN(4, 2);
5554 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5555 IEM_MC_ARG(uint32_t, u32Src, 1);
5556 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5557 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5559
5560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5562 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5563 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5564 IEM_MC_FETCH_EFLAGS(EFlags);
5565 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5566 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5567
5568 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5569 IEM_MC_COMMIT_EFLAGS(EFlags);
5570 IEM_MC_ADVANCE_RIP();
5571 IEM_MC_END();
5572 return VINF_SUCCESS;
5573
5574 case IEMMODE_64BIT:
5575 IEM_MC_BEGIN(4, 2);
5576 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5577 IEM_MC_ARG(uint64_t, u64Src, 1);
5578 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5581
5582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5584 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5585 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5586 IEM_MC_FETCH_EFLAGS(EFlags);
5587 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5588 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5589
5590 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5591 IEM_MC_COMMIT_EFLAGS(EFlags);
5592 IEM_MC_ADVANCE_RIP();
5593 IEM_MC_END();
5594 return VINF_SUCCESS;
5595
5596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5597 }
5598 }
5599}
5600
5601
5602
5603/** Opcode 0x0f 0xa4. */
5604FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5605{
5606 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5607 IEMOP_HLP_MIN_386();
5608 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5609}
5610
5611
5612/** Opcode 0x0f 0xa5. */
5613FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5614{
5615 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5616 IEMOP_HLP_MIN_386();
5617 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5618}
5619
5620
5621/** Opcode 0x0f 0xa8. */
5622FNIEMOP_DEF(iemOp_push_gs)
5623{
5624 IEMOP_MNEMONIC(push_gs, "push gs");
5625 IEMOP_HLP_MIN_386();
5626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5627 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5628}
5629
5630
5631/** Opcode 0x0f 0xa9. */
5632FNIEMOP_DEF(iemOp_pop_gs)
5633{
5634 IEMOP_MNEMONIC(pop_gs, "pop gs");
5635 IEMOP_HLP_MIN_386();
5636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5637 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5638}
5639
5640
5641/** Opcode 0x0f 0xaa. */
5642FNIEMOP_STUB(iemOp_rsm);
5643//IEMOP_HLP_MIN_386();
5644
5645
5646/** Opcode 0x0f 0xab. */
5647FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5648{
5649 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5650 IEMOP_HLP_MIN_386();
5651 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5652}
5653
5654
5655/** Opcode 0x0f 0xac. */
5656FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5657{
5658 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5659 IEMOP_HLP_MIN_386();
5660 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5661}
5662
5663
5664/** Opcode 0x0f 0xad. */
5665FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5666{
5667 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5668 IEMOP_HLP_MIN_386();
5669 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5670}
5671
5672
5673/** Opcode 0x0f 0xae mem/0. */
5674FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5675{
5676 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5677 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5678 return IEMOP_RAISE_INVALID_OPCODE();
5679
5680 IEM_MC_BEGIN(3, 1);
5681 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5682 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5683 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5686 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5687 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5688 IEM_MC_END();
5689 return VINF_SUCCESS;
5690}
5691
5692
5693/** Opcode 0x0f 0xae mem/1. */
5694FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5695{
5696 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5697 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5698 return IEMOP_RAISE_INVALID_OPCODE();
5699
5700 IEM_MC_BEGIN(3, 1);
5701 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5702 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5703 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5706 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5707 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5708 IEM_MC_END();
5709 return VINF_SUCCESS;
5710}
5711
5712
5713/** Opcode 0x0f 0xae mem/2. */
5714FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5715
5716/** Opcode 0x0f 0xae mem/3. */
5717FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5718
5719/** Opcode 0x0f 0xae mem/4. */
5720FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5721
5722/** Opcode 0x0f 0xae mem/5. */
5723FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5724
5725/** Opcode 0x0f 0xae mem/6. */
5726FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5727
5728/** Opcode 0x0f 0xae mem/7. */
5729FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5730
5731
5732/** Opcode 0x0f 0xae 11b/5. */
5733FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5734{
5735 RT_NOREF_PV(bRm);
5736 IEMOP_MNEMONIC(lfence, "lfence");
5737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5738 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5739 return IEMOP_RAISE_INVALID_OPCODE();
5740
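    /* Use the host's native LFENCE when it has SSE2; otherwise fall back to the
       generic alternative memory fence helper. */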
5741 IEM_MC_BEGIN(0, 0);
5742 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5743 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5744 else
5745 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5746 IEM_MC_ADVANCE_RIP();
5747 IEM_MC_END();
5748 return VINF_SUCCESS;
5749}
5750
5751
5752/** Opcode 0x0f 0xae 11b/6. */
5753FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5754{
5755 RT_NOREF_PV(bRm);
5756 IEMOP_MNEMONIC(mfence, "mfence");
5757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5758 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5759 return IEMOP_RAISE_INVALID_OPCODE();
5760
5761 IEM_MC_BEGIN(0, 0);
5762 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5763 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5764 else
5765 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5766 IEM_MC_ADVANCE_RIP();
5767 IEM_MC_END();
5768 return VINF_SUCCESS;
5769}
5770
5771
5772/** Opcode 0x0f 0xae 11b/7. */
5773FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5774{
5775 RT_NOREF_PV(bRm);
5776 IEMOP_MNEMONIC(sfence, "sfence");
5777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5778 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5779 return IEMOP_RAISE_INVALID_OPCODE();
5780
5781 IEM_MC_BEGIN(0, 0);
5782 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5783 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5784 else
5785 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5786 IEM_MC_ADVANCE_RIP();
5787 IEM_MC_END();
5788 return VINF_SUCCESS;
5789}
5790
5791
5792/** Opcode 0xf3 0x0f 0xae 11b/0. */
5793FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5794
5795/** Opcode 0xf3 0x0f 0xae 11b/1. */
5796FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5797
5798/** Opcode 0xf3 0x0f 0xae 11b/2. */
5799FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5800
5801/** Opcode 0xf3 0x0f 0xae 11b/3. */
5802FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5803
5804
5805/** Opcode 0x0f 0xae. */
5806FNIEMOP_DEF(iemOp_Grp15)
5807{
5808/** @todo continue here tomorrow! (see bs3-cpu-decoding-1.c32 r113507). */
5809 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5810 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
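    /* The memory forms (mod != 3) decode on the reg field alone; the register
       forms dispatch further on the repeat/operand-size prefixes: no prefix
       selects the fences, F3 the FS/GS base read/write forms. */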
5811 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5812 {
5813 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5814 {
5815 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5816 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5817 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5818 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5819 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5820 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5821 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5822 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5823 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5824 }
5825 }
5826 else
5827 {
5828 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5829 {
5830 case 0:
5831 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5832 {
5833 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5834 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5835 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5836 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5837 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5838 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5839 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5840 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5842 }
5843 break;
5844
5845 case IEM_OP_PRF_REPZ:
5846 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5847 {
5848 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5849 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5850 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5851 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5852 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5853 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5854 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5855 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5857 }
5858 break;
5859
5860 default:
5861 return IEMOP_RAISE_INVALID_OPCODE();
5862 }
5863 }
5864}
5865
5866
5867/** Opcode 0x0f 0xaf. */
5868FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5869{
5870 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5871 IEMOP_HLP_MIN_386();
5872 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5873 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5874}
5875
5876
5877/** Opcode 0x0f 0xb0. */
5878FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5879{
5880 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5881 IEMOP_HLP_MIN_486();
5882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5883
5884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5885 {
5886 IEMOP_HLP_DONE_DECODING();
5887 IEM_MC_BEGIN(4, 0);
5888 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5889 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5890 IEM_MC_ARG(uint8_t, u8Src, 2);
5891 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5892
5893 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5894 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5895 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5896 IEM_MC_REF_EFLAGS(pEFlags);
5897 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5898 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5899 else
5900 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5901
5902 IEM_MC_ADVANCE_RIP();
5903 IEM_MC_END();
5904 }
5905 else
5906 {
5907 IEM_MC_BEGIN(4, 3);
5908 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5909 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5910 IEM_MC_ARG(uint8_t, u8Src, 2);
5911 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5913 IEM_MC_LOCAL(uint8_t, u8Al);
5914
5915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5916 IEMOP_HLP_DONE_DECODING();
5917 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5918 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5919 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5920 IEM_MC_FETCH_EFLAGS(EFlags);
5921 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5922 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5923 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5924 else
5925 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5926
5927 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5928 IEM_MC_COMMIT_EFLAGS(EFlags);
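        /* u8Al holds the original AL value if the compare succeeded, or the old
           destination value written back by the helper via pu8Al if it failed,
           so storing it to AL unconditionally is safe either way. */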
5929 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5930 IEM_MC_ADVANCE_RIP();
5931 IEM_MC_END();
5932 }
5933 return VINF_SUCCESS;
5934}
5935
5936/** Opcode 0x0f 0xb1. */
5937FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5938{
5939 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5940 IEMOP_HLP_MIN_486();
5941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5942
5943 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5944 {
5945 IEMOP_HLP_DONE_DECODING();
5946 switch (pVCpu->iem.s.enmEffOpSize)
5947 {
5948 case IEMMODE_16BIT:
5949 IEM_MC_BEGIN(4, 0);
5950 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5951 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5952 IEM_MC_ARG(uint16_t, u16Src, 2);
5953 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5954
5955 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5956 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5957 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5958 IEM_MC_REF_EFLAGS(pEFlags);
5959 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5960 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5961 else
5962 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5963
5964 IEM_MC_ADVANCE_RIP();
5965 IEM_MC_END();
5966 return VINF_SUCCESS;
5967
5968 case IEMMODE_32BIT:
5969 IEM_MC_BEGIN(4, 0);
5970 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5971 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5972 IEM_MC_ARG(uint32_t, u32Src, 2);
5973 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5974
5975 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5976 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5977 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5978 IEM_MC_REF_EFLAGS(pEFlags);
5979 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5980 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5981 else
5982 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5983
5984 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5985 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5986 IEM_MC_ADVANCE_RIP();
5987 IEM_MC_END();
5988 return VINF_SUCCESS;
5989
5990 case IEMMODE_64BIT:
5991 IEM_MC_BEGIN(4, 0);
5992 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5993 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
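            /* On 32-bit hosts the 64-bit source operand is passed to the
               assembly helper by reference rather than by value. */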
5994#ifdef RT_ARCH_X86
5995 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5996#else
5997 IEM_MC_ARG(uint64_t, u64Src, 2);
5998#endif
5999 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6000
6001 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6002 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6003 IEM_MC_REF_EFLAGS(pEFlags);
6004#ifdef RT_ARCH_X86
6005 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6006 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6007 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6008 else
6009 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6010#else
6011 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6012 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6013 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6014 else
6015 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6016#endif
6017
6018 IEM_MC_ADVANCE_RIP();
6019 IEM_MC_END();
6020 return VINF_SUCCESS;
6021
6022 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6023 }
6024 }
6025 else
6026 {
6027 switch (pVCpu->iem.s.enmEffOpSize)
6028 {
6029 case IEMMODE_16BIT:
6030 IEM_MC_BEGIN(4, 3);
6031 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6032 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6033 IEM_MC_ARG(uint16_t, u16Src, 2);
6034 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6036 IEM_MC_LOCAL(uint16_t, u16Ax);
6037
6038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6039 IEMOP_HLP_DONE_DECODING();
6040 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6041 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6042 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6043 IEM_MC_FETCH_EFLAGS(EFlags);
6044 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6045 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6046 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6047 else
6048 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6049
6050 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6051 IEM_MC_COMMIT_EFLAGS(EFlags);
6052 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6053 IEM_MC_ADVANCE_RIP();
6054 IEM_MC_END();
6055 return VINF_SUCCESS;
6056
6057 case IEMMODE_32BIT:
6058 IEM_MC_BEGIN(4, 3);
6059 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6060 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6061 IEM_MC_ARG(uint32_t, u32Src, 2);
6062 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6064 IEM_MC_LOCAL(uint32_t, u32Eax);
6065
6066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6067 IEMOP_HLP_DONE_DECODING();
6068 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6069 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6070 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6071 IEM_MC_FETCH_EFLAGS(EFlags);
6072 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6073 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6074 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6075 else
6076 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6077
6078 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6079 IEM_MC_COMMIT_EFLAGS(EFlags);
6080 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6081 IEM_MC_ADVANCE_RIP();
6082 IEM_MC_END();
6083 return VINF_SUCCESS;
6084
6085 case IEMMODE_64BIT:
6086 IEM_MC_BEGIN(4, 3);
6087 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6088 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6089#ifdef RT_ARCH_X86
6090 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6091#else
6092 IEM_MC_ARG(uint64_t, u64Src, 2);
6093#endif
6094 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6096 IEM_MC_LOCAL(uint64_t, u64Rax);
6097
6098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6099 IEMOP_HLP_DONE_DECODING();
6100 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6101 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6102 IEM_MC_FETCH_EFLAGS(EFlags);
6103 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6104#ifdef RT_ARCH_X86
6105 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6106 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6107 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6108 else
6109 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6110#else
6111 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6112 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6113 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6114 else
6115 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6116#endif
6117
6118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6119 IEM_MC_COMMIT_EFLAGS(EFlags);
6120 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6121 IEM_MC_ADVANCE_RIP();
6122 IEM_MC_END();
6123 return VINF_SUCCESS;
6124
6125 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6126 }
6127 }
6128}
6129
6130
6131FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6132{
6133 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6134 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6135
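    /* The far pointer operand is laid out offset first, followed by the 16-bit
       selector at byte offset 2, 4 or 8 depending on the operand size. */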
6136 switch (pVCpu->iem.s.enmEffOpSize)
6137 {
6138 case IEMMODE_16BIT:
6139 IEM_MC_BEGIN(5, 1);
6140 IEM_MC_ARG(uint16_t, uSel, 0);
6141 IEM_MC_ARG(uint16_t, offSeg, 1);
6142 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6143 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6144 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6145 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6148 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6149 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6150 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6151 IEM_MC_END();
6152 return VINF_SUCCESS;
6153
6154 case IEMMODE_32BIT:
6155 IEM_MC_BEGIN(5, 1);
6156 IEM_MC_ARG(uint16_t, uSel, 0);
6157 IEM_MC_ARG(uint32_t, offSeg, 1);
6158 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6159 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6160 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6161 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6164 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6165 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6166 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6167 IEM_MC_END();
6168 return VINF_SUCCESS;
6169
6170 case IEMMODE_64BIT:
6171 IEM_MC_BEGIN(5, 1);
6172 IEM_MC_ARG(uint16_t, uSel, 0);
6173 IEM_MC_ARG(uint64_t, offSeg, 1);
6174 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6175 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6176 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6177 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6180 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6181 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6182 else
6183 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6184 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6185 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6186 IEM_MC_END();
6187 return VINF_SUCCESS;
6188
6189 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6190 }
6191}
6192
6193
6194/** Opcode 0x0f 0xb2. */
6195FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6196{
6197 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6198 IEMOP_HLP_MIN_386();
6199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6200 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6201 return IEMOP_RAISE_INVALID_OPCODE();
6202 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6203}
6204
6205
6206/** Opcode 0x0f 0xb3. */
6207FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6208{
6209 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6210 IEMOP_HLP_MIN_386();
6211 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6212}
6213
6214
6215/** Opcode 0x0f 0xb4. */
6216FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6217{
6218 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6219 IEMOP_HLP_MIN_386();
6220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6221 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6222 return IEMOP_RAISE_INVALID_OPCODE();
6223 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6224}
6225
6226
6227/** Opcode 0x0f 0xb5. */
6228FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6229{
6230 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6231 IEMOP_HLP_MIN_386();
6232 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6233 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6234 return IEMOP_RAISE_INVALID_OPCODE();
6235 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6236}
6237
6238
6239/** Opcode 0x0f 0xb6. */
6240FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6241{
6242 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6243 IEMOP_HLP_MIN_386();
6244
6245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6246
6247 /*
6248 * If rm is denoting a register, no more instruction bytes.
6249 */
6250 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6251 {
6252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6253 switch (pVCpu->iem.s.enmEffOpSize)
6254 {
6255 case IEMMODE_16BIT:
6256 IEM_MC_BEGIN(0, 1);
6257 IEM_MC_LOCAL(uint16_t, u16Value);
6258 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6259 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6260 IEM_MC_ADVANCE_RIP();
6261 IEM_MC_END();
6262 return VINF_SUCCESS;
6263
6264 case IEMMODE_32BIT:
6265 IEM_MC_BEGIN(0, 1);
6266 IEM_MC_LOCAL(uint32_t, u32Value);
6267 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6268 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6269 IEM_MC_ADVANCE_RIP();
6270 IEM_MC_END();
6271 return VINF_SUCCESS;
6272
6273 case IEMMODE_64BIT:
6274 IEM_MC_BEGIN(0, 1);
6275 IEM_MC_LOCAL(uint64_t, u64Value);
6276 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6277 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6278 IEM_MC_ADVANCE_RIP();
6279 IEM_MC_END();
6280 return VINF_SUCCESS;
6281
6282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6283 }
6284 }
6285 else
6286 {
6287 /*
6288 * We're loading a register from memory.
6289 */
6290 switch (pVCpu->iem.s.enmEffOpSize)
6291 {
6292 case IEMMODE_16BIT:
6293 IEM_MC_BEGIN(0, 2);
6294 IEM_MC_LOCAL(uint16_t, u16Value);
6295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6298 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6299 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6300 IEM_MC_ADVANCE_RIP();
6301 IEM_MC_END();
6302 return VINF_SUCCESS;
6303
6304 case IEMMODE_32BIT:
6305 IEM_MC_BEGIN(0, 2);
6306 IEM_MC_LOCAL(uint32_t, u32Value);
6307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6310 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6311 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6312 IEM_MC_ADVANCE_RIP();
6313 IEM_MC_END();
6314 return VINF_SUCCESS;
6315
6316 case IEMMODE_64BIT:
6317 IEM_MC_BEGIN(0, 2);
6318 IEM_MC_LOCAL(uint64_t, u64Value);
6319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6322 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6323 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6324 IEM_MC_ADVANCE_RIP();
6325 IEM_MC_END();
6326 return VINF_SUCCESS;
6327
6328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6329 }
6330 }
6331}
6332
6333
6334/** Opcode 0x0f 0xb7. */
6335FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6336{
6337 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6338 IEMOP_HLP_MIN_386();
6339
6340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6341
6342 /** @todo Not entirely sure how the operand size prefix is handled here,
6343 * assuming that it will be ignored. Would be nice to have a few
6344 * tests for this. */
6345 /*
6346 * If rm is denoting a register, no more instruction bytes.
6347 */
6348 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6349 {
6350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6351 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6352 {
6353 IEM_MC_BEGIN(0, 1);
6354 IEM_MC_LOCAL(uint32_t, u32Value);
6355 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6356 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6357 IEM_MC_ADVANCE_RIP();
6358 IEM_MC_END();
6359 }
6360 else
6361 {
6362 IEM_MC_BEGIN(0, 1);
6363 IEM_MC_LOCAL(uint64_t, u64Value);
6364 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6365 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6366 IEM_MC_ADVANCE_RIP();
6367 IEM_MC_END();
6368 }
6369 }
6370 else
6371 {
6372 /*
6373 * We're loading a register from memory.
6374 */
6375 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6376 {
6377 IEM_MC_BEGIN(0, 2);
6378 IEM_MC_LOCAL(uint32_t, u32Value);
6379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6382 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6383 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6384 IEM_MC_ADVANCE_RIP();
6385 IEM_MC_END();
6386 }
6387 else
6388 {
6389 IEM_MC_BEGIN(0, 2);
6390 IEM_MC_LOCAL(uint64_t, u64Value);
6391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6394 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6395 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6396 IEM_MC_ADVANCE_RIP();
6397 IEM_MC_END();
6398 }
6399 }
6400 return VINF_SUCCESS;
6401}
6402
6403
6404/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6405FNIEMOP_UD_STUB(iemOp_jmpe);
6406/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6407FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6408
6409
6410/** Opcode 0x0f 0xb9 - UD1 (reserved; always raises #UD). */
6411FNIEMOP_DEF(iemOp_Grp10)
6412{
6413 Log(("iemOp_Grp10 -> #UD\n"));
6414 return IEMOP_RAISE_INVALID_OPCODE();
6415}
6416
6417
6418/** Opcode 0x0f 0xba. */
6419FNIEMOP_DEF(iemOp_Grp8)
6420{
6421 IEMOP_HLP_MIN_386();
6422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6423 PCIEMOPBINSIZES pImpl;
6424 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6425 {
6426 case 0: case 1: case 2: case 3:
6427 return IEMOP_RAISE_INVALID_OPCODE();
6428 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6429 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6430 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6431 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6433 }
6434 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6435
6436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6437 {
6438 /* register destination. */
6439 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6441
6442 switch (pVCpu->iem.s.enmEffOpSize)
6443 {
6444 case IEMMODE_16BIT:
6445 IEM_MC_BEGIN(3, 0);
6446 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6447 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6448 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6449
6450 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6451 IEM_MC_REF_EFLAGS(pEFlags);
6452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6453
6454 IEM_MC_ADVANCE_RIP();
6455 IEM_MC_END();
6456 return VINF_SUCCESS;
6457
6458 case IEMMODE_32BIT:
6459 IEM_MC_BEGIN(3, 0);
6460 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6461 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6462 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6463
6464 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6465 IEM_MC_REF_EFLAGS(pEFlags);
6466 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6467
6468 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6469 IEM_MC_ADVANCE_RIP();
6470 IEM_MC_END();
6471 return VINF_SUCCESS;
6472
6473 case IEMMODE_64BIT:
6474 IEM_MC_BEGIN(3, 0);
6475 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6476 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6477 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6478
6479 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6480 IEM_MC_REF_EFLAGS(pEFlags);
6481 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6482
6483 IEM_MC_ADVANCE_RIP();
6484 IEM_MC_END();
6485 return VINF_SUCCESS;
6486
6487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6488 }
6489 }
6490 else
6491 {
6492 /* memory destination. */
6493
6494 uint32_t fAccess;
6495 if (pImpl->pfnLockedU16)
6496 fAccess = IEM_ACCESS_DATA_RW;
6497 else /* BT */
6498 fAccess = IEM_ACCESS_DATA_R;
6499
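        /* Note that the immediate bit offset is truncated to the operand width
           below, so unlike the Gv forms no effective address adjustment is
           needed. */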
6500 /** @todo test negative bit offsets! */
6501 switch (pVCpu->iem.s.enmEffOpSize)
6502 {
6503 case IEMMODE_16BIT:
6504 IEM_MC_BEGIN(3, 1);
6505 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6506 IEM_MC_ARG(uint16_t, u16Src, 1);
6507 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6509
6510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6511 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6512 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6513 if (pImpl->pfnLockedU16)
6514 IEMOP_HLP_DONE_DECODING();
6515 else
6516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6517 IEM_MC_FETCH_EFLAGS(EFlags);
6518 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6519 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6520 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6521 else
6522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6523 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6524
6525 IEM_MC_COMMIT_EFLAGS(EFlags);
6526 IEM_MC_ADVANCE_RIP();
6527 IEM_MC_END();
6528 return VINF_SUCCESS;
6529
6530 case IEMMODE_32BIT:
6531 IEM_MC_BEGIN(3, 1);
6532 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6533 IEM_MC_ARG(uint32_t, u32Src, 1);
6534 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6536
6537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6538 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6539 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6540 if (pImpl->pfnLockedU16)
6541 IEMOP_HLP_DONE_DECODING();
6542 else
6543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6544 IEM_MC_FETCH_EFLAGS(EFlags);
6545 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6546 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6547 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6548 else
6549 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6550 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6551
6552 IEM_MC_COMMIT_EFLAGS(EFlags);
6553 IEM_MC_ADVANCE_RIP();
6554 IEM_MC_END();
6555 return VINF_SUCCESS;
6556
6557 case IEMMODE_64BIT:
6558 IEM_MC_BEGIN(3, 1);
6559 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6560 IEM_MC_ARG(uint64_t, u64Src, 1);
6561 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6563
6564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6565 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6566 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6567 if (pImpl->pfnLockedU16)
6568 IEMOP_HLP_DONE_DECODING();
6569 else
6570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6571 IEM_MC_FETCH_EFLAGS(EFlags);
6572 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6573 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6574 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6575 else
6576 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6577 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6578
6579 IEM_MC_COMMIT_EFLAGS(EFlags);
6580 IEM_MC_ADVANCE_RIP();
6581 IEM_MC_END();
6582 return VINF_SUCCESS;
6583
6584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6585 }
6586 }
6588}
6589
6590
6591/** Opcode 0x0f 0xbb. */
6592FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6593{
6594 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6595 IEMOP_HLP_MIN_386();
6596 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6597}
6598
6599
6600/** Opcode 0x0f 0xbc. */
6601FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6602{
6603 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6604 IEMOP_HLP_MIN_386();
6605 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6606 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6607}
6608
6609
6610/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6611FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6612
6613
6614/** Opcode 0x0f 0xbd. */
6615FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6616{
6617 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6618 IEMOP_HLP_MIN_386();
6619 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6620 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6621}
6622
6623
6624/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6625FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6626
6627
6628/** Opcode 0x0f 0xbe. */
6629FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6630{
6631 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6632 IEMOP_HLP_MIN_386();
6633
6634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6635
6636 /*
6637 * If rm is denoting a register, no more instruction bytes.
6638 */
6639 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6640 {
6641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6642 switch (pVCpu->iem.s.enmEffOpSize)
6643 {
6644 case IEMMODE_16BIT:
6645 IEM_MC_BEGIN(0, 1);
6646 IEM_MC_LOCAL(uint16_t, u16Value);
6647 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6648 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6649 IEM_MC_ADVANCE_RIP();
6650 IEM_MC_END();
6651 return VINF_SUCCESS;
6652
6653 case IEMMODE_32BIT:
6654 IEM_MC_BEGIN(0, 1);
6655 IEM_MC_LOCAL(uint32_t, u32Value);
6656 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6657 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6658 IEM_MC_ADVANCE_RIP();
6659 IEM_MC_END();
6660 return VINF_SUCCESS;
6661
6662 case IEMMODE_64BIT:
6663 IEM_MC_BEGIN(0, 1);
6664 IEM_MC_LOCAL(uint64_t, u64Value);
6665 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6666 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6667 IEM_MC_ADVANCE_RIP();
6668 IEM_MC_END();
6669 return VINF_SUCCESS;
6670
6671 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6672 }
6673 }
6674 else
6675 {
6676 /*
6677 * We're loading a register from memory.
6678 */
6679 switch (pVCpu->iem.s.enmEffOpSize)
6680 {
6681 case IEMMODE_16BIT:
6682 IEM_MC_BEGIN(0, 2);
6683 IEM_MC_LOCAL(uint16_t, u16Value);
6684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6687 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6688 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6689 IEM_MC_ADVANCE_RIP();
6690 IEM_MC_END();
6691 return VINF_SUCCESS;
6692
6693 case IEMMODE_32BIT:
6694 IEM_MC_BEGIN(0, 2);
6695 IEM_MC_LOCAL(uint32_t, u32Value);
6696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6699 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6700 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6701 IEM_MC_ADVANCE_RIP();
6702 IEM_MC_END();
6703 return VINF_SUCCESS;
6704
6705 case IEMMODE_64BIT:
6706 IEM_MC_BEGIN(0, 2);
6707 IEM_MC_LOCAL(uint64_t, u64Value);
6708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6711 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6712 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6713 IEM_MC_ADVANCE_RIP();
6714 IEM_MC_END();
6715 return VINF_SUCCESS;
6716
6717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6718 }
6719 }
6720}
6721
6722
6723/** Opcode 0x0f 0xbf. */
6724FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6725{
6726 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6727 IEMOP_HLP_MIN_386();
6728
6729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6730
6731 /** @todo Not entirely sure how the operand size prefix is handled here,
6732 * assuming that it will be ignored. Would be nice to have a few
6733 * tests for this. */
6734 /*
6735 * If rm is denoting a register, no more instruction bytes.
6736 */
6737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6738 {
6739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6740 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6741 {
6742 IEM_MC_BEGIN(0, 1);
6743 IEM_MC_LOCAL(uint32_t, u32Value);
6744 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6745 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6746 IEM_MC_ADVANCE_RIP();
6747 IEM_MC_END();
6748 }
6749 else
6750 {
6751 IEM_MC_BEGIN(0, 1);
6752 IEM_MC_LOCAL(uint64_t, u64Value);
6753 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6754 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6755 IEM_MC_ADVANCE_RIP();
6756 IEM_MC_END();
6757 }
6758 }
6759 else
6760 {
6761 /*
6762 * We're loading a register from memory.
6763 */
6764 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6765 {
6766 IEM_MC_BEGIN(0, 2);
6767 IEM_MC_LOCAL(uint32_t, u32Value);
6768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6771 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6772 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6773 IEM_MC_ADVANCE_RIP();
6774 IEM_MC_END();
6775 }
6776 else
6777 {
6778 IEM_MC_BEGIN(0, 2);
6779 IEM_MC_LOCAL(uint64_t, u64Value);
6780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6783 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6784 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6785 IEM_MC_ADVANCE_RIP();
6786 IEM_MC_END();
6787 }
6788 }
6789 return VINF_SUCCESS;
6790}
6791
6792
6793/** Opcode 0x0f 0xc0. */
6794FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6795{
6796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6797 IEMOP_HLP_MIN_486();
6798 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6799
6800 /*
6801 * If rm is denoting a register, no more instruction bytes.
6802 */
6803 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6804 {
6805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6806
6807 IEM_MC_BEGIN(3, 0);
6808 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6809 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6810 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6811
6812 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6813 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6814 IEM_MC_REF_EFLAGS(pEFlags);
6815 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6816
6817 IEM_MC_ADVANCE_RIP();
6818 IEM_MC_END();
6819 }
6820 else
6821 {
6822 /*
6823 * We're accessing memory.
6824 */
6825 IEM_MC_BEGIN(3, 3);
6826 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6827 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6828 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6829 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6831
6832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING();
6833 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6834 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6835 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6836 IEM_MC_FETCH_EFLAGS(EFlags);
6837 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6838 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6839 else
6840 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6841
6842 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6843 IEM_MC_COMMIT_EFLAGS(EFlags);
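        /* u8RegCopy now holds the old destination value, exchanged in by the
           xadd helper; write it back to the source register. */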
6844 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6845 IEM_MC_ADVANCE_RIP();
6846 IEM_MC_END();
6848 }
6849 return VINF_SUCCESS;
6850}
6851
6852
6853/** Opcode 0x0f 0xc1. */
6854FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6855{
6856 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6857 IEMOP_HLP_MIN_486();
6858 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6859
6860 /*
6861 * If rm is denoting a register, no more instruction bytes.
6862 */
6863 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6864 {
6865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6866
6867 switch (pVCpu->iem.s.enmEffOpSize)
6868 {
6869 case IEMMODE_16BIT:
6870 IEM_MC_BEGIN(3, 0);
6871 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6872 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6873 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6874
6875 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6876 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6877 IEM_MC_REF_EFLAGS(pEFlags);
6878 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6879
6880 IEM_MC_ADVANCE_RIP();
6881 IEM_MC_END();
6882 return VINF_SUCCESS;
6883
6884 case IEMMODE_32BIT:
6885 IEM_MC_BEGIN(3, 0);
6886 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6887 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6889
6890 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6891 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6892 IEM_MC_REF_EFLAGS(pEFlags);
6893 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6894
6895 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6896 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6897 IEM_MC_ADVANCE_RIP();
6898 IEM_MC_END();
6899 return VINF_SUCCESS;
6900
6901 case IEMMODE_64BIT:
6902 IEM_MC_BEGIN(3, 0);
6903 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6904 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6905 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6906
6907 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6908 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6909 IEM_MC_REF_EFLAGS(pEFlags);
6910 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6911
6912 IEM_MC_ADVANCE_RIP();
6913 IEM_MC_END();
6914 return VINF_SUCCESS;
6915
6916 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6917 }
6918 }
6919 else
6920 {
6921 /*
6922 * We're accessing memory.
6923 */
6924 switch (pVCpu->iem.s.enmEffOpSize)
6925 {
6926 case IEMMODE_16BIT:
6927 IEM_MC_BEGIN(3, 3);
6928 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6929 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6930 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6931 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6933
6934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING();
6935 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6936 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6937 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6938 IEM_MC_FETCH_EFLAGS(EFlags);
6939 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6940 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6941 else
6942 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6943
6944 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6945 IEM_MC_COMMIT_EFLAGS(EFlags);
6946 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6947 IEM_MC_ADVANCE_RIP();
6948 IEM_MC_END();
6949 return VINF_SUCCESS;
6950
6951 case IEMMODE_32BIT:
6952 IEM_MC_BEGIN(3, 3);
6953 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6954 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6955 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6956 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6958
6959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6960 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6961 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6962 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6963 IEM_MC_FETCH_EFLAGS(EFlags);
6964 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6965 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6966 else
6967 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6968
6969 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6970 IEM_MC_COMMIT_EFLAGS(EFlags);
6971 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6972 IEM_MC_ADVANCE_RIP();
6973 IEM_MC_END();
6974 return VINF_SUCCESS;
6975
6976 case IEMMODE_64BIT:
6977 IEM_MC_BEGIN(3, 3);
6978 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6979 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6980 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6981 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6983
6984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6985 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6986 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6987 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6988 IEM_MC_FETCH_EFLAGS(EFlags);
6989 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6990 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6991 else
6992 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6993
6994 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6995 IEM_MC_COMMIT_EFLAGS(EFlags);
6996 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6997 IEM_MC_ADVANCE_RIP();
6998 IEM_MC_END();
6999 return VINF_SUCCESS;
7000
7001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7002 }
7003 }
7004}
7005
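/* For illustration only (not IEM code): a plain C sketch of the XADD
   semantics emulated above. The destination receives the sum, the source
   register receives the old destination value, and EFLAGS is updated as
   for ADD; with a LOCK prefix (memory form only) the _locked worker
   performs the same operation atomically. */
#if 0
static void xaddSketchU16(uint16_t *puDst, uint16_t *puReg)
{
    uint16_t const uOld = *puDst;
    *puDst = uOld + *puReg;   /* EFLAGS would be computed as for an ADD. */
    *puReg = uOld;            /* The register gets the old destination value. */
}
#endif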
7006
7007/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7008FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7009/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7010FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7011/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7012FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7013/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7014FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7015
7016
7017/** Opcode 0x0f 0xc3. */
7018FNIEMOP_DEF(iemOp_movnti_My_Gy)
7019{
7020 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7021
7022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7023
7024 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7025 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7026 {
7027 switch (pVCpu->iem.s.enmEffOpSize)
7028 {
7029 case IEMMODE_32BIT:
7030 IEM_MC_BEGIN(0, 2);
7031 IEM_MC_LOCAL(uint32_t, u32Value);
7032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7033
7034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7036 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7037 return IEMOP_RAISE_INVALID_OPCODE();
7038
7039 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7040 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7041 IEM_MC_ADVANCE_RIP();
7042 IEM_MC_END();
7043 break;
7044
7045 case IEMMODE_64BIT:
7046 IEM_MC_BEGIN(0, 2);
7047 IEM_MC_LOCAL(uint64_t, u64Value);
7048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7049
7050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7052 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7053 return IEMOP_RAISE_INVALID_OPCODE();
7054
7055 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7056 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7057 IEM_MC_ADVANCE_RIP();
7058 IEM_MC_END();
7059 break;
7060
7061 case IEMMODE_16BIT:
7062 /** @todo check this form. */
7063 return IEMOP_RAISE_INVALID_OPCODE();
7064 }
7065 }
7066 else
7067 return IEMOP_RAISE_INVALID_OPCODE();
7068 return VINF_SUCCESS;
7069}
7070/* Opcode 0x66 0x0f 0xc3 - invalid */
7071/* Opcode 0xf3 0x0f 0xc3 - invalid */
7072/* Opcode 0xf2 0x0f 0xc3 - invalid */
7073
7074/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7075FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7076/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7077FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7078/* Opcode 0xf3 0x0f 0xc4 - invalid */
7079/* Opcode 0xf2 0x0f 0xc4 - invalid */
7080
7081/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7082FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7083/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7084FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7085/* Opcode 0xf3 0x0f 0xc5 - invalid */
7086/* Opcode 0xf2 0x0f 0xc5 - invalid */
7087
7088/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7089FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7090/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7091FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7092/* Opcode 0xf3 0x0f 0xc6 - invalid */
7093/* Opcode 0xf2 0x0f 0xc6 - invalid */
7094
7095
7096/** Opcode 0x0f 0xc7 !11/1. */
7097FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7098{
7099 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7100
7101 IEM_MC_BEGIN(4, 3);
7102 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7103 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7104 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7105 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7106 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7107 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7109
7110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7111 IEMOP_HLP_DONE_DECODING();
7112 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7113
7114 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7115 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7116 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7117
7118 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7119 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7120 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7121
7122 IEM_MC_FETCH_EFLAGS(EFlags);
7123 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7124 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7125 else
7126 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7127
7128 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7129 IEM_MC_COMMIT_EFLAGS(EFlags);
7130 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7131 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7132 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7133 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7134 IEM_MC_ENDIF();
7135 IEM_MC_ADVANCE_RIP();
7136
7137 IEM_MC_END();
7138 return VINF_SUCCESS;
7139}
7140
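/* For illustration only (not the IEM implementation): a plain C sketch of
   the cmpxchg8b semantics emulated above. EDX:EAX is compared with the
   64-bit memory operand; on a match ECX:EBX is stored and ZF is set,
   otherwise the memory value is loaded into EDX:EAX and ZF is cleared. */
#if 0
static bool cmpxchg8bSketch(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx;        /* Match: store ECX:EBX, ZF=1. */
        return true;
    }
    *pu64EaxEdx = *pu64Mem;          /* Mismatch: load memory into EDX:EAX, ZF=0. */
    return false;
}
#endif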
7141
7142/** Opcode REX.W 0x0f 0xc7 !11/1. */
7143FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7144{
7145 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7146 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7147 {
7148#if 0
7149 RT_NOREF(bRm);
7150 IEMOP_BITCH_ABOUT_STUB();
7151 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7152#else
7153 IEM_MC_BEGIN(4, 3);
7154 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7155 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7156 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7157 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7158 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7159 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7161
7162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7163 IEMOP_HLP_DONE_DECODING();
7164 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7165 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7166
7167 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7168 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7169 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7170
7171 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7172 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7173 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7174
7175 IEM_MC_FETCH_EFLAGS(EFlags);
7176# ifdef RT_ARCH_AMD64
7177 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7178 {
7179 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7180 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7181 else
7182 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7183 }
7184 else
7185# endif
7186 {
7187 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7188 accesses and thus not atomic, which works fine in a uni-CPU guest
7189 configuration (ignoring DMA). If guest SMP is active we have no choice
7190 but to use a rendezvous callback here. Sigh. */
7191 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7192 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7193 else
7194 {
7195 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7196 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7197 }
7198 }
7199
7200 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7201 IEM_MC_COMMIT_EFLAGS(EFlags);
7202 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7203 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7204 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7205 IEM_MC_ENDIF();
7206 IEM_MC_ADVANCE_RIP();
7207
7208 IEM_MC_END();
7209 return VINF_SUCCESS;
7210#endif
7211 }
7212 Log(("cmpxchg16b -> #UD\n"));
7213 return IEMOP_RAISE_INVALID_OPCODE();
7214}
7215
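/* For illustration only: unlike cmpxchg8b, cmpxchg16b raises #GP(0) for a
   misaligned operand. The IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED check
   above boils down to this simple test (sketch, not the IEM macro): */
#if 0
static bool isCmpXchg16bOperandAligned(RTGCPTR GCPtrEff)
{
    return !(GCPtrEff & 15);         /* 16-byte alignment required. */
}
#endif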
7216
7217/** Opcode 0x0f 0xc7 11/6. */
7218FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7219
7220/** Opcode 0x0f 0xc7 !11/6. */
7221FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7222
7223/** Opcode 0x66 0x0f 0xc7 !11/6. */
7224FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7225
7226/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7227FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7228
7229/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7230FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7231
7232
7233/** Opcode 0x0f 0xc7. */
7234FNIEMOP_DEF(iemOp_Grp9)
7235{
7236 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7238 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7239 {
7240 case 0: case 2: case 3: case 4: case 5:
7241 return IEMOP_RAISE_INVALID_OPCODE();
7242 case 1:
7243 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7244 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7245 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7246 return IEMOP_RAISE_INVALID_OPCODE();
7247 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7248 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7249 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7250 case 6:
7251 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7252 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7253 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7254 {
7255 case 0:
7256 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7257 case IEM_OP_PRF_SIZE_OP:
7258 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7259 case IEM_OP_PRF_REPZ:
7260 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7261 default:
7262 return IEMOP_RAISE_INVALID_OPCODE();
7263 }
7264 case 7:
7265 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7266 {
7267 case 0:
7268 case IEM_OP_PRF_REPZ:
7269 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7270 default:
7271 return IEMOP_RAISE_INVALID_OPCODE();
7272 }
7273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7274 }
7275}
7276
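/* Decoding summary for group 9 (0x0f 0xc7) as implemented by the switch
   above:
       /1 (memory only):  cmpxchg8b, or cmpxchg16b when REX.W is set
       /6 (register):     rdrand
       /6 (memory):       vmptrld (no prefix), vmclear (0x66), vmxon (0xf3)
       /7:                vmptrst (no prefix or 0xf3)
   All other combinations raise #UD. */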
7277
7278/**
7279 * Common 'bswap register' helper.
7280 */
7281FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7282{
7283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7284 switch (pVCpu->iem.s.enmEffOpSize)
7285 {
7286 case IEMMODE_16BIT:
7287 IEM_MC_BEGIN(1, 0);
7288 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7289 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7290 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7291 IEM_MC_ADVANCE_RIP();
7292 IEM_MC_END();
7293 return VINF_SUCCESS;
7294
7295 case IEMMODE_32BIT:
7296 IEM_MC_BEGIN(1, 0);
7297 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7298 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7299 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7300 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7301 IEM_MC_ADVANCE_RIP();
7302 IEM_MC_END();
7303 return VINF_SUCCESS;
7304
7305 case IEMMODE_64BIT:
7306 IEM_MC_BEGIN(1, 0);
7307 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7308 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7309 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7310 IEM_MC_ADVANCE_RIP();
7311 IEM_MC_END();
7312 return VINF_SUCCESS;
7313
7314 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7315 }
7316}
7317
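/* For illustration only: a plain C equivalent of the 32-bit byte swap done
   by the iemAImpl_bswap_u32 helper referenced above (the real helper lives
   elsewhere and may be implemented in assembly): */
#if 0
static uint32_t bswapSketchU32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif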
7318
7319/** Opcode 0x0f 0xc8. */
7320FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7321{
7322 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7323 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7324 prefix; however, REX.B appears to be the correct prefix. For a parallel
7325 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7326 IEMOP_HLP_MIN_486();
7327 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7328}
7329
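/* Encoding example: in 64-bit mode 0x41 0x0f 0xc8 (REX.B + bswap) selects
   r8d, while 0x48 0x0f 0xc8 (REX.W) selects rax. */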
7330
7331/** Opcode 0x0f 0xc9. */
7332FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7333{
7334 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7335 IEMOP_HLP_MIN_486();
7336 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7337}
7338
7339
7340/** Opcode 0x0f 0xca. */
7341FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7342{
7343 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7344 IEMOP_HLP_MIN_486();
7345 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7346}
7347
7348
7349/** Opcode 0x0f 0xcb. */
7350FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7351{
7352 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7353 IEMOP_HLP_MIN_486();
7354 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7355}
7356
7357
7358/** Opcode 0x0f 0xcc. */
7359FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7360{
7361 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7362 IEMOP_HLP_MIN_486();
7363 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7364}
7365
7366
7367/** Opcode 0x0f 0xcd. */
7368FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7369{
7370 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7371 IEMOP_HLP_MIN_486();
7372 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7373}
7374
7375
7376/** Opcode 0x0f 0xce. */
7377FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7378{
7379 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7380 IEMOP_HLP_MIN_486();
7381 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7382}
7383
7384
7385/** Opcode 0x0f 0xcf. */
7386FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7387{
7388 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7389 IEMOP_HLP_MIN_486();
7390 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7391}
7392
7393
7394/* Opcode 0x0f 0xd0 - invalid */
7395/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7396FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7397/* Opcode 0xf3 0x0f 0xd0 - invalid */
7398/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7399FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7400
7401/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7402FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7403/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7404FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7405/* Opcode 0xf3 0x0f 0xd1 - invalid */
7406/* Opcode 0xf2 0x0f 0xd1 - invalid */
7407
7408/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7409FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7410/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7411FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7412/* Opcode 0xf3 0x0f 0xd2 - invalid */
7413/* Opcode 0xf2 0x0f 0xd2 - invalid */
7414
7415/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7416FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7417/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7418FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7419/* Opcode 0xf3 0x0f 0xd3 - invalid */
7420/* Opcode 0xf2 0x0f 0xd3 - invalid */
7421
7422/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7423FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7424/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7425FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7426/* Opcode 0xf3 0x0f 0xd4 - invalid */
7427/* Opcode 0xf2 0x0f 0xd4 - invalid */
7428
7429/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7430FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7431/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7432FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7433/* Opcode 0xf3 0x0f 0xd5 - invalid */
7434/* Opcode 0xf2 0x0f 0xd5 - invalid */
7435
7436/* Opcode 0x0f 0xd6 - invalid */
7437/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7438FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7439/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7440FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7441/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7442FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7443#if 0
7444FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7445{
7446 /* Docs say register only. */
7447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7448
7449 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7450 {
7451 case IEM_OP_PRF_SIZE_OP: /* SSE */
7452 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7453 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7454 IEM_MC_BEGIN(2, 0);
7455 IEM_MC_ARG(uint64_t *, pDst, 0);
7456 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7457 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7458 IEM_MC_PREPARE_SSE_USAGE();
7459 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7460 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7461 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7462 IEM_MC_ADVANCE_RIP();
7463 IEM_MC_END();
7464 return VINF_SUCCESS;
7465
7466 case 0: /* MMX */
7467 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7468 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7469 IEM_MC_BEGIN(2, 0);
7470 IEM_MC_ARG(uint64_t *, pDst, 0);
7471 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7472 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7473 IEM_MC_PREPARE_FPU_USAGE();
7474 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7475 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7476 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7477 IEM_MC_ADVANCE_RIP();
7478 IEM_MC_END();
7479 return VINF_SUCCESS;
7480
7481 default:
7482 return IEMOP_RAISE_INVALID_OPCODE();
7483 }
7484}
7485#endif
7486
7487
7488/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7489FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7490{
7491 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7492 /** @todo testcase: Check that the instruction implicitly clears the high
7493 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7494 * and opcode modifications are made to work with the whole width (not
7495 * just 128). */
7496 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7497 /* Docs say register only. */
7498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7499 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7500 {
7501 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7502 IEM_MC_BEGIN(2, 0);
7503 IEM_MC_ARG(uint64_t *, pDst, 0);
7504 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7505 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7506 IEM_MC_PREPARE_FPU_USAGE();
7507 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7508 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7509 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7510 IEM_MC_ADVANCE_RIP();
7511 IEM_MC_END();
7512 return VINF_SUCCESS;
7513 }
7514 return IEMOP_RAISE_INVALID_OPCODE();
7515}
7516
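/* For illustration only: a plain C sketch of the pmovmskb semantics the
   iemAImpl_pmovmskb_u64 call above emulates; the most significant bit of
   each of the eight source bytes is gathered into the low byte of the
   destination. */
#if 0
static uint64_t pmovmskbSketchU64(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif
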
7517/** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7518FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7519{
7520 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7521 /** @todo testcase: Check that the instruction implicitly clears the high
7522 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7523 * and opcode modifications are made to work with the whole width (not
7524 * just 128). */
7525 IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7526 /* Docs say register only. */
7527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7528 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7529 {
7530 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7531 IEM_MC_BEGIN(2, 0);
7532 IEM_MC_ARG(uint64_t *, pDst, 0);
7533 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7534 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7535 IEM_MC_PREPARE_SSE_USAGE();
7536 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7537 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7538 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7539 IEM_MC_ADVANCE_RIP();
7540 IEM_MC_END();
7541 return VINF_SUCCESS;
7542 }
7543 return IEMOP_RAISE_INVALID_OPCODE();
7544}
7545
7546/* Opcode 0xf3 0x0f 0xd7 - invalid */
7547/* Opcode 0xf2 0x0f 0xd7 - invalid */
7548
7549
7550/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7551FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7552/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7553FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7554/* Opcode 0xf3 0x0f 0xd8 - invalid */
7555/* Opcode 0xf2 0x0f 0xd8 - invalid */
7556
7557/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7558FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7559/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7560FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7561/* Opcode 0xf3 0x0f 0xd9 - invalid */
7562/* Opcode 0xf2 0x0f 0xd9 - invalid */
7563
7564/** Opcode 0x0f 0xda - pminub Pq, Qq */
7565FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7566/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7567FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7568/* Opcode 0xf3 0x0f 0xda - invalid */
7569/* Opcode 0xf2 0x0f 0xda - invalid */
7570
7571/** Opcode 0x0f 0xdb - pand Pq, Qq */
7572FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7573/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7574FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7575/* Opcode 0xf3 0x0f 0xdb - invalid */
7576/* Opcode 0xf2 0x0f 0xdb - invalid */
7577
7578/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7579FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7580/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7581FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7582/* Opcode 0xf3 0x0f 0xdc - invalid */
7583/* Opcode 0xf2 0x0f 0xdc - invalid */
7584
7585/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7586FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7587/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7588FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7589/* Opcode 0xf3 0x0f 0xdd - invalid */
7590/* Opcode 0xf2 0x0f 0xdd - invalid */
7591
7592/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7593FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7594/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7595FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7596/* Opcode 0xf3 0x0f 0xde - invalid */
7597/* Opcode 0xf2 0x0f 0xde - invalid */
7598
7599/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7600FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7601/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7602FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7603/* Opcode 0xf3 0x0f 0xdf - invalid */
7604/* Opcode 0xf2 0x0f 0xdf - invalid */
7605
7606/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7607FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7608/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7609FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7610/* Opcode 0xf3 0x0f 0xe0 - invalid */
7611/* Opcode 0xf2 0x0f 0xe0 - invalid */
7612
7613/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7614FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7615/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7616FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7617/* Opcode 0xf3 0x0f 0xe1 - invalid */
7618/* Opcode 0xf2 0x0f 0xe1 - invalid */
7619
7620/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7621FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7622/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7623FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7624/* Opcode 0xf3 0x0f 0xe2 - invalid */
7625/* Opcode 0xf2 0x0f 0xe2 - invalid */
7626
7627/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7628FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7629/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7630FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7631/* Opcode 0xf3 0x0f 0xe3 - invalid */
7632/* Opcode 0xf2 0x0f 0xe3 - invalid */
7633
7634/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7635FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7636/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7637FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7638/* Opcode 0xf3 0x0f 0xe4 - invalid */
7639/* Opcode 0xf2 0x0f 0xe4 - invalid */
7640
7641/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7642FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7643/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7644FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7645/* Opcode 0xf3 0x0f 0xe5 - invalid */
7646/* Opcode 0xf2 0x0f 0xe5 - invalid */
7647
7648/* Opcode 0x0f 0xe6 - invalid */
7649/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7650FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7651/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7652FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7653/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7654FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7655
7656
7657/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7658FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7659{
7660 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7661 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7662 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7663 {
7664 /* Register, memory. */
7665 IEM_MC_BEGIN(0, 2);
7666 IEM_MC_LOCAL(uint64_t, uSrc);
7667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7668
7669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7671 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7672 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7673
7674 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7675 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7676
7677 IEM_MC_ADVANCE_RIP();
7678 IEM_MC_END();
7679 return VINF_SUCCESS;
7680 }
7681 /* The register, register encoding is invalid. */
7682 return IEMOP_RAISE_INVALID_OPCODE();
7683}
7684
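/* Note: the non-temporal hint of movntq only affects caching; the ordinary
   64-bit store performed above is architecturally equivalent. */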
7685/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7686FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7687{
7688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7689 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7690 {
7691 /* Register, memory. */
7692 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7693 IEM_MC_BEGIN(0, 2);
7694 IEM_MC_LOCAL(uint128_t, uSrc);
7695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7696
7697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7699 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7700 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7701
7702 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7703 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7704
7705 IEM_MC_ADVANCE_RIP();
7706 IEM_MC_END();
7707 return VINF_SUCCESS;
7708 }
7709
7710 /* The register, register encoding is invalid. */
7711 return IEMOP_RAISE_INVALID_OPCODE();
7712}
7713
7714/* Opcode 0xf3 0x0f 0xe7 - invalid */
7715/* Opcode 0xf2 0x0f 0xe7 - invalid */
7716
7717
7718/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7719FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7720/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7721FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7722/* Opcode 0xf3 0x0f 0xe8 - invalid */
7723/* Opcode 0xf2 0x0f 0xe8 - invalid */
7724
7725/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7726FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7727/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7728FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7729/* Opcode 0xf3 0x0f 0xe9 - invalid */
7730/* Opcode 0xf2 0x0f 0xe9 - invalid */
7731
7732/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7733FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7734/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7735FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7736/* Opcode 0xf3 0x0f 0xea - invalid */
7737/* Opcode 0xf2 0x0f 0xea - invalid */
7738
7739/** Opcode 0x0f 0xeb - por Pq, Qq */
7740FNIEMOP_STUB(iemOp_por_Pq_Qq);
7741/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7742FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7743/* Opcode 0xf3 0x0f 0xeb - invalid */
7744/* Opcode 0xf2 0x0f 0xeb - invalid */
7745
7746/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7747FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7748/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7749FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7750/* Opcode 0xf3 0x0f 0xec - invalid */
7751/* Opcode 0xf2 0x0f 0xec - invalid */
7752
7753/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7754FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7755/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7756FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7757/* Opcode 0xf3 0x0f 0xed - invalid */
7758/* Opcode 0xf2 0x0f 0xed - invalid */
7759
7760/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7761FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7762/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7763FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7764/* Opcode 0xf3 0x0f 0xee - invalid */
7765/* Opcode 0xf2 0x0f 0xee - invalid */
7766
7767
7768/** Opcode 0x0f 0xef - pxor Pq, Qq */
7769FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7770{
7771 IEMOP_MNEMONIC(pxor, "pxor");
7772 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7773}
7774
7775/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7776FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7777{
7778 IEMOP_MNEMONIC(vpxor, "vpxor");
7779 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7780}
7781
7782/* Opcode 0xf3 0x0f 0xef - invalid */
7783/* Opcode 0xf2 0x0f 0xef - invalid */
7784
7785/* Opcode 0x0f 0xf0 - invalid */
7786/* Opcode 0x66 0x0f 0xf0 - invalid */
7787/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7788FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7789
7790/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7791FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7792/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7793FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7794/* Opcode 0xf2 0x0f 0xf1 - invalid */
7795
7796/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7797FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7798/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7799FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7800/* Opcode 0xf2 0x0f 0xf2 - invalid */
7801
7802/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7803FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7804/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7805FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7806/* Opcode 0xf2 0x0f 0xf3 - invalid */
7807
7808/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7809FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7810/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7811FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7812/* Opcode 0xf2 0x0f 0xf4 - invalid */
7813
7814/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7815FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7816/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7817FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7818/* Opcode 0xf2 0x0f 0xf5 - invalid */
7819
7820/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7821FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7822/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7823FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7824/* Opcode 0xf2 0x0f 0xf6 - invalid */
7825
7826/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7827FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7828/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7829FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7830/* Opcode 0xf2 0x0f 0xf7 - invalid */
7831
7832/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7833FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7834/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7835FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7836/* Opcode 0xf2 0x0f 0xf8 - invalid */
7837
7838/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7839FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7840/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7841FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7842/* Opcode 0xf2 0x0f 0xf9 - invalid */
7843
7844/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7845FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7846/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7847FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7848/* Opcode 0xf2 0x0f 0xfa - invalid */
7849
7850/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7851FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7852/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7853FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7854/* Opcode 0xf2 0x0f 0xfb - invalid */
7855
7856/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7857FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7858/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7859FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7860/* Opcode 0xf2 0x0f 0xfc - invalid */
7861
7862/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7863FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7864/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7865FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7866/* Opcode 0xf2 0x0f 0xfd - invalid */
7867
7868/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7869FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7870/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7871FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7872/* Opcode 0xf2 0x0f 0xfe - invalid */
7873
7874
7875/** Opcode **** 0x0f 0xff - UD0 */
7876FNIEMOP_DEF(iemOp_ud0)
7877{
7878 IEMOP_MNEMONIC(ud0, "ud0");
7879 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7880 {
7881 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7882#ifndef TST_IEM_CHECK_MC
7883 RTGCPTR GCPtrEff;
7884 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7885 if (rcStrict != VINF_SUCCESS)
7886 return rcStrict;
7887#endif
7888 IEMOP_HLP_DONE_DECODING();
7889 }
7890 return IEMOP_RAISE_INVALID_OPCODE();
7891}
7892
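/* Note: on Intel CPUs ud0 consumes a ModR/M byte (and any displacement)
   before raising #UD, which is why the code above decodes the effective
   address first; other vendors raise #UD without consuming ModR/M. */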
7893
7894
7895/**
7896 * Two byte opcode map, first byte 0x0f.
7897 *
7898 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7899 * check if it needs updating as well when making changes.
7900 */
7901IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7902{
7903 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7904 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7905 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7906 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7907 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7908 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7909 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7910 /* 0x06 */ IEMOP_X4(iemOp_clts),
7911 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7912 /* 0x08 */ IEMOP_X4(iemOp_invd),
7913 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7914 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7915 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7916 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7917 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7918 /* 0x0e */ IEMOP_X4(iemOp_femms),
7919 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7920
7921 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7922 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7923 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7924 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7925 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7926 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7927 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7928 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7929 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7930 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7931 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7932 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7933 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7934 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7935 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7936 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7937
7938 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7939 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7940 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7941 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7942 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7943 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7944 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7945 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7946 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7947 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7948 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7949 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7950 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7951 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7952 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7953 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7954
7955 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7956 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7957 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7958 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7959 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7960 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7961 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7962 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7963 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7964 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7965 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7966 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7967 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7968 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7969 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7970 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7971
7972 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7973 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7974 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7975 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7976 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7977 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7978 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7979 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7980 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7981 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7982 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7983 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7984 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7985 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7986 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7987 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7988
7989 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7990 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7991 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7992 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7993 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7994 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7995 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7996 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7997 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7998 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7999 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8000 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8001 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8002 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8003 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8004 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8005
8006 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8007 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8008 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8009 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8010 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8011 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8012 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8013 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8014 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8015 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8016 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8017 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8018 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8019 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8020 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8021 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8022
8023 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8024 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8025 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8026 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8027 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8028 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8029 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8030 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8031
8032 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8033 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8034 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8035 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8036 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8037 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8038 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8039 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8040
8041 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8042 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8043 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8044 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8045 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8046 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8047 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8048 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8049 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8050 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8051 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8052 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8053 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8054 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8055 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8056 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8057
8058 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8059 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8060 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8061 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8062 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8063 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8064 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8065 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8066 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8067 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8068 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8069 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8070 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8071 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8072 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8073 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8074
8075 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8076 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8077 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8078 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8079 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8080 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8081 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8082 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8083 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8084 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8085 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8086 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8087 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8088 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8089 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8090 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8091
8092 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8093 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8094 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8095 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8096 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8097 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8098 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8099 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8100 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8101 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8102 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8103 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8104 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8105 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8106 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8107 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8108
8109 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8110 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8111 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8112 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8113 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8114 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8115 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
8116 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8117 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8118 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8119 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8120 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8121 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8122 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8123 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8124 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8125
8126 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8127 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8128 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8129 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8130 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8131 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8132 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8133 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8134 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8135 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8136 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8137 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8138 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8139 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8140 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8141 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8142
8143 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8144 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8145 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8146 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8147 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8148 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8149 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8150 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8151 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8152 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8153 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8154 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8155 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8156 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8157 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8158 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8159
8160 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8161 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8162 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8163 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8164 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8165 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8166 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8167 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8168 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8169 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8170 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8171 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8172 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8173 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8174 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8175 /* 0xff */ IEMOP_X4(iemOp_ud0),
8176};
8177AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8178
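/* For illustration only: the map holds four handlers per opcode byte, one
   per mandatory-prefix column (none, 0x66, 0xf3, 0xf2), giving the
   256 * 4 = 1024 entries asserted above. A minimal sketch of the lookup,
   assuming a hypothetical idxPrefix in the 0..3 column order: */
#if 0
FNIEMOP_DEF_1(iemOp_SketchEscTwoByte, uint8_t, bOpcode)
{
    uint8_t const idxPrefix = 0; /* hypothetical: derived from 0x66/0xf3/0xf2 */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix]);
}
#endif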
8179
8180/**
8181 * VEX opcode map \#1.
8182 *
8183 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
8184 * it needs updating too when making changes.
8185 */
8186IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
8187{
8188 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8189 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
8190 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
8191 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
8192 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
8193 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
8194 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
8195 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
8196 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
8197 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
8198 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
8199 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
8200 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
8201 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
8202 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
8203 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
8204 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
8205
8206 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8207 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8208 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8209 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8210 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8211 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8212 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8213 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8214 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
8215 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
8216 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
8217 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
8218 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
8219 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
8220 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
8221 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
8222
8223 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
8224 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
8225 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
8226 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
8227 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
8228 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
8229 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
8230 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
8231 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8232 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8233 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8234 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8235 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8236 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8237 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8238 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8239
8240 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
8241 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
8242 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
8243 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
8244 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
8245 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
8246 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
8247 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
8248 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8249 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8250 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8251 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8252 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8253 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8254 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8255 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8256
8257 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
8258 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
8259 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
8260 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
8261 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
8262 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
8263 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
8264 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
8265 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
8266 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
8267 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
8268 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
8269 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
8270 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
8271 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
8272 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
8273
8274 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8275 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8276 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8277 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8278 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8279 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8280 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8281 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8282 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8283 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8284 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8285 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8286 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8287 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8288 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8289 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8290
8291 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8292 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8293 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8294 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8295 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8296 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8297 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8298 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8299 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8300 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8301 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8302 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8303 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8304 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8305 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8306 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8307
8308 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8309 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8310 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8311 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8312 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8313 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8314 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8315 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8316 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
8317 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
8318 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
8319 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
8320 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8321 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8322 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8323 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8324
8325 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
8326 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
8327 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
8328 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
8329 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
8330 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
8331 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
8332 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
8333 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
8334 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
8335 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
8336 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
8337 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
8338 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
8339 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
8340 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
8341
8342 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
8343 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
8344 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
8345 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
8346 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
8347 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
8348 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
8349 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
8350 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
8351 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
8352 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
8353 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
8354 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
8355 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
8356 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
8357 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
8358
8359 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8360 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8361 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8362 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8363 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8364 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8365 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8366 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8367 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8368 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8369 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
8370 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
8371 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
8372 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
8373 /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
8374 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
8375
8376 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8377 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8378 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8379 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8380 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8381 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8382 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8383 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8384 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8385 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8386 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
8387 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
8388 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
8389 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
8390 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
8391 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
8392
8393 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8394 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8395 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8396 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8397 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8398 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8399 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8400 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8401 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8402 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8403 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
8404 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
8405 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
8406 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
8407 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
8408 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
8409
8410 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8411 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8412 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8413 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8414 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8415 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8416 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8417 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8418 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8419 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8420 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8421 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8422 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8423 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8424 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8425 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8426
8427 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8428 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8429 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8430 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8431 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8432 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8433 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8434 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8435 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8436 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8437 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8438 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8439 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8440 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8441 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8442 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8443
8444 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8445 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8446 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8447 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8448 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8449 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8450 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8451 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8452 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8453 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8454 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8455 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8456 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8457 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8458 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8459 /* 0xff */ IEMOP_X4(iemOp_ud0),
8460};
8461AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
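/**
 * A minimal sketch of how a VEX-encoded instruction would be routed to
 * g_apfnVexMap1 (illustrative only; the bit extraction below assumes the
 * architectural 3-byte VEX layout rather than any IEM decoder helper):
 * the mmmmm field of the second VEX byte selects the opcode map, with 1
 * meaning this 0fh-equivalent map, and the pp field of the third byte
 * selects the column, mirroring the legacy 066h/0f3h/0f2h prefixes.
 * @code
 * static PFNIEMOP iemSketchVexMap1Lookup(uint8_t bVex1, uint8_t bVex2, uint8_t bOpcode)
 * {
 *     uint8_t const uMap = bVex1 & 0x1f;  // mmmmm: 1 = 0fh, 2 = 0f38h, 3 = 0f3ah
 *     uint8_t const uPp  = bVex2 & 0x3;   // pp: 0 = none, 1 = 066h, 2 = 0f3h, 3 = 0f2h
 *     Assert(uMap == 1);                  // only map #1 is covered by this table
 *     return g_apfnVexMap1[(uintptr_t)bOpcode * 4 + uPp];
 * }
 * @endcode
 */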
8462/** @} */
8463
8464