VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 66309

Last change on this file since 66309 was 66309, checked in by vboxsync, 8 years ago

IEM: Implemented movlps Vq,Mq and movhlps Vq,Uq (0f 12).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 312.8 KB
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66309 2017-03-28 15:35:12Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */


/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
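/* Editorial note, not part of the original source: the decoder above shows the
 * pattern used throughout this file.  The ModR/M byte packs mod[7:6], reg[5:3]
 * and rm[2:0]; (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) tests
 * for mod == 3, i.e. a register operand, and anything else is a memory operand
 * whose effective address IEM_MC_CALC_RM_EFF_ADDR computes.  IEM_MC_BEGIN(cArgs,
 * cLocals) opens a microcode block with that many call arguments and locals,
 * and IEM_MC_END() closes it.  A minimal standalone sketch of the field
 * extraction, using hypothetical helper names and kept disabled: */
#if 0
static inline uint8_t edExampleModRmMod(uint8_t bRm) { return (uint8_t)(bRm >> 6); }        /* mod: bits 7:6 */
static inline uint8_t edExampleModRmReg(uint8_t bRm) { return (uint8_t)((bRm >> 3) & 7); }  /* reg: bits 5:3 */
static inline uint8_t edExampleModRmRm(uint8_t bRm)  { return (uint8_t)(bRm & 7); }         /* rm:  bits 2:0 */
#endif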


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5, common worker. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
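/* Worked example (editorial, not part of the original source): for 'sldt ax'
 * the bytes are 0f 00 c0, so bRm = 0xc0 and (0xc0 >> X86_MODRM_REG_SHIFT) &
 * X86_MODRM_REG_SMASK = 0 picks iemOp_Grp6_sldt from g_apfnGroup6, where
 * mod == 3 selects the register form.  For 'verw word [bx]' the bytes are
 * 0f 00 2f: reg = 5 picks iemOp_Grp6_verw and mod == 0 yields the memory
 * form. */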


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
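/* Editorial note, not part of the original source: the 0xfff0/0xffe0 fillers
 * above model how older CPUs report the machine status word.  A 286 reads the
 * reserved MSW bits 4..15 back as ones (0xfff0), a 386 additionally reports
 * ET in bit 4 so only bits 5..15 are forced to one (0xffe0), and later CPUs
 * simply return the low word of CR0 unchanged. */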


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, everything is 16-bit and only
       the lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
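/* Worked example (editorial, not part of the original source): with mod == 3
 * group 7 repurposes the rm bits as sub-opcodes, so 0f 01 d0 (bRm = 0xd0,
 * reg = 2, rm = 0) dispatches to iemOp_Grp7_xgetbv rather than to a register
 * form of lgdt, while the memory forms (mod != 3) still go straight through
 * g_apfnGroup7Mem. */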

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
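/* Editorial note, not part of the original source: the 32-bit and 64-bit
 * cases above deliberately share the 64-bit path; iemCImpl_LarLsl_u64 is
 * presumably expected to zero-extend or truncate as appropriate for the
 * actual effective operand size when it writes the destination register. */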



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
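/* Editorial note, not part of the original source: 3DNow! selects the actual
 * operation via an opcode-suffix byte rather than via the 0f 0f escape
 * itself, which is why this dispatcher switches on a fetched byte instead of
 * going through a 256-entry table; only the values listed above are defined
 * and everything else raises #UD. */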


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @oponlytest
         */
        IEMOP_MNEMONIC2(RM, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @oponlytest
         */
        IEMOP_MNEMONIC2(RM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
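/* Worked example (editorial, not part of the original source): 0f 12 c1 has
 * mod == 3 and therefore decodes as movhlps xmm0, xmm1, copying the high
 * qword of xmm1 into the low qword of xmm0; any mod != 3 encoding of 0f 12
 * instead decodes as movlps Vq, Mq, loading a qword from memory into the low
 * half of the destination register. */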

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
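/* Editorial note, not part of the original source: the LOCK handling above
 * models the AMD encoding that reaches CR8 from 32-bit code, e.g.
 * f0 0f 20 c0 for 'mov eax, cr8'; on CPUs without that feature
 * (fMovCr8In32Bit) the lock prefix makes the instruction #UD instead. */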


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
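/* Editorial note, not part of the original source: unlike the unaligned
 * movups accesses earlier in the file, the movaps/movapd functions here use
 * the IEM_MC_FETCH_MEM_U128_ALIGN_SSE / IEM_MC_STORE_MEM_U128_ALIGN_SSE
 * variants, which are expected to raise #GP(0) on a misaligned 16-byte
 * operand. */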

/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT


/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
{
    IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
1863/* Opcode 0xf3 0x0f 0x2b - invalid */
1864/* Opcode 0xf2 0x0f 0x2b - invalid */
1865
1866
1867/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1868FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1869/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1870FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1871/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1872FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1873/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1874FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1875
1876/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1877FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1878/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1879FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1880/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1881FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1882/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1883FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1884
1885/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1886FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1887/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1888FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1889/* Opcode 0xf3 0x0f 0x2e - invalid */
1890/* Opcode 0xf2 0x0f 0x2e - invalid */
1891
1892/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
1893FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1894/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
1895FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1896/* Opcode 0xf3 0x0f 0x2f - invalid */
1897/* Opcode 0xf2 0x0f 0x2f - invalid */
1898
1899/** Opcode 0x0f 0x30. */
1900FNIEMOP_DEF(iemOp_wrmsr)
1901{
1902 IEMOP_MNEMONIC(wrmsr, "wrmsr");
1903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1904 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
1905}
1906
1907
1908/** Opcode 0x0f 0x31. */
1909FNIEMOP_DEF(iemOp_rdtsc)
1910{
1911 IEMOP_MNEMONIC(rdtsc, "rdtsc");
1912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1913 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
1914}
1915
1916
1917/** Opcode 0x0f 0x32. */
1918FNIEMOP_DEF(iemOp_rdmsr)
1919{
1920 IEMOP_MNEMONIC(rdmsr, "rdmsr");
1921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1922 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
1923}
1924
1925
1926/** Opcode 0x0f 0x33. */
1927FNIEMOP_STUB(iemOp_rdpmc);
1928/** Opcode 0x0f 0x34. */
1929FNIEMOP_STUB(iemOp_sysenter);
1930/** Opcode 0x0f 0x35. */
1931FNIEMOP_STUB(iemOp_sysexit);
1932/** Opcode 0x0f 0x37. */
1933FNIEMOP_STUB(iemOp_getsec);
1934/** Opcode 0x0f 0x38. */
1935FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1936/** Opcode 0x0f 0x3a. */
1937FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1938
1939
1940/**
1941 * Implements a conditional move.
1942 *
1943 * Wish there was an obvious way to do this where we could share and reduce
1944 * code bloat.
1945 *
1946 * @param a_Cnd The conditional "microcode" operation.
1947 */
1948#define CMOV_X(a_Cnd) \
1949 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1950 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1951 { \
1952 switch (pVCpu->iem.s.enmEffOpSize) \
1953 { \
1954 case IEMMODE_16BIT: \
1955 IEM_MC_BEGIN(0, 1); \
1956 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1957 a_Cnd { \
1958 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1959 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1960 } IEM_MC_ENDIF(); \
1961 IEM_MC_ADVANCE_RIP(); \
1962 IEM_MC_END(); \
1963 return VINF_SUCCESS; \
1964 \
1965 case IEMMODE_32BIT: \
1966 IEM_MC_BEGIN(0, 1); \
1967 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1968 a_Cnd { \
1969 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1970 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1971 } IEM_MC_ELSE() { \
1972 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1973 } IEM_MC_ENDIF(); \
1974 IEM_MC_ADVANCE_RIP(); \
1975 IEM_MC_END(); \
1976 return VINF_SUCCESS; \
1977 \
1978 case IEMMODE_64BIT: \
1979 IEM_MC_BEGIN(0, 1); \
1980 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1981 a_Cnd { \
1982 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1983 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1984 } IEM_MC_ENDIF(); \
1985 IEM_MC_ADVANCE_RIP(); \
1986 IEM_MC_END(); \
1987 return VINF_SUCCESS; \
1988 \
1989 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1990 } \
1991 } \
1992 else \
1993 { \
1994 switch (pVCpu->iem.s.enmEffOpSize) \
1995 { \
1996 case IEMMODE_16BIT: \
1997 IEM_MC_BEGIN(0, 2); \
1998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1999 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2001 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2002 a_Cnd { \
2003 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2004 } IEM_MC_ENDIF(); \
2005 IEM_MC_ADVANCE_RIP(); \
2006 IEM_MC_END(); \
2007 return VINF_SUCCESS; \
2008 \
2009 case IEMMODE_32BIT: \
2010 IEM_MC_BEGIN(0, 2); \
2011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2012 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2014 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2015 a_Cnd { \
2016 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2017 } IEM_MC_ELSE() { \
2018 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2019 } IEM_MC_ENDIF(); \
2020 IEM_MC_ADVANCE_RIP(); \
2021 IEM_MC_END(); \
2022 return VINF_SUCCESS; \
2023 \
2024 case IEMMODE_64BIT: \
2025 IEM_MC_BEGIN(0, 2); \
2026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2027 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2029 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2030 a_Cnd { \
2031 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2032 } IEM_MC_ENDIF(); \
2033 IEM_MC_ADVANCE_RIP(); \
2034 IEM_MC_END(); \
2035 return VINF_SUCCESS; \
2036 \
2037 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2038 } \
2039 } do {} while (0)
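
/*
 * Illustrative sketch (documentation only, not compiled): the IEM_MC_ELSE
 * branches in the 32-bit cases above exist because a CMOVcc with a 32-bit
 * operand in 64-bit mode clears the high half of the destination register
 * even when the condition is false.  Assuming a hypothetical helper, the
 * plain C equivalent of those semantics is:
 */
#if 0
static void iemCmov32SketchRef(uint64_t *puDst, uint32_t uSrc, bool fCondition)
{
    if (fCondition)
        *puDst = uSrc;             /* Condition true: move, zero extending to 64 bits. */
    else
        *puDst = (uint32_t)*puDst; /* Condition false: no move, but the high half is still cleared. */
}
#endif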
2040
2041
2042
2043/** Opcode 0x0f 0x40. */
2044FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2045{
2046 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2047 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2048}
2049
2050
2051/** Opcode 0x0f 0x41. */
2052FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2053{
2054 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2055 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2056}
2057
2058
2059/** Opcode 0x0f 0x42. */
2060FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2061{
2062 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2063 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2064}
2065
2066
2067/** Opcode 0x0f 0x43. */
2068FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2069{
2070 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2071 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2072}
2073
2074
2075/** Opcode 0x0f 0x44. */
2076FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2077{
2078 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2079 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2080}
2081
2082
2083/** Opcode 0x0f 0x45. */
2084FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2085{
2086 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2087 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2088}
2089
2090
2091/** Opcode 0x0f 0x46. */
2092FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2093{
2094 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2095 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2096}
2097
2098
2099/** Opcode 0x0f 0x47. */
2100FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2101{
2102 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2103 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2104}
2105
2106
2107/** Opcode 0x0f 0x48. */
2108FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2109{
2110 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2111 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2112}
2113
2114
2115/** Opcode 0x0f 0x49. */
2116FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2117{
2118 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2119 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2120}
2121
2122
2123/** Opcode 0x0f 0x4a. */
2124FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2125{
2126 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2127 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2128}
2129
2130
2131/** Opcode 0x0f 0x4b. */
2132FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2133{
2134 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2135 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2136}
2137
2138
2139/** Opcode 0x0f 0x4c. */
2140FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2141{
2142 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2143 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2144}
2145
2146
2147/** Opcode 0x0f 0x4d. */
2148FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2149{
2150 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2151 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2152}
2153
2154
2155/** Opcode 0x0f 0x4e. */
2156FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2157{
2158 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2159 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2160}
2161
2162
2163/** Opcode 0x0f 0x4f. */
2164FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2165{
2166 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2167 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2168}
2169
2170#undef CMOV_X
2171
2172/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2173FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2174/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2175FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2176/* Opcode 0xf3 0x0f 0x50 - invalid */
2177/* Opcode 0xf2 0x0f 0x50 - invalid */
2178
2179/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2180FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2181/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2182FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2183/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2184FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2185/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2186FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2187
2188/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2189FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2190/* Opcode 0x66 0x0f 0x52 - invalid */
2191/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2192FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2193/* Opcode 0xf2 0x0f 0x52 - invalid */
2194
2195/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2196FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2197/* Opcode 0x66 0x0f 0x53 - invalid */
2198/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2199FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2200/* Opcode 0xf2 0x0f 0x53 - invalid */
2201
2202/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2203FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2204/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2205FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2206/* Opcode 0xf3 0x0f 0x54 - invalid */
2207/* Opcode 0xf2 0x0f 0x54 - invalid */
2208
2209/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2210FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2211/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2212FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2213/* Opcode 0xf3 0x0f 0x55 - invalid */
2214/* Opcode 0xf2 0x0f 0x55 - invalid */
2215
2216/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2217FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2218/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2219FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2220/* Opcode 0xf3 0x0f 0x56 - invalid */
2221/* Opcode 0xf2 0x0f 0x56 - invalid */
2222
2223/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2224FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2225/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2226FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2227/* Opcode 0xf3 0x0f 0x57 - invalid */
2228/* Opcode 0xf2 0x0f 0x57 - invalid */
2229
2230/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2231FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2232/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2233FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2234/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2235FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2236/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2237FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2238
2239/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2240FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2241/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2242FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2243/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2244FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2245/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2246FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2247
2248/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2249FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2250/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2251FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2252/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2253FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2254/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2255FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2256
2257/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2258FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2259/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2260FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2261/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2262FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2263/* Opcode 0xf2 0x0f 0x5b - invalid */
2264
2265/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2266FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2267/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2268FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2269/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2270FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2271/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2272FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2273
2274/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2275FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2276/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2277FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2278/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2279FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2280/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2281FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2282
2283/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2284FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2285/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2286FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2287/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2288FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2289/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2290FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2291
2292/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2293FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2294/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2295FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2296/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2297FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2298/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2299FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2300
2301/**
2302 * Common worker for SSE2 instructions on the forms:
2303 * pxxxx xmm1, xmm2/mem128
2304 *
2305 * The 2nd operand is the first half of a register, which in the memory case
2306 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
2307 * 128-bit memory access for SSE.
2308 *
2309 * Exceptions type 4.
2310 */
2311FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2312{
2313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2314 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2315 {
2316 /*
2317 * Register, register.
2318 */
2319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2320 IEM_MC_BEGIN(2, 0);
2321 IEM_MC_ARG(uint128_t *, pDst, 0);
2322 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2323 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2324 IEM_MC_PREPARE_SSE_USAGE();
2325 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2326 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2327 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2328 IEM_MC_ADVANCE_RIP();
2329 IEM_MC_END();
2330 }
2331 else
2332 {
2333 /*
2334 * Register, memory.
2335 */
2336 IEM_MC_BEGIN(2, 2);
2337 IEM_MC_ARG(uint128_t *, pDst, 0);
2338 IEM_MC_LOCAL(uint64_t, uSrc);
2339 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2341
2342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2344 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2345 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2346
2347 IEM_MC_PREPARE_SSE_USAGE();
2348 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2349 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2350
2351 IEM_MC_ADVANCE_RIP();
2352 IEM_MC_END();
2353 }
2354 return VINF_SUCCESS;
2355}
2356
2357
2358/**
2359 * Common worker for MMX instructions on the forms:
2360 * pxxxx mm1, mm2/mem32
2361 *
2362 * The 2nd operand is the first half of a register, which in the memory case
2363 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
2364 * 128-bit memory access for SSE.
2365 *
2366 * Exceptions type 4.
2367 */
2368FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2369{
2370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2371 if (!pImpl->pfnU64)
2372 return IEMOP_RAISE_INVALID_OPCODE();
2373 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2374 {
2375 /*
2376 * Register, register.
2377 */
2378 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2379 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2381 IEM_MC_BEGIN(2, 0);
2382 IEM_MC_ARG(uint64_t *, pDst, 0);
2383 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2384 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2385 IEM_MC_PREPARE_FPU_USAGE();
2386 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2387 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2388 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2389 IEM_MC_ADVANCE_RIP();
2390 IEM_MC_END();
2391 }
2392 else
2393 {
2394 /*
2395 * Register, memory.
2396 */
2397 IEM_MC_BEGIN(2, 2);
2398 IEM_MC_ARG(uint64_t *, pDst, 0);
2399 IEM_MC_LOCAL(uint32_t, uSrc);
2400 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2402
2403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2405 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2406 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2407
2408 IEM_MC_PREPARE_FPU_USAGE();
2409 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2410 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2411
2412 IEM_MC_ADVANCE_RIP();
2413 IEM_MC_END();
2414 }
2415 return VINF_SUCCESS;
2416}
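
/*
 * Illustrative sketch (documentation only, not compiled): assuming standard
 * SDM semantics for PUNPCKLBW, an aImpl worker handed to the MMX low-low
 * helper above interleaves the low halves of its operands like this
 * (iemPunpcklbwSketchRef is a hypothetical name, not an IEM API):
 */
#if 0
static void iemPunpcklbwSketchRef(uint64_t *puDst, uint32_t const *puSrc)
{
    uint64_t const uDstIn  = *puDst;
    uint32_t const uSrcIn  = *puSrc;
    uint64_t       uResult = 0;
    for (unsigned iByte = 0; iByte < 4; iByte++)
    {
        uResult |= (uint64_t)((uDstIn >> (iByte * 8)) & 0xff) << (iByte * 16);     /* Even result bytes come from mm1. */
        uResult |= (uint64_t)((uSrcIn >> (iByte * 8)) & 0xff) << (iByte * 16 + 8); /* Odd result bytes come from mm2/mem32. */
    }
    *puDst = uResult;
}
#endif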
2417
2418
2419/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2420FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2421{
2422 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2423 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2424}
2425
2426/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2427FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2428{
2429 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2430 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2431}
2432
2433/* Opcode 0xf3 0x0f 0x60 - invalid */
2434
2435
2436/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2437FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2438{
2439 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
2440 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2441}
2442
2443/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2444FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2445{
2446 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2447 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2448}
2449
2450/* Opcode 0xf3 0x0f 0x61 - invalid */
2451
2452
2453/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2454FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2455{
2456 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2457 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2458}
2459
2460/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2461FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2462{
2463 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2464 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2465}
2466
2467/* Opcode 0xf3 0x0f 0x62 - invalid */
2468
2469
2470
2471/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2472FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2473/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2474FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2475/* Opcode 0xf3 0x0f 0x63 - invalid */
2476
2477/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2478FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2479/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2480FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2481/* Opcode 0xf3 0x0f 0x64 - invalid */
2482
2483/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2484FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2485/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2486FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2487/* Opcode 0xf3 0x0f 0x65 - invalid */
2488
2489/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2490FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2491/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2492FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2493/* Opcode 0xf3 0x0f 0x66 - invalid */
2494
2495/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2496FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2497/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2498FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2499/* Opcode 0xf3 0x0f 0x67 - invalid */
2500
2501
2502/**
2503 * Common worker for MMX instructions on the form:
2504 * pxxxx mm1, mm2/mem64
2505 *
2506 * The 2nd operand is the second half of a register, which in the memory case
2507 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2508 * where it may read the full 128 bits or only the upper 64 bits.
2509 *
2510 * Exceptions type 4.
2511 */
2512FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2513{
2514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2515 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2516 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2517 {
2518 /*
2519 * Register, register.
2520 */
2521 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2522 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2524 IEM_MC_BEGIN(2, 0);
2525 IEM_MC_ARG(uint64_t *, pDst, 0);
2526 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2527 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2528 IEM_MC_PREPARE_FPU_USAGE();
2529 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2530 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2531 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2532 IEM_MC_ADVANCE_RIP();
2533 IEM_MC_END();
2534 }
2535 else
2536 {
2537 /*
2538 * Register, memory.
2539 */
2540 IEM_MC_BEGIN(2, 2);
2541 IEM_MC_ARG(uint64_t *, pDst, 0);
2542 IEM_MC_LOCAL(uint64_t, uSrc);
2543 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2545
2546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2548 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2549 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2550
2551 IEM_MC_PREPARE_FPU_USAGE();
2552 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2553 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2554
2555 IEM_MC_ADVANCE_RIP();
2556 IEM_MC_END();
2557 }
2558 return VINF_SUCCESS;
2559}
2560
2561
2562/**
2563 * Common worker for SSE2 instructions on the form:
2564 * pxxxx xmm1, xmm2/mem128
2565 *
2566 * The 2nd operand is the second half of a register, which in the memory case
2567 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2568 * where it may read the full 128 bits or only the upper 64 bits.
2569 *
2570 * Exceptions type 4.
2571 */
2572FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2573{
2574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2575 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2576 {
2577 /*
2578 * Register, register.
2579 */
2580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2581 IEM_MC_BEGIN(2, 0);
2582 IEM_MC_ARG(uint128_t *, pDst, 0);
2583 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2584 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2585 IEM_MC_PREPARE_SSE_USAGE();
2586 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2587 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2588 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2589 IEM_MC_ADVANCE_RIP();
2590 IEM_MC_END();
2591 }
2592 else
2593 {
2594 /*
2595 * Register, memory.
2596 */
2597 IEM_MC_BEGIN(2, 2);
2598 IEM_MC_ARG(uint128_t *, pDst, 0);
2599 IEM_MC_LOCAL(uint128_t, uSrc);
2600 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2602
2603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2605 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2606 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2607
2608 IEM_MC_PREPARE_SSE_USAGE();
2609 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2610 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2611
2612 IEM_MC_ADVANCE_RIP();
2613 IEM_MC_END();
2614 }
2615 return VINF_SUCCESS;
2616}
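
/*
 * Illustrative sketch (documentation only, not compiled): assuming standard
 * SDM semantics for PUNPCKHBW, a high-high aImpl worker interleaves the
 * upper halves of its operands (hypothetical reference using the MMX
 * operand sizes):
 */
#if 0
static void iemPunpckhbwSketchRef(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t const uDstIn  = *puDst;
    uint64_t const uSrcIn  = *puSrc;
    uint64_t       uResult = 0;
    for (unsigned iByte = 0; iByte < 4; iByte++)
    {
        uResult |= (uint64_t)((uDstIn >> ((iByte + 4) * 8)) & 0xff) << (iByte * 16);     /* Even bytes: mm1 high half. */
        uResult |= (uint64_t)((uSrcIn >> ((iByte + 4) * 8)) & 0xff) << (iByte * 16 + 8); /* Odd bytes: mm2/mem64 high half. */
    }
    *puDst = uResult;
}
#endif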
2617
2618
2619/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2620FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2621{
2622 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2623 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2624}
2625
2626/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2627FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2628{
2629 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2630 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2631}
2632/* Opcode 0xf3 0x0f 0x68 - invalid */
2633
2634
2635/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2636FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2637{
2638 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2639 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2640}
2641
2642/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2643FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2644{
2645 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2646 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2648}
2649/* Opcode 0xf3 0x0f 0x69 - invalid */
2650
2651
2652/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2653FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2654{
2655 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2656 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2657}
2658
2659/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2660FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2661{
2662 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2663 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2664}
2665/* Opcode 0xf3 0x0f 0x6a - invalid */
2666
2667
2668/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2669FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2670/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2671FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2672/* Opcode 0xf3 0x0f 0x6b - invalid */
2673
2674
2675/* Opcode 0x0f 0x6c - invalid */
2676
2677/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2678FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2679{
2680 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2681 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2682}
2683
2684/* Opcode 0xf3 0x0f 0x6c - invalid */
2685/* Opcode 0xf2 0x0f 0x6c - invalid */
2686
2687
2688/* Opcode 0x0f 0x6d - invalid */
2689
2690/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2691FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2692{
2693 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2694 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2695}
2696
2697/* Opcode 0xf3 0x0f 0x6d - invalid */
2698
2699
2700/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2701FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2702{
2703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2704 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2705 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2706 else
2707 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2708 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2709 {
2710 /* MMX, greg */
2711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2712 IEM_MC_BEGIN(0, 1);
2713 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2714 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2715 IEM_MC_LOCAL(uint64_t, u64Tmp);
2716 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2717 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2718 else
2719 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2720 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2721 IEM_MC_ADVANCE_RIP();
2722 IEM_MC_END();
2723 }
2724 else
2725 {
2726 /* MMX, [mem] */
2727 IEM_MC_BEGIN(0, 2);
2728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2729 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2732 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2733 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2734 {
2735 IEM_MC_LOCAL(uint64_t, u64Tmp);
2736 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2737 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2738 }
2739 else
2740 {
2741 IEM_MC_LOCAL(uint32_t, u32Tmp);
2742 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2743 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2744 }
2745 IEM_MC_ADVANCE_RIP();
2746 IEM_MC_END();
2747 }
2748 return VINF_SUCCESS;
2749}
2750
2751/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2752FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2753{
2754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2755 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2756 IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
2757 else
2758 IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2759 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2760 {
2761 /* XMM, greg */
2762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2763 IEM_MC_BEGIN(0, 1);
2764 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2765 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2766 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2767 {
2768 IEM_MC_LOCAL(uint64_t, u64Tmp);
2769 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2770 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2771 }
2772 else
2773 {
2774 IEM_MC_LOCAL(uint32_t, u32Tmp);
2775 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2776 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2777 }
2778 IEM_MC_ADVANCE_RIP();
2779 IEM_MC_END();
2780 }
2781 else
2782 {
2783 /* XMM, [mem] */
2784 IEM_MC_BEGIN(0, 2);
2785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2786 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2789 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2790 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2791 {
2792 IEM_MC_LOCAL(uint64_t, u64Tmp);
2793 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2794 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2795 }
2796 else
2797 {
2798 IEM_MC_LOCAL(uint32_t, u32Tmp);
2799 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2800 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2801 }
2802 IEM_MC_ADVANCE_RIP();
2803 IEM_MC_END();
2804 }
2805 return VINF_SUCCESS;
2806}
2807
2808/* Opcode 0xf3 0x0f 0x6e - invalid */
2809
2810
2811/** Opcode 0x0f 0x6f - movq Pq, Qq */
2812FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2813{
2814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2815 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2817 {
2818 /*
2819 * Register, register.
2820 */
2821 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2822 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2824 IEM_MC_BEGIN(0, 1);
2825 IEM_MC_LOCAL(uint64_t, u64Tmp);
2826 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2827 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2828 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2829 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2830 IEM_MC_ADVANCE_RIP();
2831 IEM_MC_END();
2832 }
2833 else
2834 {
2835 /*
2836 * Register, memory.
2837 */
2838 IEM_MC_BEGIN(0, 2);
2839 IEM_MC_LOCAL(uint64_t, u64Tmp);
2840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2841
2842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2844 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2845 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2846 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2847 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2848
2849 IEM_MC_ADVANCE_RIP();
2850 IEM_MC_END();
2851 }
2852 return VINF_SUCCESS;
2853}
2854
2855/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2856FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2857{
2858 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2859 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2860 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2861 {
2862 /*
2863 * Register, register.
2864 */
2865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2866 IEM_MC_BEGIN(0, 0);
2867 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2868 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2869 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2870 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2871 IEM_MC_ADVANCE_RIP();
2872 IEM_MC_END();
2873 }
2874 else
2875 {
2876 /*
2877 * Register, memory.
2878 */
2879 IEM_MC_BEGIN(0, 2);
2880 IEM_MC_LOCAL(uint128_t, u128Tmp);
2881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2882
2883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2885 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2886 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2887 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2888 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2889
2890 IEM_MC_ADVANCE_RIP();
2891 IEM_MC_END();
2892 }
2893 return VINF_SUCCESS;
2894}
2895
2896/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2897FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2898{
2899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2900 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2901 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2902 {
2903 /*
2904 * Register, register.
2905 */
2906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2907 IEM_MC_BEGIN(0, 0);
2908 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2909 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2910 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2911 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2912 IEM_MC_ADVANCE_RIP();
2913 IEM_MC_END();
2914 }
2915 else
2916 {
2917 /*
2918 * Register, memory.
2919 */
2920 IEM_MC_BEGIN(0, 2);
2921 IEM_MC_LOCAL(uint128_t, u128Tmp);
2922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2923
2924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2926 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2927 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2928 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2929 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2930
2931 IEM_MC_ADVANCE_RIP();
2932 IEM_MC_END();
2933 }
2934 return VINF_SUCCESS;
2935}
2936
2937
2938/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2939FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2940{
2941 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2943 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2944 {
2945 /*
2946 * Register, register.
2947 */
2948 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2950
2951 IEM_MC_BEGIN(3, 0);
2952 IEM_MC_ARG(uint64_t *, pDst, 0);
2953 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2954 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2955 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2956 IEM_MC_PREPARE_FPU_USAGE();
2957 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2958 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2959 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2960 IEM_MC_ADVANCE_RIP();
2961 IEM_MC_END();
2962 }
2963 else
2964 {
2965 /*
2966 * Register, memory.
2967 */
2968 IEM_MC_BEGIN(3, 2);
2969 IEM_MC_ARG(uint64_t *, pDst, 0);
2970 IEM_MC_LOCAL(uint64_t, uSrc);
2971 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2973
2974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2975 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2976 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2978 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2979
2980 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2981 IEM_MC_PREPARE_FPU_USAGE();
2982 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2983 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2984
2985 IEM_MC_ADVANCE_RIP();
2986 IEM_MC_END();
2987 }
2988 return VINF_SUCCESS;
2989}
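
/*
 * Illustrative sketch (documentation only, not compiled): assuming standard
 * SDM semantics for PSHUFW, each result word is picked from the source by a
 * two-bit field of the immediate (hypothetical reference):
 */
#if 0
static void iemPshufwSketchRef(uint64_t *puDst, uint64_t const *puSrc, uint8_t bImm)
{
    uint64_t const uSrcIn  = *puSrc;
    uint64_t       uResult = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bImm >> (iWord * 2)) & 3; /* Two selector bits per result word. */
        uResult |= ((uSrcIn >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
    }
    *puDst = uResult;
}
#endif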
2990
2991/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2992FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2993{
2994 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2996 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2997 {
2998 /*
2999 * Register, register.
3000 */
3001 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3003
3004 IEM_MC_BEGIN(3, 0);
3005 IEM_MC_ARG(uint128_t *, pDst, 0);
3006 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3007 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3008 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3009 IEM_MC_PREPARE_SSE_USAGE();
3010 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3011 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3012 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3013 IEM_MC_ADVANCE_RIP();
3014 IEM_MC_END();
3015 }
3016 else
3017 {
3018 /*
3019 * Register, memory.
3020 */
3021 IEM_MC_BEGIN(3, 2);
3022 IEM_MC_ARG(uint128_t *, pDst, 0);
3023 IEM_MC_LOCAL(uint128_t, uSrc);
3024 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3026
3027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3028 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3029 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3032
3033 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3034 IEM_MC_PREPARE_SSE_USAGE();
3035 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3036 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3037
3038 IEM_MC_ADVANCE_RIP();
3039 IEM_MC_END();
3040 }
3041 return VINF_SUCCESS;
3042}
3043
3044/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
3045FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3046{
3047 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
3048 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3049 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3050 {
3051 /*
3052 * Register, register.
3053 */
3054 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3056
3057 IEM_MC_BEGIN(3, 0);
3058 IEM_MC_ARG(uint128_t *, pDst, 0);
3059 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3060 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3061 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3062 IEM_MC_PREPARE_SSE_USAGE();
3063 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3064 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3065 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3066 IEM_MC_ADVANCE_RIP();
3067 IEM_MC_END();
3068 }
3069 else
3070 {
3071 /*
3072 * Register, memory.
3073 */
3074 IEM_MC_BEGIN(3, 2);
3075 IEM_MC_ARG(uint128_t *, pDst, 0);
3076 IEM_MC_LOCAL(uint128_t, uSrc);
3077 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3079
3080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3081 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3082 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3084 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3085
3086 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3087 IEM_MC_PREPARE_SSE_USAGE();
3088 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3089 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3090
3091 IEM_MC_ADVANCE_RIP();
3092 IEM_MC_END();
3093 }
3094 return VINF_SUCCESS;
3095}
3096
3097/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
3098FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3099{
3100 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
3101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3102 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3103 {
3104 /*
3105 * Register, register.
3106 */
3107 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3109
3110 IEM_MC_BEGIN(3, 0);
3111 IEM_MC_ARG(uint128_t *, pDst, 0);
3112 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3113 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3114 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3115 IEM_MC_PREPARE_SSE_USAGE();
3116 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3117 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3118 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3119 IEM_MC_ADVANCE_RIP();
3120 IEM_MC_END();
3121 }
3122 else
3123 {
3124 /*
3125 * Register, memory.
3126 */
3127 IEM_MC_BEGIN(3, 2);
3128 IEM_MC_ARG(uint128_t *, pDst, 0);
3129 IEM_MC_LOCAL(uint128_t, uSrc);
3130 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3132
3133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3134 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3135 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3137 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3138
3139 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3140 IEM_MC_PREPARE_SSE_USAGE();
3141 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3142 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3143
3144 IEM_MC_ADVANCE_RIP();
3145 IEM_MC_END();
3146 }
3147 return VINF_SUCCESS;
3148}
3149
3150
3151/** Opcode 0x0f 0x71 11/2. */
3152FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3153
3154/** Opcode 0x66 0x0f 0x71 11/2. */
3155FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3156
3157/** Opcode 0x0f 0x71 11/4. */
3158FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3159
3160/** Opcode 0x66 0x0f 0x71 11/4. */
3161FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3162
3163/** Opcode 0x0f 0x71 11/6. */
3164FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3165
3166/** Opcode 0x66 0x0f 0x71 11/6. */
3167FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3168
3169
3170/**
3171 * Group 12 jump table for register variant.
3172 */
3173IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3174{
3175 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3176 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3177 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3178 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3179 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3180 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3181 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3182 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3183};
3184AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3185
3186
3187/** Opcode 0x0f 0x71. */
3188FNIEMOP_DEF(iemOp_Grp12)
3189{
3190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3191 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3192 /* register, register */
3193 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3194 + pVCpu->iem.s.idxPrefix], bRm);
3195 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3196}
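
/*
 * Illustrative note (documentation only, not compiled): the group tables
 * above hold four entries per /r value, one per mandatory-prefix column.
 * Assuming idxPrefix is 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for
 * 0xf2, the dispatch boils down to:
 */
#if 0
unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* ModR/M reg field, 0..7. */
unsigned const iIdx = iReg * 4 + pVCpu->iem.s.idxPrefix;                  /* Row-major index, 0..31. */
PFNIEMOPRM const pfnOp = g_apfnGroup12RegReg[iIdx];
#endif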
3197
3198
3199/** Opcode 0x0f 0x72 11/2. */
3200FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3201
3202/** Opcode 0x66 0x0f 0x72 11/2. */
3203FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3204
3205/** Opcode 0x0f 0x72 11/4. */
3206FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3207
3208/** Opcode 0x66 0x0f 0x72 11/4. */
3209FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3210
3211/** Opcode 0x0f 0x72 11/6. */
3212FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3213
3214/** Opcode 0x66 0x0f 0x72 11/6. */
3215FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3216
3217
3218/**
3219 * Group 13 jump table for register variant.
3220 */
3221IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3222{
3223 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3224 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3225 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3226 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3227 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3228 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3229 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3230 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3231};
3232AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3233
3234/** Opcode 0x0f 0x72. */
3235FNIEMOP_DEF(iemOp_Grp13)
3236{
3237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3238 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3239 /* register, register */
3240 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3241 + pVCpu->iem.s.idxPrefix], bRm);
3242 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3243}
3244
3245
3246/** Opcode 0x0f 0x73 11/2. */
3247FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3248
3249/** Opcode 0x66 0x0f 0x73 11/2. */
3250FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3251
3252/** Opcode 0x66 0x0f 0x73 11/3. */
3253FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3254
3255/** Opcode 0x0f 0x73 11/6. */
3256FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3257
3258/** Opcode 0x66 0x0f 0x73 11/6. */
3259FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3260
3261/** Opcode 0x66 0x0f 0x73 11/7. */
3262FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3263
3264/**
3265 * Group 14 jump table for register variant.
3266 */
3267IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3268{
3269 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3270 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3271 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3272 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3273 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3274 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3275 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3276 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3277};
3278AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3279
3280
3281/** Opcode 0x0f 0x73. */
3282FNIEMOP_DEF(iemOp_Grp14)
3283{
3284 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3286 /* register, register */
3287 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3288 + pVCpu->iem.s.idxPrefix], bRm);
3289 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3290}
3291
3292
3293/**
3294 * Common worker for MMX instructions on the form:
3295 * pxxx mm1, mm2/mem64
3296 */
3297FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3298{
3299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3300 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3301 {
3302 /*
3303 * Register, register.
3304 */
3305 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3306 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3308 IEM_MC_BEGIN(2, 0);
3309 IEM_MC_ARG(uint64_t *, pDst, 0);
3310 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3311 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3312 IEM_MC_PREPARE_FPU_USAGE();
3313 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3314 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3315 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3316 IEM_MC_ADVANCE_RIP();
3317 IEM_MC_END();
3318 }
3319 else
3320 {
3321 /*
3322 * Register, memory.
3323 */
3324 IEM_MC_BEGIN(2, 2);
3325 IEM_MC_ARG(uint64_t *, pDst, 0);
3326 IEM_MC_LOCAL(uint64_t, uSrc);
3327 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3329
3330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3332 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3333 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3334
3335 IEM_MC_PREPARE_FPU_USAGE();
3336 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3337 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3338
3339 IEM_MC_ADVANCE_RIP();
3340 IEM_MC_END();
3341 }
3342 return VINF_SUCCESS;
3343}
3344
3345
3346/**
3347 * Common worker for SSE2 instructions on the forms:
3348 * pxxx xmm1, xmm2/mem128
3349 *
3350 * Proper alignment of the 128-bit operand is enforced.
3351 * Exceptions type 4. SSE2 cpuid checks.
3352 */
3353FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3354{
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3357 {
3358 /*
3359 * Register, register.
3360 */
3361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3362 IEM_MC_BEGIN(2, 0);
3363 IEM_MC_ARG(uint128_t *, pDst, 0);
3364 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3365 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3366 IEM_MC_PREPARE_SSE_USAGE();
3367 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3368 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3369 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3370 IEM_MC_ADVANCE_RIP();
3371 IEM_MC_END();
3372 }
3373 else
3374 {
3375 /*
3376 * Register, memory.
3377 */
3378 IEM_MC_BEGIN(2, 2);
3379 IEM_MC_ARG(uint128_t *, pDst, 0);
3380 IEM_MC_LOCAL(uint128_t, uSrc);
3381 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3383
3384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3386 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3387 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3388
3389 IEM_MC_PREPARE_SSE_USAGE();
3390 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3391 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3392
3393 IEM_MC_ADVANCE_RIP();
3394 IEM_MC_END();
3395 }
3396 return VINF_SUCCESS;
3397}
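
/*
 * Illustrative sketch (documentation only, not compiled): assuming standard
 * SDM semantics for PCMPEQB, a full-full aImpl worker compares its operands
 * element by element and writes all-ones/all-zeroes masks (hypothetical
 * reference using the MMX operand sizes):
 */
#if 0
static void iemPcmpeqbSketchRef(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        if (((*puDst >> (iByte * 8)) & 0xff) == ((*puSrc >> (iByte * 8)) & 0xff))
            uResult |= (uint64_t)0xff << (iByte * 8); /* Equal bytes yield 0xff, unequal 0x00. */
    *puDst = uResult;
}
#endif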
3398
3399
3400/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3401FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3402{
3403 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3404 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3405}
3406
3407/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3408FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3409{
3410 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3411 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3412}
3413
3414/* Opcode 0xf3 0x0f 0x74 - invalid */
3415/* Opcode 0xf2 0x0f 0x74 - invalid */
3416
3417
3418/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3419FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3420{
3421 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3422 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3423}
3424
3425/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3426FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3427{
3428 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3429 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3430}
3431
3432/* Opcode 0xf3 0x0f 0x75 - invalid */
3433/* Opcode 0xf2 0x0f 0x75 - invalid */
3434
3435
3436/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3437FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3438{
3439 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3440 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3441}
3442
3443/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3444FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3445{
3446 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3447 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3448}
3449
3450/* Opcode 0xf3 0x0f 0x76 - invalid */
3451/* Opcode 0xf2 0x0f 0x76 - invalid */
3452
3453
3454/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3455FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3456/* Opcode 0x66 0x0f 0x77 - invalid */
3457/* Opcode 0xf3 0x0f 0x77 - invalid */
3458/* Opcode 0xf2 0x0f 0x77 - invalid */
3459
3460/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3461FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3462/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3463FNIEMOP_STUB(iemOp_AmdGrp17);
3464/* Opcode 0xf3 0x0f 0x78 - invalid */
3465/* Opcode 0xf2 0x0f 0x78 - invalid */
3466
3467/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3468FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3469/* Opcode 0x66 0x0f 0x79 - invalid */
3470/* Opcode 0xf3 0x0f 0x79 - invalid */
3471/* Opcode 0xf2 0x0f 0x79 - invalid */
3472
3473/* Opcode 0x0f 0x7a - invalid */
3474/* Opcode 0x66 0x0f 0x7a - invalid */
3475/* Opcode 0xf3 0x0f 0x7a - invalid */
3476/* Opcode 0xf2 0x0f 0x7a - invalid */
3477
3478/* Opcode 0x0f 0x7b - invalid */
3479/* Opcode 0x66 0x0f 0x7b - invalid */
3480/* Opcode 0xf3 0x0f 0x7b - invalid */
3481/* Opcode 0xf2 0x0f 0x7b - invalid */
3482
3483/* Opcode 0x0f 0x7c - invalid */
3484/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3485FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3486/* Opcode 0xf3 0x0f 0x7c - invalid */
3487/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3488FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3489
3490/* Opcode 0x0f 0x7d - invalid */
3491/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3492FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3493/* Opcode 0xf3 0x0f 0x7d - invalid */
3494/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3495FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3496
3497
3498/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3499FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3500{
3501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3502 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3503 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3504 else
3505 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3506 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3507 {
3508 /* greg, MMX */
3509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3510 IEM_MC_BEGIN(0, 1);
3511 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3512 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3513 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3514 {
3515 IEM_MC_LOCAL(uint64_t, u64Tmp);
3516 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3517 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3518 }
3519 else
3520 {
3521 IEM_MC_LOCAL(uint32_t, u32Tmp);
3522 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3523 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3524 }
3525 IEM_MC_ADVANCE_RIP();
3526 IEM_MC_END();
3527 }
3528 else
3529 {
3530 /* [mem], MMX */
3531 IEM_MC_BEGIN(0, 2);
3532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3533 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3534        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate byte follows the ModR/M */
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3537 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3538 {
3539 IEM_MC_LOCAL(uint64_t, u64Tmp);
3540 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3541 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3542 }
3543 else
3544 {
3545 IEM_MC_LOCAL(uint32_t, u32Tmp);
3546 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3547 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3548 }
3549 IEM_MC_ADVANCE_RIP();
3550 IEM_MC_END();
3551 }
3552 return VINF_SUCCESS;
3553}
3554
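/*
 * Note: As the movd/movq handler above shows, the same 0x0f 0x7e opcode
 * byte decodes to two transfer widths depending on REX.W; the 0x66 form
 * below follows the identical pattern with an XMM source.  A sketch of the
 * width selection (iemSketchMovdMovq is a hypothetical illustration):
 */
#if 0 /* illustrative sketch only */
static uint64_t iemSketchMovdMovq(uint64_t uMmReg, bool fRexW)
{
    /* movq Eq,Pq moves all 64 bits; movd Ed,Pd only the low 32, zero extended. */
    return fRexW ? uMmReg : (uint32_t)uMmReg;
}
#endif
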
3555/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3556FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3557{
3558 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3559 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3560 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3561 else
3562 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3563 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3564 {
3565 /* greg, XMM */
3566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3567 IEM_MC_BEGIN(0, 1);
3568 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3569 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3570 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3571 {
3572 IEM_MC_LOCAL(uint64_t, u64Tmp);
3573 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3574 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3575 }
3576 else
3577 {
3578 IEM_MC_LOCAL(uint32_t, u32Tmp);
3579 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3580 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3581 }
3582 IEM_MC_ADVANCE_RIP();
3583 IEM_MC_END();
3584 }
3585 else
3586 {
3587 /* [mem], XMM */
3588 IEM_MC_BEGIN(0, 2);
3589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3590 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3591        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate byte follows the ModR/M */
3592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3593 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3594 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3595 {
3596 IEM_MC_LOCAL(uint64_t, u64Tmp);
3597 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3598 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3599 }
3600 else
3601 {
3602 IEM_MC_LOCAL(uint32_t, u32Tmp);
3603 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3604 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3605 }
3606 IEM_MC_ADVANCE_RIP();
3607 IEM_MC_END();
3608 }
3609 return VINF_SUCCESS;
3610}
3611
3612/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3613FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3614/* Opcode 0xf2 0x0f 0x7e - invalid */
3615
3616
3617/** Opcode 0x0f 0x7f - movq Qq, Pq */
3618FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3619{
3620 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3622 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3623 {
3624 /*
3625 * Register, register.
3626 */
3627 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3628 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3630 IEM_MC_BEGIN(0, 1);
3631 IEM_MC_LOCAL(uint64_t, u64Tmp);
3632 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3633 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3634 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3635 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3636 IEM_MC_ADVANCE_RIP();
3637 IEM_MC_END();
3638 }
3639 else
3640 {
3641 /*
3642 * Register, memory.
3643 */
3644 IEM_MC_BEGIN(0, 2);
3645 IEM_MC_LOCAL(uint64_t, u64Tmp);
3646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3647
3648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3650 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3651 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3652
3653 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3654 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3655
3656 IEM_MC_ADVANCE_RIP();
3657 IEM_MC_END();
3658 }
3659 return VINF_SUCCESS;
3660}
3661
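/*
 * Note: Unlike the XMM cases, the MMX register indexes above are taken
 * straight from the reg/rm fields without OR-ing in uRexReg/uRexB: there
 * are only eight mm registers, so REX.R/REX.B are assumed to be ignored
 * (the testcase todos above still want this verified).  Sketch of the two
 * index computations (hypothetical helper names; shift/mask values are the
 * decoder macros used throughout this file):
 */
#if 0 /* illustrative sketch only */
static unsigned iemSketchMmxRegIdx(uint8_t bRm) { return (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; } /* 0..7 */
static unsigned iemSketchMmxRmIdx(uint8_t bRm)  { return bRm & X86_MODRM_RM_MASK; }                            /* 0..7 */
#endif
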
3662/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3663FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3664{
3665 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3667 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3668 {
3669 /*
3670 * Register, register.
3671 */
3672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3673 IEM_MC_BEGIN(0, 0);
3674 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3675 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3676 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3677 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3678 IEM_MC_ADVANCE_RIP();
3679 IEM_MC_END();
3680 }
3681 else
3682 {
3683 /*
3684 * Register, memory.
3685 */
3686 IEM_MC_BEGIN(0, 2);
3687 IEM_MC_LOCAL(uint128_t, u128Tmp);
3688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3689
3690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3692 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3693 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3694
3695 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3696 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3697
3698 IEM_MC_ADVANCE_RIP();
3699 IEM_MC_END();
3700 }
3701 return VINF_SUCCESS;
3702}
3703
3704/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3705FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3706{
3707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3708 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3710 {
3711 /*
3712 * Register, register.
3713 */
3714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3715 IEM_MC_BEGIN(0, 0);
3716 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3717 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3718 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3719 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3720 IEM_MC_ADVANCE_RIP();
3721 IEM_MC_END();
3722 }
3723 else
3724 {
3725 /*
3726 * Register, memory.
3727 */
3728 IEM_MC_BEGIN(0, 2);
3729 IEM_MC_LOCAL(uint128_t, u128Tmp);
3730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3731
3732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3734 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3735 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3736
3737 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3738 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3739
3740 IEM_MC_ADVANCE_RIP();
3741 IEM_MC_END();
3742 }
3743 return VINF_SUCCESS;
3744}
3745
3746/* Opcode 0xf2 0x0f 0x7f - invalid */
3747
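/*
 * Note: The two 128-bit store forms above differ only in their alignment
 * contract: vmovdqa (0x66) stores via IEM_MC_STORE_MEM_U128_ALIGN_SSE,
 * which faults on a misaligned effective address, while vmovdqu (0xf3)
 * stores unaligned.  A sketch of the alignment predicate (hypothetical
 * helper; the actual fault raising lives in the IEM memory code):
 */
#if 0 /* illustrative sketch only */
static bool iemSketchSseAlignedOk(RTGCPTR GCPtrEff)
{
    /* Legacy-SSE 128-bit aligned accesses require 16-byte alignment, else #GP(0). */
    return !(GCPtrEff & 15);
}
#endif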
3748
3749
3750/** Opcode 0x0f 0x80. */
3751FNIEMOP_DEF(iemOp_jo_Jv)
3752{
3753 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3754 IEMOP_HLP_MIN_386();
3755 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3756 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3757 {
3758 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3760
3761 IEM_MC_BEGIN(0, 0);
3762 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3763 IEM_MC_REL_JMP_S16(i16Imm);
3764 } IEM_MC_ELSE() {
3765 IEM_MC_ADVANCE_RIP();
3766 } IEM_MC_ENDIF();
3767 IEM_MC_END();
3768 }
3769 else
3770 {
3771 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3773
3774 IEM_MC_BEGIN(0, 0);
3775 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3776 IEM_MC_REL_JMP_S32(i32Imm);
3777 } IEM_MC_ELSE() {
3778 IEM_MC_ADVANCE_RIP();
3779 } IEM_MC_ENDIF();
3780 IEM_MC_END();
3781 }
3782 return VINF_SUCCESS;
3783}
3784
3785
3786/** Opcode 0x0f 0x81. */
3787FNIEMOP_DEF(iemOp_jno_Jv)
3788{
3789 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3790 IEMOP_HLP_MIN_386();
3791 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3792 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3793 {
3794 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3796
3797 IEM_MC_BEGIN(0, 0);
3798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3799 IEM_MC_ADVANCE_RIP();
3800 } IEM_MC_ELSE() {
3801 IEM_MC_REL_JMP_S16(i16Imm);
3802 } IEM_MC_ENDIF();
3803 IEM_MC_END();
3804 }
3805 else
3806 {
3807 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809
3810 IEM_MC_BEGIN(0, 0);
3811 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3812 IEM_MC_ADVANCE_RIP();
3813 } IEM_MC_ELSE() {
3814 IEM_MC_REL_JMP_S32(i32Imm);
3815 } IEM_MC_ENDIF();
3816 IEM_MC_END();
3817 }
3818 return VINF_SUCCESS;
3819}
3820
3821
3822/** Opcode 0x0f 0x82. */
3823FNIEMOP_DEF(iemOp_jc_Jv)
3824{
3825 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3826 IEMOP_HLP_MIN_386();
3827 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3828 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3829 {
3830 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3832
3833 IEM_MC_BEGIN(0, 0);
3834 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3835 IEM_MC_REL_JMP_S16(i16Imm);
3836 } IEM_MC_ELSE() {
3837 IEM_MC_ADVANCE_RIP();
3838 } IEM_MC_ENDIF();
3839 IEM_MC_END();
3840 }
3841 else
3842 {
3843 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3845
3846 IEM_MC_BEGIN(0, 0);
3847 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3848 IEM_MC_REL_JMP_S32(i32Imm);
3849 } IEM_MC_ELSE() {
3850 IEM_MC_ADVANCE_RIP();
3851 } IEM_MC_ENDIF();
3852 IEM_MC_END();
3853 }
3854 return VINF_SUCCESS;
3855}
3856
3857
3858/** Opcode 0x0f 0x83. */
3859FNIEMOP_DEF(iemOp_jnc_Jv)
3860{
3861 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3862 IEMOP_HLP_MIN_386();
3863 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3864 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3865 {
3866 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3868
3869 IEM_MC_BEGIN(0, 0);
3870 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3871 IEM_MC_ADVANCE_RIP();
3872 } IEM_MC_ELSE() {
3873 IEM_MC_REL_JMP_S16(i16Imm);
3874 } IEM_MC_ENDIF();
3875 IEM_MC_END();
3876 }
3877 else
3878 {
3879 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3881
3882 IEM_MC_BEGIN(0, 0);
3883 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3884 IEM_MC_ADVANCE_RIP();
3885 } IEM_MC_ELSE() {
3886 IEM_MC_REL_JMP_S32(i32Imm);
3887 } IEM_MC_ENDIF();
3888 IEM_MC_END();
3889 }
3890 return VINF_SUCCESS;
3891}
3892
3893
3894/** Opcode 0x0f 0x84. */
3895FNIEMOP_DEF(iemOp_je_Jv)
3896{
3897 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3898 IEMOP_HLP_MIN_386();
3899 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3900 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3901 {
3902 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3904
3905 IEM_MC_BEGIN(0, 0);
3906 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3907 IEM_MC_REL_JMP_S16(i16Imm);
3908 } IEM_MC_ELSE() {
3909 IEM_MC_ADVANCE_RIP();
3910 } IEM_MC_ENDIF();
3911 IEM_MC_END();
3912 }
3913 else
3914 {
3915 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3917
3918 IEM_MC_BEGIN(0, 0);
3919 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3920 IEM_MC_REL_JMP_S32(i32Imm);
3921 } IEM_MC_ELSE() {
3922 IEM_MC_ADVANCE_RIP();
3923 } IEM_MC_ENDIF();
3924 IEM_MC_END();
3925 }
3926 return VINF_SUCCESS;
3927}
3928
3929
3930/** Opcode 0x0f 0x85. */
3931FNIEMOP_DEF(iemOp_jne_Jv)
3932{
3933 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3934 IEMOP_HLP_MIN_386();
3935 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3936 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3937 {
3938 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3940
3941 IEM_MC_BEGIN(0, 0);
3942 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3943 IEM_MC_ADVANCE_RIP();
3944 } IEM_MC_ELSE() {
3945 IEM_MC_REL_JMP_S16(i16Imm);
3946 } IEM_MC_ENDIF();
3947 IEM_MC_END();
3948 }
3949 else
3950 {
3951 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3953
3954 IEM_MC_BEGIN(0, 0);
3955 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3956 IEM_MC_ADVANCE_RIP();
3957 } IEM_MC_ELSE() {
3958 IEM_MC_REL_JMP_S32(i32Imm);
3959 } IEM_MC_ENDIF();
3960 IEM_MC_END();
3961 }
3962 return VINF_SUCCESS;
3963}
3964
3965
3966/** Opcode 0x0f 0x86. */
3967FNIEMOP_DEF(iemOp_jbe_Jv)
3968{
3969 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3970 IEMOP_HLP_MIN_386();
3971 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3972 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3973 {
3974 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3976
3977 IEM_MC_BEGIN(0, 0);
3978 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3979 IEM_MC_REL_JMP_S16(i16Imm);
3980 } IEM_MC_ELSE() {
3981 IEM_MC_ADVANCE_RIP();
3982 } IEM_MC_ENDIF();
3983 IEM_MC_END();
3984 }
3985 else
3986 {
3987 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3989
3990 IEM_MC_BEGIN(0, 0);
3991 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3992 IEM_MC_REL_JMP_S32(i32Imm);
3993 } IEM_MC_ELSE() {
3994 IEM_MC_ADVANCE_RIP();
3995 } IEM_MC_ENDIF();
3996 IEM_MC_END();
3997 }
3998 return VINF_SUCCESS;
3999}
4000
4001
4002/** Opcode 0x0f 0x87. */
4003FNIEMOP_DEF(iemOp_jnbe_Jv)
4004{
4005 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4006 IEMOP_HLP_MIN_386();
4007 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4008 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4009 {
4010 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4012
4013 IEM_MC_BEGIN(0, 0);
4014 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4015 IEM_MC_ADVANCE_RIP();
4016 } IEM_MC_ELSE() {
4017 IEM_MC_REL_JMP_S16(i16Imm);
4018 } IEM_MC_ENDIF();
4019 IEM_MC_END();
4020 }
4021 else
4022 {
4023 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4025
4026 IEM_MC_BEGIN(0, 0);
4027 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4028 IEM_MC_ADVANCE_RIP();
4029 } IEM_MC_ELSE() {
4030 IEM_MC_REL_JMP_S32(i32Imm);
4031 } IEM_MC_ENDIF();
4032 IEM_MC_END();
4033 }
4034 return VINF_SUCCESS;
4035}
4036
4037
4038/** Opcode 0x0f 0x88. */
4039FNIEMOP_DEF(iemOp_js_Jv)
4040{
4041 IEMOP_MNEMONIC(js_Jv, "js Jv");
4042 IEMOP_HLP_MIN_386();
4043 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4044 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4045 {
4046 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4048
4049 IEM_MC_BEGIN(0, 0);
4050 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4051 IEM_MC_REL_JMP_S16(i16Imm);
4052 } IEM_MC_ELSE() {
4053 IEM_MC_ADVANCE_RIP();
4054 } IEM_MC_ENDIF();
4055 IEM_MC_END();
4056 }
4057 else
4058 {
4059 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4061
4062 IEM_MC_BEGIN(0, 0);
4063 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4064 IEM_MC_REL_JMP_S32(i32Imm);
4065 } IEM_MC_ELSE() {
4066 IEM_MC_ADVANCE_RIP();
4067 } IEM_MC_ENDIF();
4068 IEM_MC_END();
4069 }
4070 return VINF_SUCCESS;
4071}
4072
4073
4074/** Opcode 0x0f 0x89. */
4075FNIEMOP_DEF(iemOp_jns_Jv)
4076{
4077 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4078 IEMOP_HLP_MIN_386();
4079 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4080 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4081 {
4082 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4084
4085 IEM_MC_BEGIN(0, 0);
4086 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4087 IEM_MC_ADVANCE_RIP();
4088 } IEM_MC_ELSE() {
4089 IEM_MC_REL_JMP_S16(i16Imm);
4090 } IEM_MC_ENDIF();
4091 IEM_MC_END();
4092 }
4093 else
4094 {
4095 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4097
4098 IEM_MC_BEGIN(0, 0);
4099 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4100 IEM_MC_ADVANCE_RIP();
4101 } IEM_MC_ELSE() {
4102 IEM_MC_REL_JMP_S32(i32Imm);
4103 } IEM_MC_ENDIF();
4104 IEM_MC_END();
4105 }
4106 return VINF_SUCCESS;
4107}
4108
4109
4110/** Opcode 0x0f 0x8a. */
4111FNIEMOP_DEF(iemOp_jp_Jv)
4112{
4113 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4114 IEMOP_HLP_MIN_386();
4115 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4116 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4117 {
4118 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120
4121 IEM_MC_BEGIN(0, 0);
4122 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4123 IEM_MC_REL_JMP_S16(i16Imm);
4124 } IEM_MC_ELSE() {
4125 IEM_MC_ADVANCE_RIP();
4126 } IEM_MC_ENDIF();
4127 IEM_MC_END();
4128 }
4129 else
4130 {
4131 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4133
4134 IEM_MC_BEGIN(0, 0);
4135 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4136 IEM_MC_REL_JMP_S32(i32Imm);
4137 } IEM_MC_ELSE() {
4138 IEM_MC_ADVANCE_RIP();
4139 } IEM_MC_ENDIF();
4140 IEM_MC_END();
4141 }
4142 return VINF_SUCCESS;
4143}
4144
4145
4146/** Opcode 0x0f 0x8b. */
4147FNIEMOP_DEF(iemOp_jnp_Jv)
4148{
4149 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4150 IEMOP_HLP_MIN_386();
4151 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4152 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4153 {
4154 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4156
4157 IEM_MC_BEGIN(0, 0);
4158 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4159 IEM_MC_ADVANCE_RIP();
4160 } IEM_MC_ELSE() {
4161 IEM_MC_REL_JMP_S16(i16Imm);
4162 } IEM_MC_ENDIF();
4163 IEM_MC_END();
4164 }
4165 else
4166 {
4167 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4169
4170 IEM_MC_BEGIN(0, 0);
4171 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4172 IEM_MC_ADVANCE_RIP();
4173 } IEM_MC_ELSE() {
4174 IEM_MC_REL_JMP_S32(i32Imm);
4175 } IEM_MC_ENDIF();
4176 IEM_MC_END();
4177 }
4178 return VINF_SUCCESS;
4179}
4180
4181
4182/** Opcode 0x0f 0x8c. */
4183FNIEMOP_DEF(iemOp_jl_Jv)
4184{
4185 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4186 IEMOP_HLP_MIN_386();
4187 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4188 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4189 {
4190 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4192
4193 IEM_MC_BEGIN(0, 0);
4194 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4195 IEM_MC_REL_JMP_S16(i16Imm);
4196 } IEM_MC_ELSE() {
4197 IEM_MC_ADVANCE_RIP();
4198 } IEM_MC_ENDIF();
4199 IEM_MC_END();
4200 }
4201 else
4202 {
4203 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4205
4206 IEM_MC_BEGIN(0, 0);
4207 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4208 IEM_MC_REL_JMP_S32(i32Imm);
4209 } IEM_MC_ELSE() {
4210 IEM_MC_ADVANCE_RIP();
4211 } IEM_MC_ENDIF();
4212 IEM_MC_END();
4213 }
4214 return VINF_SUCCESS;
4215}
4216
4217
4218/** Opcode 0x0f 0x8d. */
4219FNIEMOP_DEF(iemOp_jnl_Jv)
4220{
4221 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4222 IEMOP_HLP_MIN_386();
4223 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4224 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4225 {
4226 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4228
4229 IEM_MC_BEGIN(0, 0);
4230 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4231 IEM_MC_ADVANCE_RIP();
4232 } IEM_MC_ELSE() {
4233 IEM_MC_REL_JMP_S16(i16Imm);
4234 } IEM_MC_ENDIF();
4235 IEM_MC_END();
4236 }
4237 else
4238 {
4239 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4241
4242 IEM_MC_BEGIN(0, 0);
4243 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4244 IEM_MC_ADVANCE_RIP();
4245 } IEM_MC_ELSE() {
4246 IEM_MC_REL_JMP_S32(i32Imm);
4247 } IEM_MC_ENDIF();
4248 IEM_MC_END();
4249 }
4250 return VINF_SUCCESS;
4251}
4252
4253
4254/** Opcode 0x0f 0x8e. */
4255FNIEMOP_DEF(iemOp_jle_Jv)
4256{
4257 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4258 IEMOP_HLP_MIN_386();
4259 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4260 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4261 {
4262 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4264
4265 IEM_MC_BEGIN(0, 0);
4266 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4267 IEM_MC_REL_JMP_S16(i16Imm);
4268 } IEM_MC_ELSE() {
4269 IEM_MC_ADVANCE_RIP();
4270 } IEM_MC_ENDIF();
4271 IEM_MC_END();
4272 }
4273 else
4274 {
4275 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4277
4278 IEM_MC_BEGIN(0, 0);
4279 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4280 IEM_MC_REL_JMP_S32(i32Imm);
4281 } IEM_MC_ELSE() {
4282 IEM_MC_ADVANCE_RIP();
4283 } IEM_MC_ENDIF();
4284 IEM_MC_END();
4285 }
4286 return VINF_SUCCESS;
4287}
4288
4289
4290/** Opcode 0x0f 0x8f. */
4291FNIEMOP_DEF(iemOp_jnle_Jv)
4292{
4293 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4294 IEMOP_HLP_MIN_386();
4295 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4296 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4297 {
4298 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4300
4301 IEM_MC_BEGIN(0, 0);
4302 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4303 IEM_MC_ADVANCE_RIP();
4304 } IEM_MC_ELSE() {
4305 IEM_MC_REL_JMP_S16(i16Imm);
4306 } IEM_MC_ENDIF();
4307 IEM_MC_END();
4308 }
4309 else
4310 {
4311 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4313
4314 IEM_MC_BEGIN(0, 0);
4315 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4316 IEM_MC_ADVANCE_RIP();
4317 } IEM_MC_ELSE() {
4318 IEM_MC_REL_JMP_S32(i32Imm);
4319 } IEM_MC_ENDIF();
4320 IEM_MC_END();
4321 }
4322 return VINF_SUCCESS;
4323}
4324
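/*
 * Note: For all the Jcc handlers above, the fetched displacement is
 * relative to the end of the instruction, and a 16-bit operand size
 * truncates the resulting instruction pointer to IP.  A sketch of what
 * IEM_MC_REL_JMP_S16 amounts to (hypothetical helper, ignoring the
 * canonical-address and CS-limit checks the real macro performs):
 */
#if 0 /* illustrative sketch only */
static uint64_t iemSketchRelJmpS16(uint64_t uRipAfterInstr, int16_t i16Disp)
{
    return (uint16_t)(uRipAfterInstr + i16Disp); /* wraps to 16-bit IP */
}
#endif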
4325
4326/** Opcode 0x0f 0x90. */
4327FNIEMOP_DEF(iemOp_seto_Eb)
4328{
4329 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4330 IEMOP_HLP_MIN_386();
4331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4332
4333 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4334 * any way. AMD says it's "unused", whatever that means. We're
4335 * ignoring for now. */
4336 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4337 {
4338 /* register target */
4339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4340 IEM_MC_BEGIN(0, 0);
4341 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4342 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4343 } IEM_MC_ELSE() {
4344 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4345 } IEM_MC_ENDIF();
4346 IEM_MC_ADVANCE_RIP();
4347 IEM_MC_END();
4348 }
4349 else
4350 {
4351 /* memory target */
4352 IEM_MC_BEGIN(0, 1);
4353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4356 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4357 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4358 } IEM_MC_ELSE() {
4359 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4360 } IEM_MC_ENDIF();
4361 IEM_MC_ADVANCE_RIP();
4362 IEM_MC_END();
4363 }
4364 return VINF_SUCCESS;
4365}
4366
4367
4368/** Opcode 0x0f 0x91. */
4369FNIEMOP_DEF(iemOp_setno_Eb)
4370{
4371 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4372 IEMOP_HLP_MIN_386();
4373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4374
4375 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4376 * any way. AMD says it's "unused", whatever that means. We're
4377 * ignoring for now. */
4378 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4379 {
4380 /* register target */
4381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4382 IEM_MC_BEGIN(0, 0);
4383 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4384 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4385 } IEM_MC_ELSE() {
4386 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4387 } IEM_MC_ENDIF();
4388 IEM_MC_ADVANCE_RIP();
4389 IEM_MC_END();
4390 }
4391 else
4392 {
4393 /* memory target */
4394 IEM_MC_BEGIN(0, 1);
4395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4399 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4400 } IEM_MC_ELSE() {
4401 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4402 } IEM_MC_ENDIF();
4403 IEM_MC_ADVANCE_RIP();
4404 IEM_MC_END();
4405 }
4406 return VINF_SUCCESS;
4407}
4408
4409
4410/** Opcode 0x0f 0x92. */
4411FNIEMOP_DEF(iemOp_setc_Eb)
4412{
4413 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4414 IEMOP_HLP_MIN_386();
4415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4416
4417 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4418 * any way. AMD says it's "unused", whatever that means. We're
4419 * ignoring for now. */
4420 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4421 {
4422 /* register target */
4423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4424 IEM_MC_BEGIN(0, 0);
4425 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4426 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4427 } IEM_MC_ELSE() {
4428 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4429 } IEM_MC_ENDIF();
4430 IEM_MC_ADVANCE_RIP();
4431 IEM_MC_END();
4432 }
4433 else
4434 {
4435 /* memory target */
4436 IEM_MC_BEGIN(0, 1);
4437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4440 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4441 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4442 } IEM_MC_ELSE() {
4443 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4444 } IEM_MC_ENDIF();
4445 IEM_MC_ADVANCE_RIP();
4446 IEM_MC_END();
4447 }
4448 return VINF_SUCCESS;
4449}
4450
4451
4452/** Opcode 0x0f 0x93. */
4453FNIEMOP_DEF(iemOp_setnc_Eb)
4454{
4455 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4456 IEMOP_HLP_MIN_386();
4457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4458
4459 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4460 * any way. AMD says it's "unused", whatever that means. We're
4461 * ignoring for now. */
4462 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4463 {
4464 /* register target */
4465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4466 IEM_MC_BEGIN(0, 0);
4467 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4468 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4469 } IEM_MC_ELSE() {
4470 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4471 } IEM_MC_ENDIF();
4472 IEM_MC_ADVANCE_RIP();
4473 IEM_MC_END();
4474 }
4475 else
4476 {
4477 /* memory target */
4478 IEM_MC_BEGIN(0, 1);
4479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4482 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4483 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4484 } IEM_MC_ELSE() {
4485 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4486 } IEM_MC_ENDIF();
4487 IEM_MC_ADVANCE_RIP();
4488 IEM_MC_END();
4489 }
4490 return VINF_SUCCESS;
4491}
4492
4493
4494/** Opcode 0x0f 0x94. */
4495FNIEMOP_DEF(iemOp_sete_Eb)
4496{
4497 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4498 IEMOP_HLP_MIN_386();
4499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4500
4501 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4502 * any way. AMD says it's "unused", whatever that means. We're
4503 * ignoring for now. */
4504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4505 {
4506 /* register target */
4507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4508 IEM_MC_BEGIN(0, 0);
4509 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4510 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4511 } IEM_MC_ELSE() {
4512 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4513 } IEM_MC_ENDIF();
4514 IEM_MC_ADVANCE_RIP();
4515 IEM_MC_END();
4516 }
4517 else
4518 {
4519 /* memory target */
4520 IEM_MC_BEGIN(0, 1);
4521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4524 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4525 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4526 } IEM_MC_ELSE() {
4527 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4528 } IEM_MC_ENDIF();
4529 IEM_MC_ADVANCE_RIP();
4530 IEM_MC_END();
4531 }
4532 return VINF_SUCCESS;
4533}
4534
4535
4536/** Opcode 0x0f 0x95. */
4537FNIEMOP_DEF(iemOp_setne_Eb)
4538{
4539 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4540 IEMOP_HLP_MIN_386();
4541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4542
4543 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4544 * any way. AMD says it's "unused", whatever that means. We're
4545 * ignoring for now. */
4546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4547 {
4548 /* register target */
4549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4550 IEM_MC_BEGIN(0, 0);
4551 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4552 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4553 } IEM_MC_ELSE() {
4554 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4555 } IEM_MC_ENDIF();
4556 IEM_MC_ADVANCE_RIP();
4557 IEM_MC_END();
4558 }
4559 else
4560 {
4561 /* memory target */
4562 IEM_MC_BEGIN(0, 1);
4563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4566 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4567 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4568 } IEM_MC_ELSE() {
4569 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4570 } IEM_MC_ENDIF();
4571 IEM_MC_ADVANCE_RIP();
4572 IEM_MC_END();
4573 }
4574 return VINF_SUCCESS;
4575}
4576
4577
4578/** Opcode 0x0f 0x96. */
4579FNIEMOP_DEF(iemOp_setbe_Eb)
4580{
4581 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4582 IEMOP_HLP_MIN_386();
4583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4584
4585 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4586 * any way. AMD says it's "unused", whatever that means. We're
4587 * ignoring for now. */
4588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4589 {
4590 /* register target */
4591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4592 IEM_MC_BEGIN(0, 0);
4593 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4594 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4595 } IEM_MC_ELSE() {
4596 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4597 } IEM_MC_ENDIF();
4598 IEM_MC_ADVANCE_RIP();
4599 IEM_MC_END();
4600 }
4601 else
4602 {
4603 /* memory target */
4604 IEM_MC_BEGIN(0, 1);
4605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4608 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4609 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4610 } IEM_MC_ELSE() {
4611 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4612 } IEM_MC_ENDIF();
4613 IEM_MC_ADVANCE_RIP();
4614 IEM_MC_END();
4615 }
4616 return VINF_SUCCESS;
4617}
4618
4619
4620/** Opcode 0x0f 0x97. */
4621FNIEMOP_DEF(iemOp_setnbe_Eb)
4622{
4623 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4624 IEMOP_HLP_MIN_386();
4625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4626
4627 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4628 * any way. AMD says it's "unused", whatever that means. We're
4629 * ignoring for now. */
4630 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4631 {
4632 /* register target */
4633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4634 IEM_MC_BEGIN(0, 0);
4635 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4636 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4637 } IEM_MC_ELSE() {
4638 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4639 } IEM_MC_ENDIF();
4640 IEM_MC_ADVANCE_RIP();
4641 IEM_MC_END();
4642 }
4643 else
4644 {
4645 /* memory target */
4646 IEM_MC_BEGIN(0, 1);
4647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4651 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4652 } IEM_MC_ELSE() {
4653 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4654 } IEM_MC_ENDIF();
4655 IEM_MC_ADVANCE_RIP();
4656 IEM_MC_END();
4657 }
4658 return VINF_SUCCESS;
4659}
4660
4661
4662/** Opcode 0x0f 0x98. */
4663FNIEMOP_DEF(iemOp_sets_Eb)
4664{
4665 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4666 IEMOP_HLP_MIN_386();
4667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4668
4669 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4670 * any way. AMD says it's "unused", whatever that means. We're
4671 * ignoring for now. */
4672 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4673 {
4674 /* register target */
4675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4676 IEM_MC_BEGIN(0, 0);
4677 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4678 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4679 } IEM_MC_ELSE() {
4680 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4681 } IEM_MC_ENDIF();
4682 IEM_MC_ADVANCE_RIP();
4683 IEM_MC_END();
4684 }
4685 else
4686 {
4687 /* memory target */
4688 IEM_MC_BEGIN(0, 1);
4689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4693 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4694 } IEM_MC_ELSE() {
4695 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4696 } IEM_MC_ENDIF();
4697 IEM_MC_ADVANCE_RIP();
4698 IEM_MC_END();
4699 }
4700 return VINF_SUCCESS;
4701}
4702
4703
4704/** Opcode 0x0f 0x99. */
4705FNIEMOP_DEF(iemOp_setns_Eb)
4706{
4707 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4708 IEMOP_HLP_MIN_386();
4709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4710
4711 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4712 * any way. AMD says it's "unused", whatever that means. We're
4713 * ignoring for now. */
4714 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4715 {
4716 /* register target */
4717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4718 IEM_MC_BEGIN(0, 0);
4719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4720 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4721 } IEM_MC_ELSE() {
4722 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4723 } IEM_MC_ENDIF();
4724 IEM_MC_ADVANCE_RIP();
4725 IEM_MC_END();
4726 }
4727 else
4728 {
4729 /* memory target */
4730 IEM_MC_BEGIN(0, 1);
4731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4735 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4736 } IEM_MC_ELSE() {
4737 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4738 } IEM_MC_ENDIF();
4739 IEM_MC_ADVANCE_RIP();
4740 IEM_MC_END();
4741 }
4742 return VINF_SUCCESS;
4743}
4744
4745
4746/** Opcode 0x0f 0x9a. */
4747FNIEMOP_DEF(iemOp_setp_Eb)
4748{
4749 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4750 IEMOP_HLP_MIN_386();
4751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4752
4753 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4754 * any way. AMD says it's "unused", whatever that means. We're
4755 * ignoring for now. */
4756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4757 {
4758 /* register target */
4759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4760 IEM_MC_BEGIN(0, 0);
4761 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4762 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4763 } IEM_MC_ELSE() {
4764 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4765 } IEM_MC_ENDIF();
4766 IEM_MC_ADVANCE_RIP();
4767 IEM_MC_END();
4768 }
4769 else
4770 {
4771 /* memory target */
4772 IEM_MC_BEGIN(0, 1);
4773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4776 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4777 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4778 } IEM_MC_ELSE() {
4779 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4780 } IEM_MC_ENDIF();
4781 IEM_MC_ADVANCE_RIP();
4782 IEM_MC_END();
4783 }
4784 return VINF_SUCCESS;
4785}
4786
4787
4788/** Opcode 0x0f 0x9b. */
4789FNIEMOP_DEF(iemOp_setnp_Eb)
4790{
4791 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4792 IEMOP_HLP_MIN_386();
4793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4794
4795 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4796 * any way. AMD says it's "unused", whatever that means. We're
4797 * ignoring for now. */
4798 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4799 {
4800 /* register target */
4801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4802 IEM_MC_BEGIN(0, 0);
4803 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4804 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4805 } IEM_MC_ELSE() {
4806 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4807 } IEM_MC_ENDIF();
4808 IEM_MC_ADVANCE_RIP();
4809 IEM_MC_END();
4810 }
4811 else
4812 {
4813 /* memory target */
4814 IEM_MC_BEGIN(0, 1);
4815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4818 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4819 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4820 } IEM_MC_ELSE() {
4821 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4822 } IEM_MC_ENDIF();
4823 IEM_MC_ADVANCE_RIP();
4824 IEM_MC_END();
4825 }
4826 return VINF_SUCCESS;
4827}
4828
4829
4830/** Opcode 0x0f 0x9c. */
4831FNIEMOP_DEF(iemOp_setl_Eb)
4832{
4833 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4834 IEMOP_HLP_MIN_386();
4835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4836
4837 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4838 * any way. AMD says it's "unused", whatever that means. We're
4839 * ignoring for now. */
4840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4841 {
4842 /* register target */
4843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4844 IEM_MC_BEGIN(0, 0);
4845 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4846 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4847 } IEM_MC_ELSE() {
4848 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4849 } IEM_MC_ENDIF();
4850 IEM_MC_ADVANCE_RIP();
4851 IEM_MC_END();
4852 }
4853 else
4854 {
4855 /* memory target */
4856 IEM_MC_BEGIN(0, 1);
4857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4860 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4861 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4862 } IEM_MC_ELSE() {
4863 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4864 } IEM_MC_ENDIF();
4865 IEM_MC_ADVANCE_RIP();
4866 IEM_MC_END();
4867 }
4868 return VINF_SUCCESS;
4869}
4870
4871
4872/** Opcode 0x0f 0x9d. */
4873FNIEMOP_DEF(iemOp_setnl_Eb)
4874{
4875 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4876 IEMOP_HLP_MIN_386();
4877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4878
4879 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4880 * any way. AMD says it's "unused", whatever that means. We're
4881 * ignoring for now. */
4882 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4883 {
4884 /* register target */
4885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4886 IEM_MC_BEGIN(0, 0);
4887 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4888 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4889 } IEM_MC_ELSE() {
4890 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4891 } IEM_MC_ENDIF();
4892 IEM_MC_ADVANCE_RIP();
4893 IEM_MC_END();
4894 }
4895 else
4896 {
4897 /* memory target */
4898 IEM_MC_BEGIN(0, 1);
4899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4902 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4903 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4904 } IEM_MC_ELSE() {
4905 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4906 } IEM_MC_ENDIF();
4907 IEM_MC_ADVANCE_RIP();
4908 IEM_MC_END();
4909 }
4910 return VINF_SUCCESS;
4911}
4912
4913
4914/** Opcode 0x0f 0x9e. */
4915FNIEMOP_DEF(iemOp_setle_Eb)
4916{
4917 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4918 IEMOP_HLP_MIN_386();
4919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4920
4921 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4922 * any way. AMD says it's "unused", whatever that means. We're
4923 * ignoring for now. */
4924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4925 {
4926 /* register target */
4927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4928 IEM_MC_BEGIN(0, 0);
4929 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4930 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4931 } IEM_MC_ELSE() {
4932 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4933 } IEM_MC_ENDIF();
4934 IEM_MC_ADVANCE_RIP();
4935 IEM_MC_END();
4936 }
4937 else
4938 {
4939 /* memory target */
4940 IEM_MC_BEGIN(0, 1);
4941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4944 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4945 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4946 } IEM_MC_ELSE() {
4947 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4948 } IEM_MC_ENDIF();
4949 IEM_MC_ADVANCE_RIP();
4950 IEM_MC_END();
4951 }
4952 return VINF_SUCCESS;
4953}
4954
4955
4956/** Opcode 0x0f 0x9f. */
4957FNIEMOP_DEF(iemOp_setnle_Eb)
4958{
4959 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4960 IEMOP_HLP_MIN_386();
4961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4962
4963 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4964 * any way. AMD says it's "unused", whatever that means. We're
4965 * ignoring for now. */
4966 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4967 {
4968 /* register target */
4969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4970 IEM_MC_BEGIN(0, 0);
4971 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4972 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4973 } IEM_MC_ELSE() {
4974 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4975 } IEM_MC_ENDIF();
4976 IEM_MC_ADVANCE_RIP();
4977 IEM_MC_END();
4978 }
4979 else
4980 {
4981 /* memory target */
4982 IEM_MC_BEGIN(0, 1);
4983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4986 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4987 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4988 } IEM_MC_ELSE() {
4989 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4990 } IEM_MC_ENDIF();
4991 IEM_MC_ADVANCE_RIP();
4992 IEM_MC_END();
4993 }
4994 return VINF_SUCCESS;
4995}
4996
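/*
 * Note: The sixteen SETcc handlers above (and the Jcc handlers at 0x80..0x8f)
 * all test the same condition table keyed on the low opcode nibble, with each
 * odd nibble negating its even neighbour.  A condensed sketch (hypothetical
 * helper; the real code expands each case via the IEM_MC_IF_* macros):
 */
#if 0 /* illustrative sketch only */
static bool iemSketchCcHolds(uint32_t fEFlags, uint8_t bCc)
{
    bool fRes;
    switch (bCc & 0xe)
    {
        case 0x0: fRes = RT_BOOL(fEFlags & X86_EFL_OF); break;                /* o  / no  */
        case 0x2: fRes = RT_BOOL(fEFlags & X86_EFL_CF); break;                /* c  / nc  */
        case 0x4: fRes = RT_BOOL(fEFlags & X86_EFL_ZF); break;                /* e  / ne  */
        case 0x6: fRes = RT_BOOL(fEFlags & (X86_EFL_CF | X86_EFL_ZF)); break; /* be / nbe */
        case 0x8: fRes = RT_BOOL(fEFlags & X86_EFL_SF); break;                /* s  / ns  */
        case 0xa: fRes = RT_BOOL(fEFlags & X86_EFL_PF); break;                /* p  / np  */
        case 0xc: fRes = RT_BOOL(fEFlags & X86_EFL_SF) != RT_BOOL(fEFlags & X86_EFL_OF); break; /* l / nl */
        default:  fRes =    RT_BOOL(fEFlags & X86_EFL_ZF)
                         || RT_BOOL(fEFlags & X86_EFL_SF) != RT_BOOL(fEFlags & X86_EFL_OF); break; /* le / nle */
    }
    return (bCc & 1) ? !fRes : fRes;
}
#endif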
4997
4998/**
4999 * Common 'push segment-register' helper.
5000 */
5001FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5002{
5003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5004    Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* only fs/gs pushes reach this in 64-bit mode */
5005 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5006
5007 switch (pVCpu->iem.s.enmEffOpSize)
5008 {
5009 case IEMMODE_16BIT:
5010 IEM_MC_BEGIN(0, 1);
5011 IEM_MC_LOCAL(uint16_t, u16Value);
5012 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5013 IEM_MC_PUSH_U16(u16Value);
5014 IEM_MC_ADVANCE_RIP();
5015 IEM_MC_END();
5016 break;
5017
5018 case IEMMODE_32BIT:
5019 IEM_MC_BEGIN(0, 1);
5020 IEM_MC_LOCAL(uint32_t, u32Value);
5021 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5022 IEM_MC_PUSH_U32_SREG(u32Value);
5023 IEM_MC_ADVANCE_RIP();
5024 IEM_MC_END();
5025 break;
5026
5027 case IEMMODE_64BIT:
5028 IEM_MC_BEGIN(0, 1);
5029 IEM_MC_LOCAL(uint64_t, u64Value);
5030 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5031 IEM_MC_PUSH_U64(u64Value);
5032 IEM_MC_ADVANCE_RIP();
5033 IEM_MC_END();
5034 break;
5035 }
5036
5037 return VINF_SUCCESS;
5038}
5039
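/*
 * Note: The 32-bit case above deliberately uses IEM_MC_PUSH_U32_SREG
 * rather than a plain 32-bit push: on at least some CPUs a 32-bit segment
 * register push only writes the selector word of the stack slot, leaving
 * the high half untouched, and the dedicated MC reproduces that.  Sketch
 * of that assumed store behaviour (hypothetical helper; real CPUs differ,
 * which is exactly why the separate MC exists):
 */
#if 0 /* illustrative sketch only */
static void iemSketchPushSRegU32(uint16_t uSel, uint16_t *puStackSlotLow16)
{
    *puStackSlotLow16 = uSel; /* only the selector word is written; bytes 2..3 keep their old value */
}
#endif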
5040
5041/** Opcode 0x0f 0xa0. */
5042FNIEMOP_DEF(iemOp_push_fs)
5043{
5044 IEMOP_MNEMONIC(push_fs, "push fs");
5045 IEMOP_HLP_MIN_386();
5046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5047 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5048}
5049
5050
5051/** Opcode 0x0f 0xa1. */
5052FNIEMOP_DEF(iemOp_pop_fs)
5053{
5054 IEMOP_MNEMONIC(pop_fs, "pop fs");
5055 IEMOP_HLP_MIN_386();
5056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5057 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5058}
5059
5060
5061/** Opcode 0x0f 0xa2. */
5062FNIEMOP_DEF(iemOp_cpuid)
5063{
5064 IEMOP_MNEMONIC(cpuid, "cpuid");
5065 IEMOP_HLP_MIN_486(); /* not all 486es. */
5066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5067 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5068}
5069
5070
5071/**
5072 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5073 * iemOp_bts_Ev_Gv.
5074 */
5075FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5076{
5077 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5078 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5079
5080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5081 {
5082 /* register destination. */
5083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5084 switch (pVCpu->iem.s.enmEffOpSize)
5085 {
5086 case IEMMODE_16BIT:
5087 IEM_MC_BEGIN(3, 0);
5088 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5089 IEM_MC_ARG(uint16_t, u16Src, 1);
5090 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5091
5092 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5093 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5094 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5095 IEM_MC_REF_EFLAGS(pEFlags);
5096 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5097
5098 IEM_MC_ADVANCE_RIP();
5099 IEM_MC_END();
5100 return VINF_SUCCESS;
5101
5102 case IEMMODE_32BIT:
5103 IEM_MC_BEGIN(3, 0);
5104 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5105 IEM_MC_ARG(uint32_t, u32Src, 1);
5106 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5107
5108 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5109 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5110 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5111 IEM_MC_REF_EFLAGS(pEFlags);
5112 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5113
5114 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5115 IEM_MC_ADVANCE_RIP();
5116 IEM_MC_END();
5117 return VINF_SUCCESS;
5118
5119 case IEMMODE_64BIT:
5120 IEM_MC_BEGIN(3, 0);
5121 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5122 IEM_MC_ARG(uint64_t, u64Src, 1);
5123 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5124
5125 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5126 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5127 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5128 IEM_MC_REF_EFLAGS(pEFlags);
5129 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5130
5131 IEM_MC_ADVANCE_RIP();
5132 IEM_MC_END();
5133 return VINF_SUCCESS;
5134
5135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5136 }
5137 }
5138 else
5139 {
5140 /* memory destination. */
5141
5142 uint32_t fAccess;
5143 if (pImpl->pfnLockedU16)
5144 fAccess = IEM_ACCESS_DATA_RW;
5145 else /* BT */
5146 fAccess = IEM_ACCESS_DATA_R;
5147
5148 /** @todo test negative bit offsets! */
5149 switch (pVCpu->iem.s.enmEffOpSize)
5150 {
5151 case IEMMODE_16BIT:
5152 IEM_MC_BEGIN(3, 2);
5153 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5154 IEM_MC_ARG(uint16_t, u16Src, 1);
5155 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5157 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5158
5159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5160 if (pImpl->pfnLockedU16)
5161 IEMOP_HLP_DONE_DECODING();
5162 else
5163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5164 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5165 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5166 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5167 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5168 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5169 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5170 IEM_MC_FETCH_EFLAGS(EFlags);
5171
5172 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5173 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5174 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5175 else
5176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5177 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5178
5179 IEM_MC_COMMIT_EFLAGS(EFlags);
5180 IEM_MC_ADVANCE_RIP();
5181 IEM_MC_END();
5182 return VINF_SUCCESS;
5183
5184 case IEMMODE_32BIT:
5185 IEM_MC_BEGIN(3, 2);
5186 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5187 IEM_MC_ARG(uint32_t, u32Src, 1);
5188 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5190 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5191
5192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5193 if (pImpl->pfnLockedU16)
5194 IEMOP_HLP_DONE_DECODING();
5195 else
5196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5197 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5198 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5199 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5200 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5201 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5202 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5203 IEM_MC_FETCH_EFLAGS(EFlags);
5204
5205 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5206 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5207 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5208 else
5209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5210 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5211
5212 IEM_MC_COMMIT_EFLAGS(EFlags);
5213 IEM_MC_ADVANCE_RIP();
5214 IEM_MC_END();
5215 return VINF_SUCCESS;
5216
5217 case IEMMODE_64BIT:
5218 IEM_MC_BEGIN(3, 2);
5219 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5220 IEM_MC_ARG(uint64_t, u64Src, 1);
5221 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5223 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5224
5225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5226 if (pImpl->pfnLockedU16)
5227 IEMOP_HLP_DONE_DECODING();
5228 else
5229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5231 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5232 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5233 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5234 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5235 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5236 IEM_MC_FETCH_EFLAGS(EFlags);
5237
5238 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5239 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5240 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5241 else
5242 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5243 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5244
5245 IEM_MC_COMMIT_EFLAGS(EFlags);
5246 IEM_MC_ADVANCE_RIP();
5247 IEM_MC_END();
5248 return VINF_SUCCESS;
5249
5250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5251 }
5252 }
5253}
5254
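/*
 * Note: For the memory forms above, the register operand is a *signed* bit
 * offset relative to the effective address, so the worker splits it into a
 * signed element delta (added to the address) and a bit number within the
 * element.  Worked sketch for the 16-bit case, matching the SAR-by-4 /
 * SHL-by-1 pair above (hypothetical helper, illustration only):
 */
#if 0 /* illustrative sketch only */
static void iemSketchBtDecomposeU16(int16_t i16BitOffset, int16_t *piByteDelta, uint16_t *puBitNo)
{
    *puBitNo     = (uint16_t)i16BitOffset & 0xf;       /* IEM_MC_AND_ARG_U16(u16Src, 0x0f) */
    *piByteDelta = (int16_t)((i16BitOffset >> 4) * 2); /* IEM_MC_SAR_LOCAL_S16(,4); IEM_MC_SHL_LOCAL_S16(,1) */
    /* E.g. 'bt word [mem], -1' accesses the word at mem-2 and tests its bit 15. */
}
#endif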
5255
5256/** Opcode 0x0f 0xa3. */
5257FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5258{
5259 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5260 IEMOP_HLP_MIN_386();
5261 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5262}
5263
5264
5265/**
5266 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5267 */
5268FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5269{
5270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5271 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5272
5273 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5274 {
5275 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5277
5278 switch (pVCpu->iem.s.enmEffOpSize)
5279 {
5280 case IEMMODE_16BIT:
5281 IEM_MC_BEGIN(4, 0);
5282 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5283 IEM_MC_ARG(uint16_t, u16Src, 1);
5284 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5285 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5286
5287 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5288 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5289 IEM_MC_REF_EFLAGS(pEFlags);
5290 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5291
5292 IEM_MC_ADVANCE_RIP();
5293 IEM_MC_END();
5294 return VINF_SUCCESS;
5295
5296 case IEMMODE_32BIT:
5297 IEM_MC_BEGIN(4, 0);
5298 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5299 IEM_MC_ARG(uint32_t, u32Src, 1);
5300 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5301 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5302
5303 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5304 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5305 IEM_MC_REF_EFLAGS(pEFlags);
5306 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5307
5308 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5309 IEM_MC_ADVANCE_RIP();
5310 IEM_MC_END();
5311 return VINF_SUCCESS;
5312
5313 case IEMMODE_64BIT:
5314 IEM_MC_BEGIN(4, 0);
5315 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5316 IEM_MC_ARG(uint64_t, u64Src, 1);
5317 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5318 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5319
5320 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5321 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5322 IEM_MC_REF_EFLAGS(pEFlags);
5323 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5324
5325 IEM_MC_ADVANCE_RIP();
5326 IEM_MC_END();
5327 return VINF_SUCCESS;
5328
5329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5330 }
5331 }
5332 else
5333 {
5334 switch (pVCpu->iem.s.enmEffOpSize)
5335 {
5336 case IEMMODE_16BIT:
5337 IEM_MC_BEGIN(4, 2);
5338 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5339 IEM_MC_ARG(uint16_t, u16Src, 1);
5340 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5341 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5343
5344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5345 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5346 IEM_MC_ASSIGN(cShiftArg, cShift);
5347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5348 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5349 IEM_MC_FETCH_EFLAGS(EFlags);
5350 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5351 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5352
5353 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5354 IEM_MC_COMMIT_EFLAGS(EFlags);
5355 IEM_MC_ADVANCE_RIP();
5356 IEM_MC_END();
5357 return VINF_SUCCESS;
5358
5359 case IEMMODE_32BIT:
5360 IEM_MC_BEGIN(4, 2);
5361 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5362 IEM_MC_ARG(uint32_t, u32Src, 1);
5363 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5364 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5366
5367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5368 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5369 IEM_MC_ASSIGN(cShiftArg, cShift);
5370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5371 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5372 IEM_MC_FETCH_EFLAGS(EFlags);
5373 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5374 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5375
5376 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5377 IEM_MC_COMMIT_EFLAGS(EFlags);
5378 IEM_MC_ADVANCE_RIP();
5379 IEM_MC_END();
5380 return VINF_SUCCESS;
5381
5382 case IEMMODE_64BIT:
5383 IEM_MC_BEGIN(4, 2);
5384 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5385 IEM_MC_ARG(uint64_t, u64Src, 1);
5386 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5387 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5389
5390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5391 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5392 IEM_MC_ASSIGN(cShiftArg, cShift);
5393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5394 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5395 IEM_MC_FETCH_EFLAGS(EFlags);
5396 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5397 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5398
5399 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5400 IEM_MC_COMMIT_EFLAGS(EFlags);
5401 IEM_MC_ADVANCE_RIP();
5402 IEM_MC_END();
5403 return VINF_SUCCESS;
5404
5405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5406 }
5407 }
5408}
5409
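/*
 * Editor's note: SHLD shifts the destination left and fills the vacated low
 * bits from the top of the source; SHRD shifts right and fills the vacated
 * high bits from the bottom of the source.  A rough sketch of the 32-bit
 * variants, assuming the count was already masked to 0..31 (the masking and
 * the EFLAGS updates live in the g_iemAImpl_shld/shrd workers, not here; the
 * iemSketch* names are hypothetical):
 */
#if 0 /* illustrative sketch, not compiled */
static uint32_t iemSketchShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    return cShift ? (uDst << cShift) | (uSrc >> (32 - cShift)) : uDst;
}

static uint32_t iemSketchShrd32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    return cShift ? (uDst >> cShift) | (uSrc << (32 - cShift)) : uDst;
}
#endif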
5410
5411/**
5412 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5413 */
5414FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5415{
5416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5417 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5418
5419 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5420 {
5421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5422
5423 switch (pVCpu->iem.s.enmEffOpSize)
5424 {
5425 case IEMMODE_16BIT:
5426 IEM_MC_BEGIN(4, 0);
5427 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5428 IEM_MC_ARG(uint16_t, u16Src, 1);
5429 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5430 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5431
5432 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5433 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5434 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5435 IEM_MC_REF_EFLAGS(pEFlags);
5436 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5437
5438 IEM_MC_ADVANCE_RIP();
5439 IEM_MC_END();
5440 return VINF_SUCCESS;
5441
5442 case IEMMODE_32BIT:
5443 IEM_MC_BEGIN(4, 0);
5444 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5445 IEM_MC_ARG(uint32_t, u32Src, 1);
5446 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5447 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5448
5449 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5450 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5451 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5452 IEM_MC_REF_EFLAGS(pEFlags);
5453 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5454
5455 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5456 IEM_MC_ADVANCE_RIP();
5457 IEM_MC_END();
5458 return VINF_SUCCESS;
5459
5460 case IEMMODE_64BIT:
5461 IEM_MC_BEGIN(4, 0);
5462 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5463 IEM_MC_ARG(uint64_t, u64Src, 1);
5464 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5465 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5466
5467 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5468 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5469 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5470 IEM_MC_REF_EFLAGS(pEFlags);
5471 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5472
5473 IEM_MC_ADVANCE_RIP();
5474 IEM_MC_END();
5475 return VINF_SUCCESS;
5476
5477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5478 }
5479 }
5480 else
5481 {
5482 switch (pVCpu->iem.s.enmEffOpSize)
5483 {
5484 case IEMMODE_16BIT:
5485 IEM_MC_BEGIN(4, 2);
5486 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5487 IEM_MC_ARG(uint16_t, u16Src, 1);
5488 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5489 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5491
5492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5494 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5495 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5496 IEM_MC_FETCH_EFLAGS(EFlags);
5497 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5498 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5499
5500 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5501 IEM_MC_COMMIT_EFLAGS(EFlags);
5502 IEM_MC_ADVANCE_RIP();
5503 IEM_MC_END();
5504 return VINF_SUCCESS;
5505
5506 case IEMMODE_32BIT:
5507 IEM_MC_BEGIN(4, 2);
5508 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5509 IEM_MC_ARG(uint32_t, u32Src, 1);
5510 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5511 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5513
5514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5516 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5517 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5518 IEM_MC_FETCH_EFLAGS(EFlags);
5519 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5520 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5521
5522 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5523 IEM_MC_COMMIT_EFLAGS(EFlags);
5524 IEM_MC_ADVANCE_RIP();
5525 IEM_MC_END();
5526 return VINF_SUCCESS;
5527
5528 case IEMMODE_64BIT:
5529 IEM_MC_BEGIN(4, 2);
5530 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5531 IEM_MC_ARG(uint64_t, u64Src, 1);
5532 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5533 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5535
5536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5538 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5539 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5540 IEM_MC_FETCH_EFLAGS(EFlags);
5541 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5542 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5543
5544 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5545 IEM_MC_COMMIT_EFLAGS(EFlags);
5546 IEM_MC_ADVANCE_RIP();
5547 IEM_MC_END();
5548 return VINF_SUCCESS;
5549
5550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5551 }
5552 }
5553}
5554
5555
5556
5557/** Opcode 0x0f 0xa4. */
5558FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5559{
5560 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5561 IEMOP_HLP_MIN_386();
5562 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5563}
5564
5565
5566/** Opcode 0x0f 0xa5. */
5567FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5568{
5569 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5570 IEMOP_HLP_MIN_386();
5571 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5572}
5573
5574
5575/** Opcode 0x0f 0xa8. */
5576FNIEMOP_DEF(iemOp_push_gs)
5577{
5578 IEMOP_MNEMONIC(push_gs, "push gs");
5579 IEMOP_HLP_MIN_386();
5580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5581 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5582}
5583
5584
5585/** Opcode 0x0f 0xa9. */
5586FNIEMOP_DEF(iemOp_pop_gs)
5587{
5588 IEMOP_MNEMONIC(pop_gs, "pop gs");
5589 IEMOP_HLP_MIN_386();
5590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5591 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5592}
5593
5594
5595/** Opcode 0x0f 0xaa. */
5596FNIEMOP_STUB(iemOp_rsm);
5597//IEMOP_HLP_MIN_386();
5598
5599
5600/** Opcode 0x0f 0xab. */
5601FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5602{
5603 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5604 IEMOP_HLP_MIN_386();
5605 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5606}
5607
5608
5609/** Opcode 0x0f 0xac. */
5610FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5611{
5612 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5613 IEMOP_HLP_MIN_386();
5614 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5615}
5616
5617
5618/** Opcode 0x0f 0xad. */
5619FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5620{
5621 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5622 IEMOP_HLP_MIN_386();
5623 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5624}
5625
5626
5627/** Opcode 0x0f 0xae mem/0. */
5628FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5629{
5630 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5631 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5632 return IEMOP_RAISE_INVALID_OPCODE();
5633
5634 IEM_MC_BEGIN(3, 1);
5635 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5636 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5637 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5641 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5642 IEM_MC_END();
5643 return VINF_SUCCESS;
5644}
5645
5646
5647/** Opcode 0x0f 0xae mem/1. */
5648FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5649{
5650 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5651 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5652 return IEMOP_RAISE_INVALID_OPCODE();
5653
5654 IEM_MC_BEGIN(3, 1);
5655 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5656 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5657 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5660 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5661 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5662 IEM_MC_END();
5663 return VINF_SUCCESS;
5664}
5665
5666
5667/** Opcode 0x0f 0xae mem/2. */
5668FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5669
5670/** Opcode 0x0f 0xae mem/3. */
5671FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5672
5673/** Opcode 0x0f 0xae mem/4. */
5674FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5675
5676/** Opcode 0x0f 0xae mem/5. */
5677FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5678
5679/** Opcode 0x0f 0xae mem/6. */
5680FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5681
5682/** Opcode 0x0f 0xae mem/7. */
5683FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5684
5685
5686/** Opcode 0x0f 0xae 11b/5. */
5687FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5688{
5689 RT_NOREF_PV(bRm);
5690 IEMOP_MNEMONIC(lfence, "lfence");
5691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5692 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5693 return IEMOP_RAISE_INVALID_OPCODE();
5694
5695 IEM_MC_BEGIN(0, 0);
5696 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5697 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5698 else
5699 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5700 IEM_MC_ADVANCE_RIP();
5701 IEM_MC_END();
5702 return VINF_SUCCESS;
5703}
5704
5705
5706/** Opcode 0x0f 0xae 11b/6. */
5707FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5708{
5709 RT_NOREF_PV(bRm);
5710 IEMOP_MNEMONIC(mfence, "mfence");
5711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5712 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5713 return IEMOP_RAISE_INVALID_OPCODE();
5714
5715 IEM_MC_BEGIN(0, 0);
5716 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5717 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5718 else
5719 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5720 IEM_MC_ADVANCE_RIP();
5721 IEM_MC_END();
5722 return VINF_SUCCESS;
5723}
5724
5725
5726/** Opcode 0x0f 0xae 11b/7. */
5727FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5728{
5729 RT_NOREF_PV(bRm);
5730 IEMOP_MNEMONIC(sfence, "sfence");
5731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5732 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5733 return IEMOP_RAISE_INVALID_OPCODE();
5734
5735 IEM_MC_BEGIN(0, 0);
5736 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5737 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5738 else
5739 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5740 IEM_MC_ADVANCE_RIP();
5741 IEM_MC_END();
5742 return VINF_SUCCESS;
5743}
5744
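/*
 * Editor's note: the three fence workers above check the *host* CPU features
 * and fall back to iemAImpl_alt_mem_fence when the host lacks SSE2.  A locked
 * read-modify-write is the classic full-barrier substitute on such CPUs;
 * sketched below with GCC-style inline assembly (an assumption; the real
 * fallback lives in the assembly helper files):
 */
#if 0 /* illustrative sketch, not compiled */
static void iemSketchAltMemFence(void)
{
    __asm__ __volatile__("lock; addl $0, (%%esp)" ::: "memory", "cc");
}
#endif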
5745
5746/** Opcode 0xf3 0x0f 0xae 11b/0. */
5747FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5748
5749/** Opcode 0xf3 0x0f 0xae 11b/1. */
5750FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5751
5752/** Opcode 0xf3 0x0f 0xae 11b/2. */
5753FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5754
5755/** Opcode 0xf3 0x0f 0xae 11b/3. */
5756FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5757
5758
5759/** Opcode 0x0f 0xae. */
5760FNIEMOP_DEF(iemOp_Grp15)
5761{
5762/** @todo continue here tomorrow! (see bs3-cpu-decoding-1.c32 r113507). */
5763    IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */

5764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5765 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5766 {
5767 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5768 {
5769 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5770 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5771 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5772 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5773 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5774 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5775 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5776 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5778 }
5779 }
5780 else
5781 {
5782 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5783 {
5784 case 0:
5785 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5786 {
5787 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5788 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5789 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5790 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5791 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5792 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5793 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5794 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5796 }
5797 break;
5798
5799 case IEM_OP_PRF_REPZ:
5800 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5801 {
5802 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5803 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5804 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5805 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5806 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5807 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5808 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5809 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5810 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5811 }
5812 break;
5813
5814 default:
5815 return IEMOP_RAISE_INVALID_OPCODE();
5816 }
5817 }
5818}
5819
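/*
 * Editor's note, decode summary for group 15 (0x0f 0xae) as implemented
 * above: with a memory operand the reg field selects fxsave, fxrstor,
 * ldmxcsr, stmxcsr, xsave, xrstor, xsaveopt or clflush; with a register
 * operand and no prefix, reg 5/6/7 are lfence/mfence/sfence (reg 0..4 #UD);
 * with a register operand and the F3 prefix, reg 0..3 are rdfsbase,
 * rdgsbase, wrfsbase and wrgsbase (reg 4..7 #UD).  Any other prefix
 * combination on the register forms raises #UD.
 */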
5820
5821/** Opcode 0x0f 0xaf. */
5822FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5823{
5824 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5825 IEMOP_HLP_MIN_386();
5826 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5827 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5828}
5829
5830
5831/** Opcode 0x0f 0xb0. */
5832FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5833{
5834 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5835 IEMOP_HLP_MIN_486();
5836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5837
5838 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5839 {
5840 IEMOP_HLP_DONE_DECODING();
5841 IEM_MC_BEGIN(4, 0);
5842 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5843 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5844 IEM_MC_ARG(uint8_t, u8Src, 2);
5845 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5846
5847 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5848 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5849 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5850 IEM_MC_REF_EFLAGS(pEFlags);
5851 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5852 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5853 else
5854 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5855
5856 IEM_MC_ADVANCE_RIP();
5857 IEM_MC_END();
5858 }
5859 else
5860 {
5861 IEM_MC_BEGIN(4, 3);
5862 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5863 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5864 IEM_MC_ARG(uint8_t, u8Src, 2);
5865 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5867 IEM_MC_LOCAL(uint8_t, u8Al);
5868
5869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5870 IEMOP_HLP_DONE_DECODING();
5871 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5872 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5873 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5874 IEM_MC_FETCH_EFLAGS(EFlags);
5875 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5876 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5877 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5878 else
5879 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5880
5881 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5882 IEM_MC_COMMIT_EFLAGS(EFlags);
5883 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5884 IEM_MC_ADVANCE_RIP();
5885 IEM_MC_END();
5886 }
5887 return VINF_SUCCESS;
5888}
5889
5890/** Opcode 0x0f 0xb1. */
5891FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5892{
5893 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5894 IEMOP_HLP_MIN_486();
5895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5896
5897 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5898 {
5899 IEMOP_HLP_DONE_DECODING();
5900 switch (pVCpu->iem.s.enmEffOpSize)
5901 {
5902 case IEMMODE_16BIT:
5903 IEM_MC_BEGIN(4, 0);
5904 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5905 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5906 IEM_MC_ARG(uint16_t, u16Src, 2);
5907 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5908
5909 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5910 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5911 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5912 IEM_MC_REF_EFLAGS(pEFlags);
5913 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5914 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5915 else
5916 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5917
5918 IEM_MC_ADVANCE_RIP();
5919 IEM_MC_END();
5920 return VINF_SUCCESS;
5921
5922 case IEMMODE_32BIT:
5923 IEM_MC_BEGIN(4, 0);
5924 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5925 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5926 IEM_MC_ARG(uint32_t, u32Src, 2);
5927 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5928
5929 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5930 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5931 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5932 IEM_MC_REF_EFLAGS(pEFlags);
5933 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5934 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5935 else
5936 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5937
5938 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5939 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5940 IEM_MC_ADVANCE_RIP();
5941 IEM_MC_END();
5942 return VINF_SUCCESS;
5943
5944 case IEMMODE_64BIT:
5945 IEM_MC_BEGIN(4, 0);
5946 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5947 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5948#ifdef RT_ARCH_X86
5949 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5950#else
5951 IEM_MC_ARG(uint64_t, u64Src, 2);
5952#endif
5953 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5954
5955 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5956 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5957 IEM_MC_REF_EFLAGS(pEFlags);
5958#ifdef RT_ARCH_X86
5959 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5960 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5961 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5962 else
5963 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5964#else
5965 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5966 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5967 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5968 else
5969 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5970#endif
5971
5972 IEM_MC_ADVANCE_RIP();
5973 IEM_MC_END();
5974 return VINF_SUCCESS;
5975
5976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5977 }
5978 }
5979 else
5980 {
5981 switch (pVCpu->iem.s.enmEffOpSize)
5982 {
5983 case IEMMODE_16BIT:
5984 IEM_MC_BEGIN(4, 3);
5985 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5986 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5987 IEM_MC_ARG(uint16_t, u16Src, 2);
5988 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5990 IEM_MC_LOCAL(uint16_t, u16Ax);
5991
5992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5993 IEMOP_HLP_DONE_DECODING();
5994 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5995 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5996 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5997 IEM_MC_FETCH_EFLAGS(EFlags);
5998 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5999 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6000 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6001 else
6002 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6003
6004 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6005 IEM_MC_COMMIT_EFLAGS(EFlags);
6006 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6007 IEM_MC_ADVANCE_RIP();
6008 IEM_MC_END();
6009 return VINF_SUCCESS;
6010
6011 case IEMMODE_32BIT:
6012 IEM_MC_BEGIN(4, 3);
6013 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6014 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6015 IEM_MC_ARG(uint32_t, u32Src, 2);
6016 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6018 IEM_MC_LOCAL(uint32_t, u32Eax);
6019
6020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6021 IEMOP_HLP_DONE_DECODING();
6022 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6023 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6024 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6025 IEM_MC_FETCH_EFLAGS(EFlags);
6026 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6027 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6028 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6029 else
6030 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6031
6032 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6033 IEM_MC_COMMIT_EFLAGS(EFlags);
6034 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6035 IEM_MC_ADVANCE_RIP();
6036 IEM_MC_END();
6037 return VINF_SUCCESS;
6038
6039 case IEMMODE_64BIT:
6040 IEM_MC_BEGIN(4, 3);
6041 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6042 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6043#ifdef RT_ARCH_X86
6044 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6045#else
6046 IEM_MC_ARG(uint64_t, u64Src, 2);
6047#endif
6048 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6050 IEM_MC_LOCAL(uint64_t, u64Rax);
6051
6052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6053 IEMOP_HLP_DONE_DECODING();
6054 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6055 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6056 IEM_MC_FETCH_EFLAGS(EFlags);
6057 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6058#ifdef RT_ARCH_X86
6059 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6060 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6061 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6062 else
6063 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6064#else
6065 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6066 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6067 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6068 else
6069 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6070#endif
6071
6072 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6073 IEM_MC_COMMIT_EFLAGS(EFlags);
6074 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6075 IEM_MC_ADVANCE_RIP();
6076 IEM_MC_END();
6077 return VINF_SUCCESS;
6078
6079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6080 }
6081 }
6082}
6083
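/*
 * Editor's note: CMPXCHG compares the accumulator with the destination.  On
 * a match ZF is set and the source is stored into the destination; on a
 * mismatch ZF is cleared and the destination value is loaded into the
 * accumulator, which is why the memory forms above write the u16Ax/u32Eax/
 * u64Rax local back with IEM_MC_STORE_GREG_*.  A rough sketch of the 32-bit
 * operation, ignoring the remaining arithmetic flags (iemSketchCmpXchg32 is
 * a hypothetical name, illustrative only):
 */
#if 0 /* illustrative sketch, not compiled */
static void iemSketchCmpXchg32(uint32_t *puDst, uint32_t *puEax, uint32_t uSrc, unsigned *pfZF)
{
    if (*puDst == *puEax)
    {
        *pfZF  = 1;
        *puDst = uSrc;   /* success: store the source operand */
    }
    else
    {
        *pfZF  = 0;
        *puEax = *puDst; /* failure: load the current destination into eax */
    }
}
#endif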
6084
6085FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6086{
6087 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6088 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6089
6090 switch (pVCpu->iem.s.enmEffOpSize)
6091 {
6092 case IEMMODE_16BIT:
6093 IEM_MC_BEGIN(5, 1);
6094 IEM_MC_ARG(uint16_t, uSel, 0);
6095 IEM_MC_ARG(uint16_t, offSeg, 1);
6096 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6097 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6098 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6099 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6102 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6103 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6104 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6105 IEM_MC_END();
6106 return VINF_SUCCESS;
6107
6108 case IEMMODE_32BIT:
6109 IEM_MC_BEGIN(5, 1);
6110 IEM_MC_ARG(uint16_t, uSel, 0);
6111 IEM_MC_ARG(uint32_t, offSeg, 1);
6112 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6113 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6114 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6115 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6118 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6119 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6120 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6121 IEM_MC_END();
6122 return VINF_SUCCESS;
6123
6124 case IEMMODE_64BIT:
6125 IEM_MC_BEGIN(5, 1);
6126 IEM_MC_ARG(uint16_t, uSel, 0);
6127 IEM_MC_ARG(uint64_t, offSeg, 1);
6128 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6129 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6130 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6131 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6134 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6135 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6136 else
6137 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6138 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6139 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6140 IEM_MC_END();
6141 return VINF_SUCCESS;
6142
6143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6144 }
6145}
6146
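/*
 * Editor's note: the far pointer operand (Mp) fetched above is the offset
 * followed by a 16-bit selector, so the selector lives at displacement 2, 4
 * or 8 depending on the effective operand size.  The 32-bit layout, roughly
 * (IEMSKETCHFARPTR32 is a hypothetical name, illustrative only):
 */
#if 0 /* illustrative sketch, not compiled */
# pragma pack(1)
typedef struct IEMSKETCHFARPTR32
{
    uint32_t off;   /* goes into the general register */
    uint16_t sel;   /* goes into SS, FS or GS via iemCImpl_load_SReg_Greg */
} IEMSKETCHFARPTR32;
# pragma pack()
#endif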
6147
6148/** Opcode 0x0f 0xb2. */
6149FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6150{
6151 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6152 IEMOP_HLP_MIN_386();
6153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6155 return IEMOP_RAISE_INVALID_OPCODE();
6156 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6157}
6158
6159
6160/** Opcode 0x0f 0xb3. */
6161FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6162{
6163 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6164 IEMOP_HLP_MIN_386();
6165 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6166}
6167
6168
6169/** Opcode 0x0f 0xb4. */
6170FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6171{
6172 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6173 IEMOP_HLP_MIN_386();
6174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6175 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6176 return IEMOP_RAISE_INVALID_OPCODE();
6177 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6178}
6179
6180
6181/** Opcode 0x0f 0xb5. */
6182FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6183{
6184 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6185 IEMOP_HLP_MIN_386();
6186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6188 return IEMOP_RAISE_INVALID_OPCODE();
6189 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6190}
6191
6192
6193/** Opcode 0x0f 0xb6. */
6194FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6195{
6196 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6197 IEMOP_HLP_MIN_386();
6198
6199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6200
6201 /*
6202     * If rm denotes a register, there are no more instruction bytes.
6203 */
6204 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6205 {
6206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6207 switch (pVCpu->iem.s.enmEffOpSize)
6208 {
6209 case IEMMODE_16BIT:
6210 IEM_MC_BEGIN(0, 1);
6211 IEM_MC_LOCAL(uint16_t, u16Value);
6212 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6213 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6214 IEM_MC_ADVANCE_RIP();
6215 IEM_MC_END();
6216 return VINF_SUCCESS;
6217
6218 case IEMMODE_32BIT:
6219 IEM_MC_BEGIN(0, 1);
6220 IEM_MC_LOCAL(uint32_t, u32Value);
6221 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6222 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6223 IEM_MC_ADVANCE_RIP();
6224 IEM_MC_END();
6225 return VINF_SUCCESS;
6226
6227 case IEMMODE_64BIT:
6228 IEM_MC_BEGIN(0, 1);
6229 IEM_MC_LOCAL(uint64_t, u64Value);
6230 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6231 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6232 IEM_MC_ADVANCE_RIP();
6233 IEM_MC_END();
6234 return VINF_SUCCESS;
6235
6236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6237 }
6238 }
6239 else
6240 {
6241 /*
6242 * We're loading a register from memory.
6243 */
6244 switch (pVCpu->iem.s.enmEffOpSize)
6245 {
6246 case IEMMODE_16BIT:
6247 IEM_MC_BEGIN(0, 2);
6248 IEM_MC_LOCAL(uint16_t, u16Value);
6249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6252 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6253 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6254 IEM_MC_ADVANCE_RIP();
6255 IEM_MC_END();
6256 return VINF_SUCCESS;
6257
6258 case IEMMODE_32BIT:
6259 IEM_MC_BEGIN(0, 2);
6260 IEM_MC_LOCAL(uint32_t, u32Value);
6261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6264 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6265 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6266 IEM_MC_ADVANCE_RIP();
6267 IEM_MC_END();
6268 return VINF_SUCCESS;
6269
6270 case IEMMODE_64BIT:
6271 IEM_MC_BEGIN(0, 2);
6272 IEM_MC_LOCAL(uint64_t, u64Value);
6273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6276 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6277 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6278 IEM_MC_ADVANCE_RIP();
6279 IEM_MC_END();
6280 return VINF_SUCCESS;
6281
6282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6283 }
6284 }
6285}
6286
6287
6288/** Opcode 0x0f 0xb7. */
6289FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6290{
6291 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6292 IEMOP_HLP_MIN_386();
6293
6294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6295
6296 /** @todo Not entirely sure how the operand size prefix is handled here,
6297     * assuming that it will be ignored. It would be nice to have a few
6298     * tests for this. */
6299 /*
6300     * If rm denotes a register, there are no more instruction bytes.
6301 */
6302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6303 {
6304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6305 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6306 {
6307 IEM_MC_BEGIN(0, 1);
6308 IEM_MC_LOCAL(uint32_t, u32Value);
6309 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6310 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6311 IEM_MC_ADVANCE_RIP();
6312 IEM_MC_END();
6313 }
6314 else
6315 {
6316 IEM_MC_BEGIN(0, 1);
6317 IEM_MC_LOCAL(uint64_t, u64Value);
6318 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6319 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6320 IEM_MC_ADVANCE_RIP();
6321 IEM_MC_END();
6322 }
6323 }
6324 else
6325 {
6326 /*
6327 * We're loading a register from memory.
6328 */
6329 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6330 {
6331 IEM_MC_BEGIN(0, 2);
6332 IEM_MC_LOCAL(uint32_t, u32Value);
6333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6336 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6337 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6338 IEM_MC_ADVANCE_RIP();
6339 IEM_MC_END();
6340 }
6341 else
6342 {
6343 IEM_MC_BEGIN(0, 2);
6344 IEM_MC_LOCAL(uint64_t, u64Value);
6345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6348 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6349 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6350 IEM_MC_ADVANCE_RIP();
6351 IEM_MC_END();
6352 }
6353 }
6354 return VINF_SUCCESS;
6355}
6356
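/*
 * Editor's note: only the 16->32 and 16->64 cases need handling above; in
 * 64-bit mode a 32-bit register write already clears bits 63:32, so the
 * 32-bit and 64-bit movzx forms yield the same full register value.  In C
 * the extension is just an unsigned widening conversion (iemSketchMovzx16To64
 * is hypothetical, illustrative only):
 */
#if 0 /* illustrative sketch, not compiled */
static uint64_t iemSketchMovzx16To64(uint16_t uSrc)
{
    return uSrc; /* implicit zero extension; movsx would go via (int16_t) instead */
}
#endif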
6357
6358/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6359FNIEMOP_UD_STUB(iemOp_jmpe);
6360/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6361FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6362
6363
6364/** Opcode 0x0f 0xb9. */
6365FNIEMOP_DEF(iemOp_Grp10)
6366{
6367 Log(("iemOp_Grp10 -> #UD\n"));
6368 return IEMOP_RAISE_INVALID_OPCODE();
6369}
6370
6371
6372/** Opcode 0x0f 0xba. */
6373FNIEMOP_DEF(iemOp_Grp8)
6374{
6375 IEMOP_HLP_MIN_386();
6376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6377 PCIEMOPBINSIZES pImpl;
6378 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6379 {
6380 case 0: case 1: case 2: case 3:
6381 return IEMOP_RAISE_INVALID_OPCODE();
6382 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6383 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6384 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6385 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6387 }
6388 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6389
6390 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6391 {
6392 /* register destination. */
6393 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6395
6396 switch (pVCpu->iem.s.enmEffOpSize)
6397 {
6398 case IEMMODE_16BIT:
6399 IEM_MC_BEGIN(3, 0);
6400 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6401 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6402 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6403
6404 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6405 IEM_MC_REF_EFLAGS(pEFlags);
6406 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6407
6408 IEM_MC_ADVANCE_RIP();
6409 IEM_MC_END();
6410 return VINF_SUCCESS;
6411
6412 case IEMMODE_32BIT:
6413 IEM_MC_BEGIN(3, 0);
6414 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6415 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6416 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6417
6418 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6419 IEM_MC_REF_EFLAGS(pEFlags);
6420 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6421
6422 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6423 IEM_MC_ADVANCE_RIP();
6424 IEM_MC_END();
6425 return VINF_SUCCESS;
6426
6427 case IEMMODE_64BIT:
6428 IEM_MC_BEGIN(3, 0);
6429 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6430 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6431 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6432
6433 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6434 IEM_MC_REF_EFLAGS(pEFlags);
6435 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6436
6437 IEM_MC_ADVANCE_RIP();
6438 IEM_MC_END();
6439 return VINF_SUCCESS;
6440
6441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6442 }
6443 }
6444 else
6445 {
6446 /* memory destination. */
6447
6448 uint32_t fAccess;
6449 if (pImpl->pfnLockedU16)
6450 fAccess = IEM_ACCESS_DATA_RW;
6451 else /* BT */
6452 fAccess = IEM_ACCESS_DATA_R;
6453
6454 /** @todo test negative bit offsets! */
6455 switch (pVCpu->iem.s.enmEffOpSize)
6456 {
6457 case IEMMODE_16BIT:
6458 IEM_MC_BEGIN(3, 1);
6459 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6460 IEM_MC_ARG(uint16_t, u16Src, 1);
6461 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6463
6464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6465 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6466 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6467 if (pImpl->pfnLockedU16)
6468 IEMOP_HLP_DONE_DECODING();
6469 else
6470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6471 IEM_MC_FETCH_EFLAGS(EFlags);
6472 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6473 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6474 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6475 else
6476 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6477 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6478
6479 IEM_MC_COMMIT_EFLAGS(EFlags);
6480 IEM_MC_ADVANCE_RIP();
6481 IEM_MC_END();
6482 return VINF_SUCCESS;
6483
6484 case IEMMODE_32BIT:
6485 IEM_MC_BEGIN(3, 1);
6486 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6487 IEM_MC_ARG(uint32_t, u32Src, 1);
6488 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6490
6491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6492 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6493 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6494 if (pImpl->pfnLockedU16)
6495 IEMOP_HLP_DONE_DECODING();
6496 else
6497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6498 IEM_MC_FETCH_EFLAGS(EFlags);
6499 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6500 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6502 else
6503 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6504 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6505
6506 IEM_MC_COMMIT_EFLAGS(EFlags);
6507 IEM_MC_ADVANCE_RIP();
6508 IEM_MC_END();
6509 return VINF_SUCCESS;
6510
6511 case IEMMODE_64BIT:
6512 IEM_MC_BEGIN(3, 1);
6513 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6514 IEM_MC_ARG(uint64_t, u64Src, 1);
6515 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6517
6518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6519 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6520 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6521 if (pImpl->pfnLockedU16)
6522 IEMOP_HLP_DONE_DECODING();
6523 else
6524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6525 IEM_MC_FETCH_EFLAGS(EFlags);
6526 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6527 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6528 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6529 else
6530 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6531 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6532
6533 IEM_MC_COMMIT_EFLAGS(EFlags);
6534 IEM_MC_ADVANCE_RIP();
6535 IEM_MC_END();
6536 return VINF_SUCCESS;
6537
6538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6539 }
6540 }
6541
6542}
6543
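/*
 * Editor's note: unlike the register bit-offset forms served by
 * iemOpCommonBit_Ev_Gv, the immediate forms above only mask the bit number
 * to the operand width (u8Bit & 0x0f/0x1f/0x3f) and never adjust the
 * effective address, so the access stays within the addressed operand.
 * E.g. for 'bt r/m16, imm8' (iemSketchBtImm16 is hypothetical):
 */
#if 0 /* illustrative sketch, not compiled */
static unsigned iemSketchBtImm16(uint16_t uDst, uint8_t u8Bit)
{
    return (uDst >> (u8Bit & 0x0f)) & 1; /* the bit copied into CF */
}
#endif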
6544
6545/** Opcode 0x0f 0xbb. */
6546FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6547{
6548 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6549 IEMOP_HLP_MIN_386();
6550 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6551}
6552
6553
6554/** Opcode 0x0f 0xbc. */
6555FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6556{
6557 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6558 IEMOP_HLP_MIN_386();
6559 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6560 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6561}
6562
6563
6564/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6565FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6566
6567
6568/** Opcode 0x0f 0xbd. */
6569FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6570{
6571 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6572 IEMOP_HLP_MIN_386();
6573 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6574 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6575}
6576
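/*
 * Editor's note: BSF scans from bit 0 upwards and BSR from the top down;
 * both set ZF when the source is zero, in which case the destination is
 * architecturally undefined (many CPUs leave it unchanged in practice).
 * A sketch of the 32-bit bsf for a non-zero source (iemSketchBsf32 is
 * hypothetical, illustrative only):
 */
#if 0 /* illustrative sketch, not compiled */
static uint32_t iemSketchBsf32(uint32_t uSrc)
{
    uint32_t iBit = 0;
    while (!(uSrc & 1))
    {
        uSrc >>= 1;
        iBit++;
    }
    return iBit; /* index of the lowest set bit */
}
#endif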
6577
6578/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6579FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6580
6581
6582/** Opcode 0x0f 0xbe. */
6583FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6584{
6585 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6586 IEMOP_HLP_MIN_386();
6587
6588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6589
6590 /*
6591     * If rm denotes a register, there are no more instruction bytes.
6592 */
6593 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6594 {
6595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6596 switch (pVCpu->iem.s.enmEffOpSize)
6597 {
6598 case IEMMODE_16BIT:
6599 IEM_MC_BEGIN(0, 1);
6600 IEM_MC_LOCAL(uint16_t, u16Value);
6601 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6602 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6603 IEM_MC_ADVANCE_RIP();
6604 IEM_MC_END();
6605 return VINF_SUCCESS;
6606
6607 case IEMMODE_32BIT:
6608 IEM_MC_BEGIN(0, 1);
6609 IEM_MC_LOCAL(uint32_t, u32Value);
6610 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6611 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6612 IEM_MC_ADVANCE_RIP();
6613 IEM_MC_END();
6614 return VINF_SUCCESS;
6615
6616 case IEMMODE_64BIT:
6617 IEM_MC_BEGIN(0, 1);
6618 IEM_MC_LOCAL(uint64_t, u64Value);
6619 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6620 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6621 IEM_MC_ADVANCE_RIP();
6622 IEM_MC_END();
6623 return VINF_SUCCESS;
6624
6625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6626 }
6627 }
6628 else
6629 {
6630 /*
6631 * We're loading a register from memory.
6632 */
6633 switch (pVCpu->iem.s.enmEffOpSize)
6634 {
6635 case IEMMODE_16BIT:
6636 IEM_MC_BEGIN(0, 2);
6637 IEM_MC_LOCAL(uint16_t, u16Value);
6638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6641 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6642 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6643 IEM_MC_ADVANCE_RIP();
6644 IEM_MC_END();
6645 return VINF_SUCCESS;
6646
6647 case IEMMODE_32BIT:
6648 IEM_MC_BEGIN(0, 2);
6649 IEM_MC_LOCAL(uint32_t, u32Value);
6650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6653 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6654 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6655 IEM_MC_ADVANCE_RIP();
6656 IEM_MC_END();
6657 return VINF_SUCCESS;
6658
6659 case IEMMODE_64BIT:
6660 IEM_MC_BEGIN(0, 2);
6661 IEM_MC_LOCAL(uint64_t, u64Value);
6662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6665 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6666 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6667 IEM_MC_ADVANCE_RIP();
6668 IEM_MC_END();
6669 return VINF_SUCCESS;
6670
6671 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6672 }
6673 }
6674}
6675
6676
6677/** Opcode 0x0f 0xbf. */
6678FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6679{
6680 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6681 IEMOP_HLP_MIN_386();
6682
6683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6684
6685 /** @todo Not entirely sure how the operand size prefix is handled here,
6686     * assuming that it will be ignored. It would be nice to have a few
6687     * tests for this. */
6688 /*
6689     * If rm denotes a register, there are no more instruction bytes.
6690 */
6691 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6692 {
6693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6694 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6695 {
6696 IEM_MC_BEGIN(0, 1);
6697 IEM_MC_LOCAL(uint32_t, u32Value);
6698 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6699 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6700 IEM_MC_ADVANCE_RIP();
6701 IEM_MC_END();
6702 }
6703 else
6704 {
6705 IEM_MC_BEGIN(0, 1);
6706 IEM_MC_LOCAL(uint64_t, u64Value);
6707 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6708 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6709 IEM_MC_ADVANCE_RIP();
6710 IEM_MC_END();
6711 }
6712 }
6713 else
6714 {
6715 /*
6716 * We're loading a register from memory.
6717 */
6718 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6719 {
6720 IEM_MC_BEGIN(0, 2);
6721 IEM_MC_LOCAL(uint32_t, u32Value);
6722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6725 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6726 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6727 IEM_MC_ADVANCE_RIP();
6728 IEM_MC_END();
6729 }
6730 else
6731 {
6732 IEM_MC_BEGIN(0, 2);
6733 IEM_MC_LOCAL(uint64_t, u64Value);
6734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6737 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6738 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6739 IEM_MC_ADVANCE_RIP();
6740 IEM_MC_END();
6741 }
6742 }
6743 return VINF_SUCCESS;
6744}
6745
6746
6747/** Opcode 0x0f 0xc0. */
6748FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6749{
6750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6751 IEMOP_HLP_MIN_486();
6752 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6753
6754 /*
6755     * If rm denotes a register, there are no more instruction bytes.
6756 */
6757 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6758 {
6759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6760
6761 IEM_MC_BEGIN(3, 0);
6762 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6763 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6764 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6765
6766 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6767 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6768 IEM_MC_REF_EFLAGS(pEFlags);
6769 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6770
6771 IEM_MC_ADVANCE_RIP();
6772 IEM_MC_END();
6773 }
6774 else
6775 {
6776 /*
6777 * We're accessing memory.
6778 */
6779 IEM_MC_BEGIN(3, 3);
6780 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6781 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6782 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6783 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6785
6786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6787 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6788 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6789 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6790 IEM_MC_FETCH_EFLAGS(EFlags);
6791 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6792 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6793 else
6794 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6795
6796 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6797 IEM_MC_COMMIT_EFLAGS(EFlags);
6798 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6799 IEM_MC_ADVANCE_RIP();
6800 IEM_MC_END();
6802 }
6803 return VINF_SUCCESS;
6804}
6805
6806
6807/** Opcode 0x0f 0xc1. */
6808FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6809{
6810 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6811 IEMOP_HLP_MIN_486();
6812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6813
6814 /*
6815     * If rm denotes a register, there are no more instruction bytes.
6816 */
6817 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6818 {
6819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6820
6821 switch (pVCpu->iem.s.enmEffOpSize)
6822 {
6823 case IEMMODE_16BIT:
6824 IEM_MC_BEGIN(3, 0);
6825 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6826 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6827 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6828
6829 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6830 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6831 IEM_MC_REF_EFLAGS(pEFlags);
6832 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6833
6834 IEM_MC_ADVANCE_RIP();
6835 IEM_MC_END();
6836 return VINF_SUCCESS;
6837
6838 case IEMMODE_32BIT:
6839 IEM_MC_BEGIN(3, 0);
6840 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6841 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6842 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6843
6844 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6845 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6846 IEM_MC_REF_EFLAGS(pEFlags);
6847 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6848
6849 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6850 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6851 IEM_MC_ADVANCE_RIP();
6852 IEM_MC_END();
6853 return VINF_SUCCESS;
6854
6855 case IEMMODE_64BIT:
6856 IEM_MC_BEGIN(3, 0);
6857 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6858 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6859 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6860
6861 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6862 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6863 IEM_MC_REF_EFLAGS(pEFlags);
6864 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6865
6866 IEM_MC_ADVANCE_RIP();
6867 IEM_MC_END();
6868 return VINF_SUCCESS;
6869
6870 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6871 }
6872 }
6873 else
6874 {
6875 /*
6876 * We're accessing memory.
6877 */
6878 switch (pVCpu->iem.s.enmEffOpSize)
6879 {
6880 case IEMMODE_16BIT:
6881 IEM_MC_BEGIN(3, 3);
6882 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6883 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6884 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6885 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6887
6888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6889 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6890 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6891 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6892 IEM_MC_FETCH_EFLAGS(EFlags);
6893 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6894 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6895 else
6896 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6897
6898 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6899 IEM_MC_COMMIT_EFLAGS(EFlags);
6900 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6901 IEM_MC_ADVANCE_RIP();
6902 IEM_MC_END();
6903 return VINF_SUCCESS;
6904
6905 case IEMMODE_32BIT:
6906 IEM_MC_BEGIN(3, 3);
6907 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6908 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6909 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6910 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6912
6913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6914 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6915 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6916 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6917 IEM_MC_FETCH_EFLAGS(EFlags);
6918 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6919 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6920 else
6921 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6922
6923 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6924 IEM_MC_COMMIT_EFLAGS(EFlags);
6925 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6926 IEM_MC_ADVANCE_RIP();
6927 IEM_MC_END();
6928 return VINF_SUCCESS;
6929
6930 case IEMMODE_64BIT:
6931 IEM_MC_BEGIN(3, 3);
6932 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6933 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6934 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6935 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6937
6938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6939 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6940 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6941 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6942 IEM_MC_FETCH_EFLAGS(EFlags);
6943 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6944 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6945 else
6946 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6947
6948 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6949 IEM_MC_COMMIT_EFLAGS(EFlags);
6950 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6951 IEM_MC_ADVANCE_RIP();
6952 IEM_MC_END();
6953 return VINF_SUCCESS;
6954
6955 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6956 }
6957 }
6958}
6959
6960
6961/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
6962FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
6963/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
6964FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
6965/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
6966FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
6967/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
6968FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
6969
6970
6971/** Opcode 0x0f 0xc3. */
6972FNIEMOP_DEF(iemOp_movnti_My_Gy)
6973{
6974 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6975
6976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6977
6978 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6979 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6980 {
6981 switch (pVCpu->iem.s.enmEffOpSize)
6982 {
6983 case IEMMODE_32BIT:
6984 IEM_MC_BEGIN(0, 2);
6985 IEM_MC_LOCAL(uint32_t, u32Value);
6986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6987
6988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
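                /* Note: the SSE2 feature check is done only after the
                   instruction has been fully decoded, presumably so that
                   opcode byte consumption stays consistent when raising #UD. */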
6990 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6991 return IEMOP_RAISE_INVALID_OPCODE();
6992
6993 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6994 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6995 IEM_MC_ADVANCE_RIP();
6996 IEM_MC_END();
6997 break;
6998
6999 case IEMMODE_64BIT:
7000 IEM_MC_BEGIN(0, 2);
7001 IEM_MC_LOCAL(uint64_t, u64Value);
7002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7003
7004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7006 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7007 return IEMOP_RAISE_INVALID_OPCODE();
7008
7009 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7010 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7011 IEM_MC_ADVANCE_RIP();
7012 IEM_MC_END();
7013 break;
7014
7015 case IEMMODE_16BIT:
7016 /** @todo check this form. */
7017 return IEMOP_RAISE_INVALID_OPCODE();
7018 }
7019 }
7020 else
7021 return IEMOP_RAISE_INVALID_OPCODE();
7022 return VINF_SUCCESS;
7023}
7024/* Opcode 0x66 0x0f 0xc3 - invalid */
7025/* Opcode 0xf3 0x0f 0xc3 - invalid */
7026/* Opcode 0xf2 0x0f 0xc3 - invalid */
7027
7028/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7029FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7030/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7031FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7032/* Opcode 0xf3 0x0f 0xc4 - invalid */
7033/* Opcode 0xf2 0x0f 0xc4 - invalid */
7034
7035/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7036FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7037/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7038FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7039/* Opcode 0xf3 0x0f 0xc5 - invalid */
7040/* Opcode 0xf2 0x0f 0xc5 - invalid */
7041
7042/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7043FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7044/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7045FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7046/* Opcode 0xf3 0x0f 0xc6 - invalid */
7047/* Opcode 0xf2 0x0f 0xc6 - invalid */
7048
7049
7050/** Opcode 0x0f 0xc7 !11/1. */
7051FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7052{
7053 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7054
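    /* cmpxchg8b compares EDX:EAX with the 64-bit memory operand; on a match
       it stores ECX:EBX there and sets ZF, otherwise it loads the memory
       value into EDX:EAX and clears ZF. */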
7055 IEM_MC_BEGIN(4, 3);
7056 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7057 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7058 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7059 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7060 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7061 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7063
7064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7065 IEMOP_HLP_DONE_DECODING();
7066 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7067
7068 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7069 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7070 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7071
7072 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7073 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7074 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7075
7076 IEM_MC_FETCH_EFLAGS(EFlags);
7077 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7078 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7079 else
7080 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7081
7082 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7083 IEM_MC_COMMIT_EFLAGS(EFlags);
7084 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7085 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7086 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7087 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7088 IEM_MC_ENDIF();
7089 IEM_MC_ADVANCE_RIP();
7090
7091 IEM_MC_END();
7092 return VINF_SUCCESS;
7093}
7094
7095
7096/** Opcode REX.W 0x0f 0xc7 !11/1. */
7097FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7098{
7099 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7100 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7101 {
7102#if 0
7103 RT_NOREF(bRm);
7104 IEMOP_BITCH_ABOUT_STUB();
7105 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7106#else
7107 IEM_MC_BEGIN(4, 3);
7108 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7109 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7110 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7111 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7112 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7113 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7115
7116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7117 IEMOP_HLP_DONE_DECODING();
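        /* cmpxchg16b requires the memory operand to be 16 byte aligned,
           raising #GP(0) otherwise. */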
7118 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7119 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7120
7121 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7122 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7123 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7124
7125 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7126 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7127 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7128
7129 IEM_MC_FETCH_EFLAGS(EFlags);
7130# ifdef RT_ARCH_AMD64
7131 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7132 {
7133 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7134 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7135 else
7136 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7137 }
7138 else
7139# endif
7140 {
7141             /* Note! The fallback for 32-bit systems and systems without CX16 does
7142                multiple accesses which are not all atomic; that works fine in a
7143                uni-CPU guest configuration (ignoring DMA).  If guest SMP is active
7144                we have no choice but to use a rendezvous callback here.  Sigh. */
7145 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7146 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7147 else
7148 {
7149 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7150 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7151 }
7152 }
7153
7154 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7155 IEM_MC_COMMIT_EFLAGS(EFlags);
7156 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7157 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7158 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7159 IEM_MC_ENDIF();
7160 IEM_MC_ADVANCE_RIP();
7161
7162 IEM_MC_END();
7163 return VINF_SUCCESS;
7164#endif
7165 }
7166 Log(("cmpxchg16b -> #UD\n"));
7167 return IEMOP_RAISE_INVALID_OPCODE();
7168}
7169
7170
7171/** Opcode 0x0f 0xc7 11/6. */
7172FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7173
7174/** Opcode 0x0f 0xc7 !11/6. */
7175FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7176
7177/** Opcode 0x66 0x0f 0xc7 !11/6. */
7178FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7179
7180/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7181FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7182
7183/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7184FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7185
7186
7187/** Opcode 0x0f 0xc7. */
7188FNIEMOP_DEF(iemOp_Grp9)
7189{
7190 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
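    /* Group 9 dispatches on the ModR/M reg field: /1 is cmpxchg8b/16b (memory
       only), /6 is rdrand (register form) or vmptrld/vmclear/vmxon (memory
       form, selected by prefix), and /7 is vmptrst. */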
7192 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7193 {
7194 case 0: case 2: case 3: case 4: case 5:
7195 return IEMOP_RAISE_INVALID_OPCODE();
7196 case 1:
7197 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7198 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7199 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7200 return IEMOP_RAISE_INVALID_OPCODE();
7201 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7202 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7203 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7204 case 6:
7205 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7206 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7207 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7208 {
7209 case 0:
7210 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7211 case IEM_OP_PRF_SIZE_OP:
7212 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7213 case IEM_OP_PRF_REPZ:
7214 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7215 default:
7216 return IEMOP_RAISE_INVALID_OPCODE();
7217 }
7218 case 7:
7219 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7220 {
7221 case 0:
7222 case IEM_OP_PRF_REPZ:
7223 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7224 default:
7225 return IEMOP_RAISE_INVALID_OPCODE();
7226 }
7227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7228 }
7229}
7230
7231
7232/**
7233 * Common 'bswap register' helper.
7234 */
7235FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7236{
7237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7238 switch (pVCpu->iem.s.enmEffOpSize)
7239 {
7240 case IEMMODE_16BIT:
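            /* Note: BSWAP with a 16-bit operand is documented as undefined;
               the u16 helper presumably mimics common hardware behaviour. */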
7241 IEM_MC_BEGIN(1, 0);
7242 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7243 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7244 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7245 IEM_MC_ADVANCE_RIP();
7246 IEM_MC_END();
7247 return VINF_SUCCESS;
7248
7249 case IEMMODE_32BIT:
7250 IEM_MC_BEGIN(1, 0);
7251 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7252 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7253 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7254 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7255 IEM_MC_ADVANCE_RIP();
7256 IEM_MC_END();
7257 return VINF_SUCCESS;
7258
7259 case IEMMODE_64BIT:
7260 IEM_MC_BEGIN(1, 0);
7261 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7262 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7263 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7264 IEM_MC_ADVANCE_RIP();
7265 IEM_MC_END();
7266 return VINF_SUCCESS;
7267
7268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7269 }
7270}
7271
7272
7273/** Opcode 0x0f 0xc8. */
7274FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7275{
7276 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7277    /* Note! The Intel manuals state that R8-R15 can be accessed by using a
7278       REX.X prefix.  In practice REX.B appears to be the correct prefix.  For
7279       a parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7280 IEMOP_HLP_MIN_486();
7281 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7282}
7283
7284
7285/** Opcode 0x0f 0xc9. */
7286FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7287{
7288 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7289 IEMOP_HLP_MIN_486();
7290 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7291}
7292
7293
7294/** Opcode 0x0f 0xca. */
7295FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7296{
7297    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7298 IEMOP_HLP_MIN_486();
7299 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7300}
7301
7302
7303/** Opcode 0x0f 0xcb. */
7304FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7305{
7306    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7307 IEMOP_HLP_MIN_486();
7308 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7309}
7310
7311
7312/** Opcode 0x0f 0xcc. */
7313FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7314{
7315 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7316 IEMOP_HLP_MIN_486();
7317 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7318}
7319
7320
7321/** Opcode 0x0f 0xcd. */
7322FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7323{
7324 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7325 IEMOP_HLP_MIN_486();
7326 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7327}
7328
7329
7330/** Opcode 0x0f 0xce. */
7331FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7332{
7333 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7334 IEMOP_HLP_MIN_486();
7335 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7336}
7337
7338
7339/** Opcode 0x0f 0xcf. */
7340FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7341{
7342 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7343 IEMOP_HLP_MIN_486();
7344 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7345}
7346
7347
7348/* Opcode 0x0f 0xd0 - invalid */
7349/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7350FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7351/* Opcode 0xf3 0x0f 0xd0 - invalid */
7352/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7353FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7354
7355/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7356FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7357/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7358FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7359/* Opcode 0xf3 0x0f 0xd1 - invalid */
7360/* Opcode 0xf2 0x0f 0xd1 - invalid */
7361
7362/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7363FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7364/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7365FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7366/* Opcode 0xf3 0x0f 0xd2 - invalid */
7367/* Opcode 0xf2 0x0f 0xd2 - invalid */
7368
7369/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7370FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7371/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7372FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7373/* Opcode 0xf3 0x0f 0xd3 - invalid */
7374/* Opcode 0xf2 0x0f 0xd3 - invalid */
7375
7376/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7377FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7378/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7379FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7380/* Opcode 0xf3 0x0f 0xd4 - invalid */
7381/* Opcode 0xf2 0x0f 0xd4 - invalid */
7382
7383/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7384FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7385/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7386FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7387/* Opcode 0xf3 0x0f 0xd5 - invalid */
7388/* Opcode 0xf2 0x0f 0xd5 - invalid */
7389
7390/* Opcode 0x0f 0xd6 - invalid */
7391/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7392FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7393/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7394FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7395/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7396FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7397#if 0
7398FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7399{
7400    /* Docs say register only. */
7401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7402
7403 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7404 {
7405 case IEM_OP_PRF_SIZE_OP: /* SSE */
7406 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7407 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7408 IEM_MC_BEGIN(2, 0);
7409 IEM_MC_ARG(uint64_t *, pDst, 0);
7410 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7411 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7412 IEM_MC_PREPARE_SSE_USAGE();
7413 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7414 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7415 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7416 IEM_MC_ADVANCE_RIP();
7417 IEM_MC_END();
7418 return VINF_SUCCESS;
7419
7420 case 0: /* MMX */
7421            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7422 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7423 IEM_MC_BEGIN(2, 0);
7424 IEM_MC_ARG(uint64_t *, pDst, 0);
7425 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7426 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7427 IEM_MC_PREPARE_FPU_USAGE();
7428 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7429 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7430 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7431 IEM_MC_ADVANCE_RIP();
7432 IEM_MC_END();
7433 return VINF_SUCCESS;
7434
7435 default:
7436 return IEMOP_RAISE_INVALID_OPCODE();
7437 }
7438}
7439#endif
7440
7441
7442/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7443FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7444{
7445    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7446    /** @todo testcase: Check that the instruction implicitly clears the high
7447     *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
7448     *        and opcode modifications are made to work with the whole width (not
7449     *        just 128). */
7450    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
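    /* pmovmskb gathers the most significant bit of each source byte into the
       low bits of the destination general register (8 bits for the 64-bit
       MMX source). */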
7451    /* Docs say register only. */
7452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7453 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7454 {
7455 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7456 IEM_MC_BEGIN(2, 0);
7457 IEM_MC_ARG(uint64_t *, pDst, 0);
7458 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7459 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7460 IEM_MC_PREPARE_FPU_USAGE();
7461 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7462 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7463 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7464 IEM_MC_ADVANCE_RIP();
7465 IEM_MC_END();
7466 return VINF_SUCCESS;
7467 }
7468 return IEMOP_RAISE_INVALID_OPCODE();
7469}
7470
7471/** Opcode 0x66 0x0f 0xd7 - */
7472FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7473{
7474    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7475    /** @todo testcase: Check that the instruction implicitly clears the high
7476     *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
7477     *        and opcode modifications are made to work with the whole width (not
7478     *        just 128). */
7479    IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7480    /* Docs say register only. */
7481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7482 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7483 {
7484 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7485 IEM_MC_BEGIN(2, 0);
7486 IEM_MC_ARG(uint64_t *, pDst, 0);
7487 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7488 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7489 IEM_MC_PREPARE_SSE_USAGE();
7490 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7491 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7492 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7493 IEM_MC_ADVANCE_RIP();
7494 IEM_MC_END();
7495 return VINF_SUCCESS;
7496 }
7497 return IEMOP_RAISE_INVALID_OPCODE();
7498}
7499
7500/* Opcode 0xf3 0x0f 0xd7 - invalid */
7501/* Opcode 0xf2 0x0f 0xd7 - invalid */
7502
7503
7504/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7505FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7506/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7507FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7508/* Opcode 0xf3 0x0f 0xd8 - invalid */
7509/* Opcode 0xf2 0x0f 0xd8 - invalid */
7510
7511/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7512FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7513/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7514FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7515/* Opcode 0xf3 0x0f 0xd9 - invalid */
7516/* Opcode 0xf2 0x0f 0xd9 - invalid */
7517
7518/** Opcode 0x0f 0xda - pminub Pq, Qq */
7519FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7520/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7521FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7522/* Opcode 0xf3 0x0f 0xda - invalid */
7523/* Opcode 0xf2 0x0f 0xda - invalid */
7524
7525/** Opcode 0x0f 0xdb - pand Pq, Qq */
7526FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7527/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7528FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7529/* Opcode 0xf3 0x0f 0xdb - invalid */
7530/* Opcode 0xf2 0x0f 0xdb - invalid */
7531
7532/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7533FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7534/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7535FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7536/* Opcode 0xf3 0x0f 0xdc - invalid */
7537/* Opcode 0xf2 0x0f 0xdc - invalid */
7538
7539/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7540FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7541/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7542FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7543/* Opcode 0xf3 0x0f 0xdd - invalid */
7544/* Opcode 0xf2 0x0f 0xdd - invalid */
7545
7546/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7547FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7548/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7549FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7550/* Opcode 0xf3 0x0f 0xde - invalid */
7551/* Opcode 0xf2 0x0f 0xde - invalid */
7552
7553/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7554FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7555/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7556FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7557/* Opcode 0xf3 0x0f 0xdf - invalid */
7558/* Opcode 0xf2 0x0f 0xdf - invalid */
7559
7560/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7561FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7562/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7563FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7564/* Opcode 0xf3 0x0f 0xe0 - invalid */
7565/* Opcode 0xf2 0x0f 0xe0 - invalid */
7566
7567/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7568FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7569/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7570FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7571/* Opcode 0xf3 0x0f 0xe1 - invalid */
7572/* Opcode 0xf2 0x0f 0xe1 - invalid */
7573
7574/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7575FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7576/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7577FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7578/* Opcode 0xf3 0x0f 0xe2 - invalid */
7579/* Opcode 0xf2 0x0f 0xe2 - invalid */
7580
7581/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7582FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7583/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7584FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7585/* Opcode 0xf3 0x0f 0xe3 - invalid */
7586/* Opcode 0xf2 0x0f 0xe3 - invalid */
7587
7588/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7589FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7590/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7591FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7592/* Opcode 0xf3 0x0f 0xe4 - invalid */
7593/* Opcode 0xf2 0x0f 0xe4 - invalid */
7594
7595/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7596FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7597/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7598FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7599/* Opcode 0xf3 0x0f 0xe5 - invalid */
7600/* Opcode 0xf2 0x0f 0xe5 - invalid */
7601
7602/* Opcode 0x0f 0xe6 - invalid */
7603/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7604FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7605/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7606FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7607/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7608FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7609
7610
7611/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7612FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7613{
7614 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7616 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7617 {
7618 /* Register, memory. */
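        /* The non-temporal hint only affects caching, so for emulation
           purposes a plain 64-bit store from the MMX register suffices. */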
7619 IEM_MC_BEGIN(0, 2);
7620 IEM_MC_LOCAL(uint64_t, uSrc);
7621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7622
7623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7625 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7626 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7627
7628 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7629 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7630
7631 IEM_MC_ADVANCE_RIP();
7632 IEM_MC_END();
7633 return VINF_SUCCESS;
7634 }
7635 /* The register, register encoding is invalid. */
7636 return IEMOP_RAISE_INVALID_OPCODE();
7637}
7638
7639/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7640FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7641{
7642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7643 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7644 {
7645 /* Register, memory. */
7646 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7647 IEM_MC_BEGIN(0, 2);
7648 IEM_MC_LOCAL(uint128_t, uSrc);
7649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7650
7651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7653 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7654 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7655
7656 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7657 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7658
7659 IEM_MC_ADVANCE_RIP();
7660 IEM_MC_END();
7661 return VINF_SUCCESS;
7662 }
7663
7664 /* The register, register encoding is invalid. */
7665 return IEMOP_RAISE_INVALID_OPCODE();
7666}
7667
7668/* Opcode 0xf3 0x0f 0xe7 - invalid */
7669/* Opcode 0xf2 0x0f 0xe7 - invalid */
7670
7671
7672/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7673FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7674/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7675FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7676/* Opcode 0xf3 0x0f 0xe8 - invalid */
7677/* Opcode 0xf2 0x0f 0xe8 - invalid */
7678
7679/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7680FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7681/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7682FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7683/* Opcode 0xf3 0x0f 0xe9 - invalid */
7684/* Opcode 0xf2 0x0f 0xe9 - invalid */
7685
7686/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7687FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7688/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7689FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7690/* Opcode 0xf3 0x0f 0xea - invalid */
7691/* Opcode 0xf2 0x0f 0xea - invalid */
7692
7693/** Opcode 0x0f 0xeb - por Pq, Qq */
7694FNIEMOP_STUB(iemOp_por_Pq_Qq);
7695/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7696FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7697/* Opcode 0xf3 0x0f 0xeb - invalid */
7698/* Opcode 0xf2 0x0f 0xeb - invalid */
7699
7700/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7701FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7702/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7703FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7704/* Opcode 0xf3 0x0f 0xec - invalid */
7705/* Opcode 0xf2 0x0f 0xec - invalid */
7706
7707/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7708FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7709/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7710FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7711/* Opcode 0xf3 0x0f 0xed - invalid */
7712/* Opcode 0xf2 0x0f 0xed - invalid */
7713
7714/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7715FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7716/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7717FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7718/* Opcode 0xf3 0x0f 0xee - invalid */
7719/* Opcode 0xf2 0x0f 0xee - invalid */
7720
7721
7722/** Opcode 0x0f 0xef - pxor Pq, Qq */
7723FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7724{
7725 IEMOP_MNEMONIC(pxor, "pxor");
7726 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7727}
7728
7729/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7730FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7731{
7732 IEMOP_MNEMONIC(vpxor, "vpxor");
7733 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7734}
7735
7736/* Opcode 0xf3 0x0f 0xef - invalid */
7737/* Opcode 0xf2 0x0f 0xef - invalid */
7738
7739/* Opcode 0x0f 0xf0 - invalid */
7740/* Opcode 0x66 0x0f 0xf0 - invalid */
7741/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7742FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7743
7744/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7745FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7746/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7747FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7748/* Opcode 0xf2 0x0f 0xf1 - invalid */
7749
7750/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7751FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7752/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7753FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7754/* Opcode 0xf2 0x0f 0xf2 - invalid */
7755
7756/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7757FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7758/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7759FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7760/* Opcode 0xf2 0x0f 0xf3 - invalid */
7761
7762/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7763FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7764/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7765FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7766/* Opcode 0xf2 0x0f 0xf4 - invalid */
7767
7768/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7769FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7770/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7771FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7772/* Opcode 0xf2 0x0f 0xf5 - invalid */
7773
7774/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7775FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7776/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7777FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7778/* Opcode 0xf2 0x0f 0xf6 - invalid */
7779
7780/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7781FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7782/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7783FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7784/* Opcode 0xf2 0x0f 0xf7 - invalid */
7785
7786/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7787FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7788/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7789FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7790/* Opcode 0xf2 0x0f 0xf8 - invalid */
7791
7792/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7793FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7794/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7795FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7796/* Opcode 0xf2 0x0f 0xf9 - invalid */
7797
7798/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7799FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7800/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7801FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7802/* Opcode 0xf2 0x0f 0xfa - invalid */
7803
7804/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7805FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7806/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7807FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7808/* Opcode 0xf2 0x0f 0xfb - invalid */
7809
7810/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7811FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7812/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7813FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7814/* Opcode 0xf2 0x0f 0xfc - invalid */
7815
7816/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7817FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7818/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7819FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7820/* Opcode 0xf2 0x0f 0xfd - invalid */
7821
7822/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7823FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7824/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7825FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7826/* Opcode 0xf2 0x0f 0xfe - invalid */
7827
7828
7829/** Opcode **** 0x0f 0xff - UD0 */
7830FNIEMOP_DEF(iemOp_ud0)
7831{
7832 IEMOP_MNEMONIC(ud0, "ud0");
7833 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7834 {
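        /* On Intel CPUs UD0 consumes a ModR/M byte (and any displacement/SIB
           bytes) before raising #UD, while AMD raises #UD on the opcode byte
           alone; hence the effective address calculation below. */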
7835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7836#ifndef TST_IEM_CHECK_MC
7837 RTGCPTR GCPtrEff;
7838 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7839 if (rcStrict != VINF_SUCCESS)
7840 return rcStrict;
7841#endif
7842 IEMOP_HLP_DONE_DECODING();
7843 }
7844 return IEMOP_RAISE_INVALID_OPCODE();
7845}
7846
7847
7848
7849/**
7850 * Two byte opcode map, first byte 0x0f.
7851 *
7852 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7853 * check if it needs updating as well when making changes.
7854 */
7855IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7856{
7857    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
7858 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7859 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7860 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7861 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7862 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7863 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7864 /* 0x06 */ IEMOP_X4(iemOp_clts),
7865 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7866 /* 0x08 */ IEMOP_X4(iemOp_invd),
7867 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7868 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7869 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7870 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7871 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7872 /* 0x0e */ IEMOP_X4(iemOp_femms),
7873 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7874
7875 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7876 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7877 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7878 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7879 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7880 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7881 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7882 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7883 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7884 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7885 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7886 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7887 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7888 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7889 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7890 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7891
7892 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7893 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7894 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7895 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7896 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7897 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7898 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7899 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7900 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7901 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7902 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7903 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7904 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7905 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7906 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7907 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7908
7909 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7910 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7911 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7912 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7913 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7914 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7915 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7916 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7917 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7918 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7919 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7920 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7921 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7922 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7923 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7924 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7925
7926 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7927 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7928 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7929 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7930 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7931 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7932 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7933 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7934 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7935 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7936 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7937 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7938 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7939 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7940 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7941 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7942
7943 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7944 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7945 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7946 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7947 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7948 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7949 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7950 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7951 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7952 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7953 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7954 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7955 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7956 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7957 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7958 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7959
7960 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7961 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7962 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7963 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7964 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7965 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7966 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7967 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7968 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7969 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7970 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7971 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7972 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7973 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7974 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7975 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7976
7977 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7978 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7979 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7980 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7981 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7982 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7983 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7984 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7985
7986 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7987 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7988 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7989 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7990 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7991 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7992 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7993 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7994
7995 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7996 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7997 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7998 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7999 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8000 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8001 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8002 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8003 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8004 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8005 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8006 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8007 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8008 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8009 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8010 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8011
8012 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8013 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8014 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8015 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8016 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8017 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8018 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8019 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8020 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8021 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8022 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8023 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8024 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8025 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8026 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8027 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8028
8029 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8030 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8031 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8032 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8033 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8034 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8035 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8036 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8037 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8038 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8039 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8040 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8041 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8042 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8043 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8044 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8045
8046 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8047 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8048 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8049 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8050 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8051 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8052 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8053 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8054 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8055 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8056 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8057 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8058 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8059 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8060 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8061 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8062
8063 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8064 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8065 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8066 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8067 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8068 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8069    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8070 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8071 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8072 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8073 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8074 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8075 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8076 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8077 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8078 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8079
8080 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8081 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8082 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8083 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8084 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8085 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8086 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8087 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8088 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8089 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8090 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8091 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8092 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8093 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8094 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8095 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8096
8097 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8098 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8099 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8100 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8101 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8102 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8103 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8104 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8105 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8106 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8107 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8108 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8109 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8110 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8111 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8112 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8113
8114 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8115 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8116 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8117 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8118 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8119 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8120 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8121 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8122 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8123 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8124 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8125 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8126 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8127 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8128 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8129 /* 0xff */ IEMOP_X4(iemOp_ud0),
8130};
8131AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
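/* Note: the decoder presumably indexes this map as
       g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix]
   with idxPrefix being 0 (no prefix), 1 (0x66), 2 (0xf3) or 3 (0xf2), which
   is what the 256 * 4 = 1024 entry check above guards.  (A sketch only; the
   actual lookup lives in the decoder core, not in this file.) */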
8132
8133
8134/**
8135 * VEX opcode map \#1.
8136 *
8137 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
8138 *          it needs updating too when making changes.
8139 */
8140IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
8141{
8142    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8143 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
8144 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
8145 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
8146 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
8147 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
8148 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
8149 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
8150 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
8151 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
8152 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
8153 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
/** @} */
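
/*
 * Editor's dispatch sketch (an illustrative assumption, not code from this
 * file): each opcode byte owns four consecutive slots in the map above, one
 * per mandatory prefix in the column order none, 0x66, 0xF3, 0xF2.  That is
 * why IEMOP_X4 repeats an entry four times and why the AssertCompile above
 * checks for 256 * 4 = 1024 entries.  A decoder could therefore look up a
 * handler as sketched below; the helper name and its parameters are
 * hypothetical.
 */
#if 0 /* sketch only - not compiled */
/**
 * Looks up the handler for a two-byte (0x0f xx) opcode (hypothetical helper).
 *
 * @returns Pointer to the opcode handler.
 * @param   bOpcode     The opcode byte following the 0x0f escape.
 * @param   idxPrefix   Mandatory prefix index: 0 = none, 1 = 0x66,
 *                      2 = 0xF3, 3 = 0xF2.
 */
DECLINLINE(PFNIEMOP) iemLookupTwoByteOpSketch(uint8_t bOpcode, unsigned idxPrefix)
{
    Assert(idxPrefix < 4);
    /* Four slots per opcode byte, one per mandatory prefix. */
    return g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif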