VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@65758

Last change on this file since 65758 was 65758, checked in by vboxsync, 8 years ago

IEM: Split out IEMAllInstructionsOneByte.cpp.h and IEMAllInstructionsTwoByte0f.cpp.h from IEMAllInstructions.cpp.h.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 285.1 KB
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 65758 2017-02-13 09:44:19Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/** @name ..... opcodes.
26 *
27 * @{
28 */
29
30/** @} */
31
32
33/** @name Two byte opcodes (first byte 0x0f).
34 *
35 * @{
36 */
37
38/** Opcode 0x0f 0x00 /0. */
39FNIEMOPRM_DEF(iemOp_Grp6_sldt)
40{
41 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
42 IEMOP_HLP_MIN_286();
43 IEMOP_HLP_NO_REAL_OR_V86_MODE();
44
45 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
46 {
47 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
48 switch (pVCpu->iem.s.enmEffOpSize)
49 {
50 case IEMMODE_16BIT:
51 IEM_MC_BEGIN(0, 1);
52 IEM_MC_LOCAL(uint16_t, u16Ldtr);
53 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
54 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 break;
58
59 case IEMMODE_32BIT:
60 IEM_MC_BEGIN(0, 1);
61 IEM_MC_LOCAL(uint32_t, u32Ldtr);
62 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
63 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
64 IEM_MC_ADVANCE_RIP();
65 IEM_MC_END();
66 break;
67
68 case IEMMODE_64BIT:
69 IEM_MC_BEGIN(0, 1);
70 IEM_MC_LOCAL(uint64_t, u64Ldtr);
71 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
72 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
73 IEM_MC_ADVANCE_RIP();
74 IEM_MC_END();
75 break;
76
77 IEM_NOT_REACHED_DEFAULT_CASE_RET();
78 }
79 }
80 else
81 {
82 IEM_MC_BEGIN(0, 2);
83 IEM_MC_LOCAL(uint16_t, u16Ldtr);
84 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
85 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
86 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
87 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
88 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
89 IEM_MC_ADVANCE_RIP();
90 IEM_MC_END();
91 }
92 return VINF_SUCCESS;
93}
94
95
96/** Opcode 0x0f 0x00 /1. */
97FNIEMOPRM_DEF(iemOp_Grp6_str)
98{
99 IEMOP_MNEMONIC(str, "str Rv/Mw");
100 IEMOP_HLP_MIN_286();
101 IEMOP_HLP_NO_REAL_OR_V86_MODE();
102
103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
104 {
105 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
106 switch (pVCpu->iem.s.enmEffOpSize)
107 {
108 case IEMMODE_16BIT:
109 IEM_MC_BEGIN(0, 1);
110 IEM_MC_LOCAL(uint16_t, u16Tr);
111 IEM_MC_FETCH_TR_U16(u16Tr);
112 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 break;
116
117 case IEMMODE_32BIT:
118 IEM_MC_BEGIN(0, 1);
119 IEM_MC_LOCAL(uint32_t, u32Tr);
120 IEM_MC_FETCH_TR_U32(u32Tr);
121 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
122 IEM_MC_ADVANCE_RIP();
123 IEM_MC_END();
124 break;
125
126 case IEMMODE_64BIT:
127 IEM_MC_BEGIN(0, 1);
128 IEM_MC_LOCAL(uint64_t, u64Tr);
129 IEM_MC_FETCH_TR_U64(u64Tr);
130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 break;
134
135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
136 }
137 }
138 else
139 {
140 IEM_MC_BEGIN(0, 2);
141 IEM_MC_LOCAL(uint16_t, u16Tr);
142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
144 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
145 IEM_MC_FETCH_TR_U16(u16Tr);
146 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 }
150 return VINF_SUCCESS;
151}
152
153
154/** Opcode 0x0f 0x00 /2. */
155FNIEMOPRM_DEF(iemOp_Grp6_lldt)
156{
157 IEMOP_MNEMONIC(lldt, "lldt Ew");
158 IEMOP_HLP_MIN_286();
159 IEMOP_HLP_NO_REAL_OR_V86_MODE();
160
161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
162 {
163 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
164 IEM_MC_BEGIN(1, 0);
165 IEM_MC_ARG(uint16_t, u16Sel, 0);
166 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
167 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
168 IEM_MC_END();
169 }
170 else
171 {
172 IEM_MC_BEGIN(1, 1);
173 IEM_MC_ARG(uint16_t, u16Sel, 0);
174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
176 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
177 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
178 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
179 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
180 IEM_MC_END();
181 }
182 return VINF_SUCCESS;
183}
184
185
186/** Opcode 0x0f 0x00 /3. */
187FNIEMOPRM_DEF(iemOp_Grp6_ltr)
188{
189 IEMOP_MNEMONIC(ltr, "ltr Ew");
190 IEMOP_HLP_MIN_286();
191 IEMOP_HLP_NO_REAL_OR_V86_MODE();
192
193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
194 {
195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
196 IEM_MC_BEGIN(1, 0);
197 IEM_MC_ARG(uint16_t, u16Sel, 0);
198 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
199 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
200 IEM_MC_END();
201 }
202 else
203 {
204 IEM_MC_BEGIN(1, 1);
205 IEM_MC_ARG(uint16_t, u16Sel, 0);
206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
209 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
210 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
211 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
212 IEM_MC_END();
213 }
214 return VINF_SUCCESS;
215}
216
217
218/** Common worker for Group 6 VERR (0x0f 0x00 /4) and VERW (0x0f 0x00 /5). */
219FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
220{
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_NO_REAL_OR_V86_MODE();
223
224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
225 {
226 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
227 IEM_MC_BEGIN(2, 0);
228 IEM_MC_ARG(uint16_t, u16Sel, 0);
229 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
230 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
231 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(2, 1);
237 IEM_MC_ARG(uint16_t, u16Sel, 0);
238 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
241 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
242 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
243 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
244 IEM_MC_END();
245 }
246 return VINF_SUCCESS;
247}
248
249
250/** Opcode 0x0f 0x00 /4. */
251FNIEMOPRM_DEF(iemOp_Grp6_verr)
252{
253 IEMOP_MNEMONIC(verr, "verr Ew");
254 IEMOP_HLP_MIN_286();
255 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
256}
257
258
259/** Opcode 0x0f 0x00 /5. */
260FNIEMOPRM_DEF(iemOp_Grp6_verw)
261{
262 IEMOP_MNEMONIC(verw, "verw Ew");
263 IEMOP_HLP_MIN_286();
264 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
265}
266
267
268/**
269 * Group 6 jump table.
270 */
271IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
272{
273 iemOp_Grp6_sldt,
274 iemOp_Grp6_str,
275 iemOp_Grp6_lldt,
276 iemOp_Grp6_ltr,
277 iemOp_Grp6_verr,
278 iemOp_Grp6_verw,
279 iemOp_InvalidWithRM,
280 iemOp_InvalidWithRM
281};
282
283/** Opcode 0x0f 0x00. */
284FNIEMOP_DEF(iemOp_Grp6)
285{
286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
287 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
288}
289
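/*
 * [Editorial example] A minimal sketch of the ModRM field extraction the
 * dispatcher above relies on, assuming the standard mask/shift values behind
 * the X86_MODRM_* constants.  Names and the sample byte are illustrative
 * only and not part of the emulator, hence the #if 0 guard.
 */
#if 0
# include <stdint.h>
# include <stdio.h>

int main(void)
{
    uint8_t const  bRm  = 0xd3;            /* e.g. "lldt bx": 0x0f 0x00 /2, register form */
    unsigned const iMod = bRm >> 6;        /* 3 = register operand (mod == 3) */
    unsigned const iReg = (bRm >> 3) & 7;  /* 2 = index into g_apfnGroup6 (lldt) */
    unsigned const iRm  = bRm & 7;         /* 3 = bx (REX.B ignored in this sketch) */
    printf("mod=%u reg=%u rm=%u\n", iMod, iReg, iRm);
    return 0;
}
#endif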
290
291/** Opcode 0x0f 0x01 /0. */
292FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
293{
294 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
295 IEMOP_HLP_MIN_286();
296 IEMOP_HLP_64BIT_OP_SIZE();
297 IEM_MC_BEGIN(2, 1);
298 IEM_MC_ARG(uint8_t, iEffSeg, 0);
299 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
303 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
304 IEM_MC_END();
305 return VINF_SUCCESS;
306}
307
308
309/** Opcode 0x0f 0x01 0xc1. */
310FNIEMOP_DEF(iemOp_Grp7_vmcall)
311{
312 IEMOP_BITCH_ABOUT_STUB();
313 return IEMOP_RAISE_INVALID_OPCODE();
314}
315
316
317/** Opcode 0x0f 0x01 0xc2. */
318FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
319{
320 IEMOP_BITCH_ABOUT_STUB();
321 return IEMOP_RAISE_INVALID_OPCODE();
322}
323
324
325/** Opcode 0x0f 0x01 0xc3. */
326FNIEMOP_DEF(iemOp_Grp7_vmresume)
327{
328 IEMOP_BITCH_ABOUT_STUB();
329 return IEMOP_RAISE_INVALID_OPCODE();
330}
331
332
333/** Opcode 0x0f 0x01 0xc4. */
334FNIEMOP_DEF(iemOp_Grp7_vmxoff)
335{
336 IEMOP_BITCH_ABOUT_STUB();
337 return IEMOP_RAISE_INVALID_OPCODE();
338}
339
340
341/** Opcode 0x0f 0x01 /1. */
342FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
343{
344 IEMOP_MNEMONIC(sidt, "sidt Ms");
345 IEMOP_HLP_MIN_286();
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(2, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
352 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
353 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
354 IEM_MC_END();
355 return VINF_SUCCESS;
356}
357
358
359/** Opcode 0x0f 0x01 0xc8. */
360FNIEMOP_DEF(iemOp_Grp7_monitor)
361{
362 IEMOP_MNEMONIC(monitor, "monitor");
363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
364 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
365}
366
367
368/** Opcode 0x0f 0x01 0xc9. */
369FNIEMOP_DEF(iemOp_Grp7_mwait)
370{
371 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
373 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
374}
375
376
377/** Opcode 0x0f 0x01 /2. */
378FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
379{
380 IEMOP_MNEMONIC(lgdt, "lgdt");
381 IEMOP_HLP_64BIT_OP_SIZE();
382 IEM_MC_BEGIN(3, 1);
383 IEM_MC_ARG(uint8_t, iEffSeg, 0);
384 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
389 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
390 IEM_MC_END();
391 return VINF_SUCCESS;
392}
393
394
395/** Opcode 0x0f 0x01 0xd0. */
396FNIEMOP_DEF(iemOp_Grp7_xgetbv)
397{
398 IEMOP_MNEMONIC(xgetbv, "xgetbv");
399 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
400 {
401 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
403 }
404 return IEMOP_RAISE_INVALID_OPCODE();
405}
406
407
408/** Opcode 0x0f 0x01 0xd1. */
409FNIEMOP_DEF(iemOp_Grp7_xsetbv)
410{
411 IEMOP_MNEMONIC(xsetbv, "xsetbv");
412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
413 {
414 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
416 }
417 return IEMOP_RAISE_INVALID_OPCODE();
418}
419
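/*
 * [Editorial example] Guest-side view of the two handlers above: reading
 * XCR0 with the compiler intrinsic that emits 0x0f 0x01 0xd0.  A hedged
 * sketch, assuming GCC/Clang with XSAVE support enabled (-mxsave); not part
 * of the emulator, hence the #if 0 guard.
 */
#if 0
# include <immintrin.h>
static unsigned long long readXcr0Example(void)
{
    return _xgetbv(0); /* index 0 selects XCR0, the only architectural XCR so far */
}
#endif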
420
421/** Opcode 0x0f 0x01 /3. */
422FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
423{
424 IEMOP_MNEMONIC(lidt, "lidt");
425 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
426 ? IEMMODE_64BIT
427 : pVCpu->iem.s.enmEffOpSize;
428 IEM_MC_BEGIN(3, 1);
429 IEM_MC_ARG(uint8_t, iEffSeg, 0);
430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
431 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
434 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
435 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
436 IEM_MC_END();
437 return VINF_SUCCESS;
438}
439
440
441/** Opcode 0x0f 0x01 0xd8. */
442FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
443
444/** Opcode 0x0f 0x01 0xd9. */
445FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
446
447/** Opcode 0x0f 0x01 0xda. */
448FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
449
450/** Opcode 0x0f 0x01 0xdb. */
451FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
452
453/** Opcode 0x0f 0x01 0xdc. */
454FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
455
456/** Opcode 0x0f 0x01 0xdd. */
457FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
458
459/** Opcode 0x0f 0x01 0xde. */
460FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
461
462/** Opcode 0x0f 0x01 0xdf. */
463FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
464
465/** Opcode 0x0f 0x01 /4. */
466FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
467{
468 IEMOP_MNEMONIC(smsw, "smsw");
469 IEMOP_HLP_MIN_286();
470 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
471 {
472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
473 switch (pVCpu->iem.s.enmEffOpSize)
474 {
475 case IEMMODE_16BIT:
476 IEM_MC_BEGIN(0, 1);
477 IEM_MC_LOCAL(uint16_t, u16Tmp);
478 IEM_MC_FETCH_CR0_U16(u16Tmp);
479 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
480 { /* likely */ }
481 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
482 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
483 else
484 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
485 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
486 IEM_MC_ADVANCE_RIP();
487 IEM_MC_END();
488 return VINF_SUCCESS;
489
490 case IEMMODE_32BIT:
491 IEM_MC_BEGIN(0, 1);
492 IEM_MC_LOCAL(uint32_t, u32Tmp);
493 IEM_MC_FETCH_CR0_U32(u32Tmp);
494 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
495 IEM_MC_ADVANCE_RIP();
496 IEM_MC_END();
497 return VINF_SUCCESS;
498
499 case IEMMODE_64BIT:
500 IEM_MC_BEGIN(0, 1);
501 IEM_MC_LOCAL(uint64_t, u64Tmp);
502 IEM_MC_FETCH_CR0_U64(u64Tmp);
503 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
504 IEM_MC_ADVANCE_RIP();
505 IEM_MC_END();
506 return VINF_SUCCESS;
507
508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
509 }
510 }
511 else
512 {
513 /* Ignore operand size here, memory refs are always 16-bit. */
514 IEM_MC_BEGIN(0, 2);
515 IEM_MC_LOCAL(uint16_t, u16Tmp);
516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
519 IEM_MC_FETCH_CR0_U16(u16Tmp);
520 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
521 { /* likely */ }
522 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
523 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
524 else
525 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
526 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
527 IEM_MC_ADVANCE_RIP();
528 IEM_MC_END();
529 return VINF_SUCCESS;
530 }
531}
532
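/*
 * [Editorial example] What the 16-bit SMSW paths above compute for the
 * different target CPUs: the undefined upper MSW bits read as ones on the
 * 286 (bits 15:4) and 386 (bits 15:5), while 486+ return CR0[15:0]
 * unchanged.  Illustrative helper with made-up names, hence the #if 0.
 */
#if 0
# include <stdint.h>
static uint16_t smswMswExample(uint32_t uCr0, unsigned uTargetCpu)
{
    uint16_t uMsw = (uint16_t)uCr0;
    if (uTargetCpu == 286)      uMsw |= 0xfff0; /* 286: bits 15:4 read as set */
    else if (uTargetCpu == 386) uMsw |= 0xffe0; /* 386: bits 15:5 read as set */
    return uMsw;                                /* 486+: stored as-is */
}
#endif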
533
534/** Opcode 0x0f 0x01 /6. */
535FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
536{
537 /* The operand size is effectively ignored, all is 16-bit and only the
538 lower 4 bits (PE, MP, EM, TS) are used. */
539 IEMOP_MNEMONIC(lmsw, "lmsw");
540 IEMOP_HLP_MIN_286();
541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
542 {
543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
544 IEM_MC_BEGIN(1, 0);
545 IEM_MC_ARG(uint16_t, u16Tmp, 0);
546 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
547 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
548 IEM_MC_END();
549 }
550 else
551 {
552 IEM_MC_BEGIN(1, 1);
553 IEM_MC_ARG(uint16_t, u16Tmp, 0);
554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
557 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
558 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
559 IEM_MC_END();
560 }
561 return VINF_SUCCESS;
562}
563
564
565/** Opcode 0x0f 0x01 /7. */
566FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
567{
568 IEMOP_MNEMONIC(invlpg, "invlpg");
569 IEMOP_HLP_MIN_486();
570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
571 IEM_MC_BEGIN(1, 1);
572 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
574 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
575 IEM_MC_END();
576 return VINF_SUCCESS;
577}
578
579
580/** Opcode 0x0f 0x01 0xf8. */
581FNIEMOP_DEF(iemOp_Grp7_swapgs)
582{
583 IEMOP_MNEMONIC(swapgs, "swapgs");
584 IEMOP_HLP_ONLY_64BIT();
585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
586 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
587}
588
589
590/** Opcode 0x0f 0x01 0xf9. */
591FNIEMOP_DEF(iemOp_Grp7_rdtscp)
592{
593 NOREF(pVCpu);
594 IEMOP_BITCH_ABOUT_STUB();
595 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
596}
597
598
599/** Opcode 0x0f 0x01. */
600FNIEMOP_DEF(iemOp_Grp7)
601{
602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
603 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
604 {
605 case 0:
606 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
607 return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
608 switch (bRm & X86_MODRM_RM_MASK)
609 {
610 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
611 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
612 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
613 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
614 }
615 return IEMOP_RAISE_INVALID_OPCODE();
616
617 case 1:
618 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
619 return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
620 switch (bRm & X86_MODRM_RM_MASK)
621 {
622 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
623 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
624 }
625 return IEMOP_RAISE_INVALID_OPCODE();
626
627 case 2:
628 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
629 return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
630 switch (bRm & X86_MODRM_RM_MASK)
631 {
632 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
633 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
634 }
635 return IEMOP_RAISE_INVALID_OPCODE();
636
637 case 3:
638 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
639 return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
640 switch (bRm & X86_MODRM_RM_MASK)
641 {
642 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
643 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
644 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
645 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
646 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
647 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
648 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
649 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
651 }
652
653 case 4:
654 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
655
656 case 5:
657 return IEMOP_RAISE_INVALID_OPCODE();
658
659 case 6:
660 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
661
662 case 7:
663 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
664 return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
665 switch (bRm & X86_MODRM_RM_MASK)
666 {
667 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
668 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
669 }
670 return IEMOP_RAISE_INVALID_OPCODE();
671
672 IEM_NOT_REACHED_DEFAULT_CASE_RET();
673 }
674}
675
676/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
677FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
678{
679 IEMOP_HLP_NO_REAL_OR_V86_MODE();
680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
681
682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
683 {
684 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
685 switch (pVCpu->iem.s.enmEffOpSize)
686 {
687 case IEMMODE_16BIT:
688 {
689 IEM_MC_BEGIN(3, 0);
690 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
691 IEM_MC_ARG(uint16_t, u16Sel, 1);
692 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
693
694 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
695 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
696 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
697
698 IEM_MC_END();
699 return VINF_SUCCESS;
700 }
701
702 case IEMMODE_32BIT:
703 case IEMMODE_64BIT:
704 {
705 IEM_MC_BEGIN(3, 0);
706 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
707 IEM_MC_ARG(uint16_t, u16Sel, 1);
708 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
709
710 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
711 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
712 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
713
714 IEM_MC_END();
715 return VINF_SUCCESS;
716 }
717
718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
719 }
720 }
721 else
722 {
723 switch (pVCpu->iem.s.enmEffOpSize)
724 {
725 case IEMMODE_16BIT:
726 {
727 IEM_MC_BEGIN(3, 1);
728 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 1);
730 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
732
733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
734 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
735
736 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
737 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
738 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
739
740 IEM_MC_END();
741 return VINF_SUCCESS;
742 }
743
744 case IEMMODE_32BIT:
745 case IEMMODE_64BIT:
746 {
747 IEM_MC_BEGIN(3, 1);
748 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
749 IEM_MC_ARG(uint16_t, u16Sel, 1);
750 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
752
753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
754 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
755/** @todo testcase: make sure it's a 16-bit read. */
756
757 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
758 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
759 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
760
761 IEM_MC_END();
762 return VINF_SUCCESS;
763 }
764
765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
766 }
767 }
768}
769
770
771
772/** Opcode 0x0f 0x02. */
773FNIEMOP_DEF(iemOp_lar_Gv_Ew)
774{
775 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
776 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
777}
778
779
780/** Opcode 0x0f 0x03. */
781FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
782{
783 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
784 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
785}
786
787
788/** Opcode 0x0f 0x05. */
789FNIEMOP_DEF(iemOp_syscall)
790{
791 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
793 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
794}
795
796
797/** Opcode 0x0f 0x06. */
798FNIEMOP_DEF(iemOp_clts)
799{
800 IEMOP_MNEMONIC(clts, "clts");
801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
802 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
803}
804
805
806/** Opcode 0x0f 0x07. */
807FNIEMOP_DEF(iemOp_sysret)
808{
809 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
812}
813
814
815/** Opcode 0x0f 0x08. */
816FNIEMOP_STUB(iemOp_invd);
817// IEMOP_HLP_MIN_486();
818
819
820/** Opcode 0x0f 0x09. */
821FNIEMOP_DEF(iemOp_wbinvd)
822{
823 IEMOP_MNEMONIC(wbinvd, "wbinvd");
824 IEMOP_HLP_MIN_486();
825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
826 IEM_MC_BEGIN(0, 0);
827 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
828 IEM_MC_ADVANCE_RIP();
829 IEM_MC_END();
830 return VINF_SUCCESS; /* ignore for now */
831}
832
833
834/** Opcode 0x0f 0x0b. */
835FNIEMOP_DEF(iemOp_ud2)
836{
837 IEMOP_MNEMONIC(ud2, "ud2");
838 return IEMOP_RAISE_INVALID_OPCODE();
839}
840
841/** Opcode 0x0f 0x0d. */
842FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
843{
844 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
845 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
846 {
847 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
848 return IEMOP_RAISE_INVALID_OPCODE();
849 }
850
851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
852 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
853 {
854 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
855 return IEMOP_RAISE_INVALID_OPCODE();
856 }
857
858 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
859 {
860 case 2: /* Aliased to /0 for the time being. */
861 case 4: /* Aliased to /0 for the time being. */
862 case 5: /* Aliased to /0 for the time being. */
863 case 6: /* Aliased to /0 for the time being. */
864 case 7: /* Aliased to /0 for the time being. */
865 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
866 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
867 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
869 }
870
871 IEM_MC_BEGIN(0, 1);
872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
875 /* Currently a NOP. */
876 NOREF(GCPtrEffSrc);
877 IEM_MC_ADVANCE_RIP();
878 IEM_MC_END();
879 return VINF_SUCCESS;
880}
881
882
883/** Opcode 0x0f 0x0e. */
884FNIEMOP_STUB(iemOp_femms);
885
886
887/** Opcode 0x0f 0x0f 0x0c. */
888FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
889
890/** Opcode 0x0f 0x0f 0x0d. */
891FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
892
893/** Opcode 0x0f 0x0f 0x1c. */
894FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
895
896/** Opcode 0x0f 0x0f 0x1d. */
897FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
898
899/** Opcode 0x0f 0x0f 0x8a. */
900FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
901
902/** Opcode 0x0f 0x0f 0x8e. */
903FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
904
905/** Opcode 0x0f 0x0f 0x90. */
906FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
907
908/** Opcode 0x0f 0x0f 0x94. */
909FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
910
911/** Opcode 0x0f 0x0f 0x96. */
912FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
913
914/** Opcode 0x0f 0x0f 0x97. */
915FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
916
917/** Opcode 0x0f 0x0f 0x9a. */
918FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
919
920/** Opcode 0x0f 0x0f 0x9e. */
921FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
922
923/** Opcode 0x0f 0x0f 0xa0. */
924FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
925
926/** Opcode 0x0f 0x0f 0xa4. */
927FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
928
929/** Opcode 0x0f 0x0f 0xa6. */
930FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
931
932/** Opcode 0x0f 0x0f 0xa7. */
933FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
934
935/** Opcode 0x0f 0x0f 0xaa. */
936FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
937
938/** Opcode 0x0f 0x0f 0xae. */
939FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
940
941/** Opcode 0x0f 0x0f 0xb0. */
942FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
943
944/** Opcode 0x0f 0x0f 0xb4. */
945FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
946
947/** Opcode 0x0f 0x0f 0xb6. */
948FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
949
950/** Opcode 0x0f 0x0f 0xb7. */
951FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
952
953/** Opcode 0x0f 0x0f 0xbb. */
954FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
955
956/** Opcode 0x0f 0x0f 0xbf. */
957FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
958
959
960/** Opcode 0x0f 0x0f. */
961FNIEMOP_DEF(iemOp_3Dnow)
962{
963 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
964 {
965 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
966 return IEMOP_RAISE_INVALID_OPCODE();
967 }
968
969 /* This is pretty sparse, use switch instead of table. */
970 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
971 switch (b)
972 {
973 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
974 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
975 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
976 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
977 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
978 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
979 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
980 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
981 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
982 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
983 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
984 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
985 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
986 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
987 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
988 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
989 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
990 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
991 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
992 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
993 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
994 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
995 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
996 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
997 default:
998 return IEMOP_RAISE_INVALID_OPCODE();
999 }
1000}
1001
1002
1003/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1004FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1005/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1006FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1007/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1008FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1009/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1010FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1011
1012
1013/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
1014FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1015{
1016 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1018 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1019 {
1020 /*
1021 * Register, register.
1022 */
1023 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1024 IEM_MC_BEGIN(0, 0);
1025 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1026 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1027 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1028 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1029 IEM_MC_ADVANCE_RIP();
1030 IEM_MC_END();
1031 }
1032 else
1033 {
1034 /*
1035 * Memory, register.
1036 */
1037 IEM_MC_BEGIN(0, 2);
1038 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1040
1041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1042 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1043 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1044 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1045
1046 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1047 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1048
1049 IEM_MC_ADVANCE_RIP();
1050 IEM_MC_END();
1051 }
1052 return VINF_SUCCESS;
1053}
1054
1055
1056/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1057FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1058
1059/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1060FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1061
1062/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1063FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1064{
1065 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1067 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1068 {
1069 /*
1070 * Register, register.
1071 */
1072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1073 IEM_MC_BEGIN(0, 1);
1074 IEM_MC_LOCAL(uint64_t, uSrc);
1075
1076 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1077 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1078 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1079 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1080
1081 IEM_MC_ADVANCE_RIP();
1082 IEM_MC_END();
1083 }
1084 else
1085 {
1086 /*
1087 * Memory, register.
1088 */
1089 IEM_MC_BEGIN(0, 2);
1090 IEM_MC_LOCAL(uint64_t, uSrc);
1091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1092
1093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1095 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1096 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1097
1098 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1099 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1100
1101 IEM_MC_ADVANCE_RIP();
1102 IEM_MC_END();
1103 }
1104 return VINF_SUCCESS;
1105}
1106
1107
1108/** Opcode 0x0f 0x12. */
1109FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1110
1111/** Opcode 0x66 0x0f 0x12. */
1112FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1113
1114/** Opcode 0xf3 0x0f 0x12. */
1115FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1116
1117/** Opcode 0xf2 0x0f 0x12. */
1118FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1119
1120/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1121FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1122
1123/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1124FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1125{
1126 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1128 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1129 {
1130#if 0
1131 /*
1132 * Register, register.
1133 */
1134 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1135 IEM_MC_BEGIN(0, 1);
1136 IEM_MC_LOCAL(uint64_t, uSrc);
1137 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1138 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1139 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1140 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1141 IEM_MC_ADVANCE_RIP();
1142 IEM_MC_END();
1143#else
1144 return IEMOP_RAISE_INVALID_OPCODE();
1145#endif
1146 }
1147 else
1148 {
1149 /*
1150 * Memory, register.
1151 */
1152 IEM_MC_BEGIN(0, 2);
1153 IEM_MC_LOCAL(uint64_t, uSrc);
1154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1155
1156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1157 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1158 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1159 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1160
1161 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1162 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1163
1164 IEM_MC_ADVANCE_RIP();
1165 IEM_MC_END();
1166 }
1167 return VINF_SUCCESS;
1168}
1169
1170/* Opcode 0xf3 0x0f 0x13 - invalid */
1171/* Opcode 0xf2 0x0f 0x13 - invalid */
1172
1173/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx */
1174FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1175/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1176FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1177/* Opcode 0xf3 0x0f 0x14 - invalid */
1178/* Opcode 0xf2 0x0f 0x14 - invalid */
1179/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1180FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1181/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1182FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1183/* Opcode 0xf3 0x0f 0x15 - invalid */
1184/* Opcode 0xf2 0x0f 0x15 - invalid */
1185/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1186FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1187/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1188FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1189/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1190FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1191/* Opcode 0xf2 0x0f 0x16 - invalid */
1192/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1193FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1194/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1195FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1196/* Opcode 0xf3 0x0f 0x17 - invalid */
1197/* Opcode 0xf2 0x0f 0x17 - invalid */
1198
1199
1200/** Opcode 0x0f 0x18. */
1201FNIEMOP_DEF(iemOp_prefetch_Grp16)
1202{
1203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1204 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1205 {
1206 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1207 {
1208 case 4: /* Aliased to /0 for the time being according to AMD. */
1209 case 5: /* Aliased to /0 for the time being according to AMD. */
1210 case 6: /* Aliased to /0 for the time being according to AMD. */
1211 case 7: /* Aliased to /0 for the time being according to AMD. */
1212 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1213 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1214 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1215 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1217 }
1218
1219 IEM_MC_BEGIN(0, 1);
1220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1223 /* Currently a NOP. */
1224 NOREF(GCPtrEffSrc);
1225 IEM_MC_ADVANCE_RIP();
1226 IEM_MC_END();
1227 return VINF_SUCCESS;
1228 }
1229
1230 return IEMOP_RAISE_INVALID_OPCODE();
1231}
1232
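/*
 * [Editorial example] Guest-side view of the 0x0f 0x18 hints above: this is
 * what <xmmintrin.h> _mm_prefetch() compiles down to.  Treating them as NOPs,
 * as the handler does, is architecturally permitted since prefetches are only
 * hints.  Illustrative only, hence the #if 0 guard.
 */
#if 0
# include <xmmintrin.h>
static void prefetchExample(const void *pv)
{
    _mm_prefetch((const char *)pv, _MM_HINT_NTA); /* prefetchnta: 0x0f 0x18 /0 */
    _mm_prefetch((const char *)pv, _MM_HINT_T0);  /* prefetcht0:  0x0f 0x18 /1 */
}
#endif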
1233
1234/** Opcode 0x0f 0x19..0x1f. */
1235FNIEMOP_DEF(iemOp_nop_Ev)
1236{
1237 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1239 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1240 {
1241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1242 IEM_MC_BEGIN(0, 0);
1243 IEM_MC_ADVANCE_RIP();
1244 IEM_MC_END();
1245 }
1246 else
1247 {
1248 IEM_MC_BEGIN(0, 1);
1249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1252 /* Currently a NOP. */
1253 NOREF(GCPtrEffSrc);
1254 IEM_MC_ADVANCE_RIP();
1255 IEM_MC_END();
1256 }
1257 return VINF_SUCCESS;
1258}
1259
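/*
 * [Editorial note] 0x0f 0x1f /0 is the multi-byte NOP recommended by both
 * Intel and AMD (e.g. 66 0F 1F 44 00 00 for the 6-byte form), which is why
 * the whole 0x0f 0x19..0x1f row decodes as NOP Ev above.
 */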
1260
1261/** Opcode 0x0f 0x20. */
1262FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1263{
1264 /* Mod is ignored, as are operand size overrides. */
1265 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1266 IEMOP_HLP_MIN_386();
1267 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1268 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1269 else
1270 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1271
1272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1273 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1274 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1275 {
1276 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1277 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1278 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1279 iCrReg |= 8;
1280 }
1281 switch (iCrReg)
1282 {
1283 case 0: case 2: case 3: case 4: case 8:
1284 break;
1285 default:
1286 return IEMOP_RAISE_INVALID_OPCODE();
1287 }
1288 IEMOP_HLP_DONE_DECODING();
1289
1290 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1291}
1292
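/*
 * [Editorial note] On the CR-number calculation above: the ModRM reg field
 * supplies bits 2:0 and REX.R supplies bit 3 in 64-bit mode.  The
 * fMovCr8In32Bit feature corresponds to AMD's AltMovCr8 (CPUID 0x80000001
 * ECX bit 4), which lets 32-bit code reach CR8 by prepending a LOCK prefix.
 */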
1293
1294/** Opcode 0x0f 0x21. */
1295FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1296{
1297 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1298 IEMOP_HLP_MIN_386();
1299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1302 return IEMOP_RAISE_INVALID_OPCODE();
1303 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1304 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1305 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1306}
1307
1308
1309/** Opcode 0x0f 0x22. */
1310FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1311{
1312 /* Mod is ignored, as are operand size overrides. */
1313 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1314 IEMOP_HLP_MIN_386();
1315 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1316 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1317 else
1318 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1319
1320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1321 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1322 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1323 {
1324 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1325 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1326 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1327 iCrReg |= 8;
1328 }
1329 switch (iCrReg)
1330 {
1331 case 0: case 2: case 3: case 4: case 8:
1332 break;
1333 default:
1334 return IEMOP_RAISE_INVALID_OPCODE();
1335 }
1336 IEMOP_HLP_DONE_DECODING();
1337
1338 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1339}
1340
1341
1342/** Opcode 0x0f 0x23. */
1343FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1344{
1345 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1346 IEMOP_HLP_MIN_386();
1347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1349 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1350 return IEMOP_RAISE_INVALID_OPCODE();
1351 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1352 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1353 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1354}
1355
1356
1357/** Opcode 0x0f 0x24. */
1358FNIEMOP_DEF(iemOp_mov_Rd_Td)
1359{
1360 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1361 /** @todo works on 386 and 486. */
1362 /* The RM byte is not considered, see testcase. */
1363 return IEMOP_RAISE_INVALID_OPCODE();
1364}
1365
1366
1367/** Opcode 0x0f 0x26. */
1368FNIEMOP_DEF(iemOp_mov_Td_Rd)
1369{
1370 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1371 /** @todo works on 386 and 486. */
1372 /* The RM byte is not considered, see testcase. */
1373 return IEMOP_RAISE_INVALID_OPCODE();
1374}
1375
1376
1377/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1378FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1379{
1380 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1382 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1383 {
1384 /*
1385 * Register, register.
1386 */
1387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1388 IEM_MC_BEGIN(0, 0);
1389 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1390 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1391 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1392 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1393 IEM_MC_ADVANCE_RIP();
1394 IEM_MC_END();
1395 }
1396 else
1397 {
1398 /*
1399 * Register, memory.
1400 */
1401 IEM_MC_BEGIN(0, 2);
1402 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1404
1405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1407 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1408 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1409
1410 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1411 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1412
1413 IEM_MC_ADVANCE_RIP();
1414 IEM_MC_END();
1415 }
1416 return VINF_SUCCESS;
1417}
1418
1419/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1420FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1421{
1422 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1424 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1425 {
1426 /*
1427 * Register, register.
1428 */
1429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1430 IEM_MC_BEGIN(0, 0);
1431 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1433 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1434 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1435 IEM_MC_ADVANCE_RIP();
1436 IEM_MC_END();
1437 }
1438 else
1439 {
1440 /*
1441 * Register, memory.
1442 */
1443 IEM_MC_BEGIN(0, 2);
1444 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1446
1447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1449 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1450 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1451
1452 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1453 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1454
1455 IEM_MC_ADVANCE_RIP();
1456 IEM_MC_END();
1457 }
1458 return VINF_SUCCESS;
1459}
1460
1461/* Opcode 0xf3 0x0f 0x28 - invalid */
1462/* Opcode 0xf2 0x0f 0x28 - invalid */
1463
1464/** Opcode 0x0f 0x29. */
1465FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
1466{
1467 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1468 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1469 else
1470 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1473 {
1474 /*
1475 * Register, register.
1476 */
1477 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1478 IEM_MC_BEGIN(0, 0);
1479 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1480 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1481 else
1482 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1483 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1484 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1485 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1486 IEM_MC_ADVANCE_RIP();
1487 IEM_MC_END();
1488 }
1489 else
1490 {
1491 /*
1492 * Memory, register.
1493 */
1494 IEM_MC_BEGIN(0, 2);
1495 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1497
1498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1499 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1500 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1501 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1502 else
1503 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1504 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1505
1506 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1507 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1508
1509 IEM_MC_ADVANCE_RIP();
1510 IEM_MC_END();
1511 }
1512 return VINF_SUCCESS;
1513}
1514
1515
1516/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1517FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1518/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1519FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1520/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1521FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
1522/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1523FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
1524
1525
1526/** Opcode 0x0f 0x2b. */
1527FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1528{
1529 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1530 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1531 else
1532 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1534 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1535 {
1536 /*
1537 * Memory, register.
1538 */
1539 IEM_MC_BEGIN(0, 2);
1540 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1542
1543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1544 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1545 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1546 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1547 else
1548 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1549 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1550
1551 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1552 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1553
1554 IEM_MC_ADVANCE_RIP();
1555 IEM_MC_END();
1556 }
1557 /* The register, register encoding is invalid. */
1558 else
1559 return IEMOP_RAISE_INVALID_OPCODE();
1560 return VINF_SUCCESS;
1561}
1562
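/*
 * [Editorial example] Guest-side view of the non-temporal store above:
 * _mm_stream_ps() emits movntps (0x0f 0x2b), and the destination must be
 * 16-byte aligned, matching the IEM_MC_STORE_MEM_U128_ALIGN_SSE check.
 * Illustrative only, hence the #if 0 guard.
 */
#if 0
# include <xmmintrin.h>
static void streamStoreExample(float *pfDst /* must be 16-byte aligned */, __m128 v)
{
    _mm_stream_ps(pfDst, v); /* movntps [pfDst], xmm */
}
#endif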
1563
1564/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1565FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1566/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1567FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1568/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1569FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1570/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1571FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1572
1573/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1574FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1575/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1576FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1577/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1578FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1579/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1580FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1581
1582/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1583FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1584/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1585FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1586/* Opcode 0xf3 0x0f 0x2e - invalid */
1587/* Opcode 0xf2 0x0f 0x2e - invalid */
1588
1589/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
1590FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1591/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
1592FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1593/* Opcode 0xf3 0x0f 0x2f - invalid */
1594/* Opcode 0xf2 0x0f 0x2f - invalid */
1595
1596/** Opcode 0x0f 0x30. */
1597FNIEMOP_DEF(iemOp_wrmsr)
1598{
1599 IEMOP_MNEMONIC(wrmsr, "wrmsr");
1600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1601 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
1602}
1603
1604
1605/** Opcode 0x0f 0x31. */
1606FNIEMOP_DEF(iemOp_rdtsc)
1607{
1608 IEMOP_MNEMONIC(rdtsc, "rdtsc");
1609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1610 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
1611}
1612
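/*
 * [Editorial example] Guest-side view of the handler above: __rdtsc() from
 * <x86intrin.h> (GCC/Clang) emits 0x0f 0x31 and returns EDX:EAX combined
 * into one 64-bit value.  Illustrative only, hence the #if 0 guard.
 */
#if 0
# include <x86intrin.h>
static unsigned long long readTscExample(void)
{
    return __rdtsc();
}
#endif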
1613
1614/** Opcode 0x0f 0x32. */
1615FNIEMOP_DEF(iemOp_rdmsr)
1616{
1617 IEMOP_MNEMONIC(rdmsr, "rdmsr");
1618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1619 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
1620}
1621
1622
1623/** Opcode 0x0f 0x33. */
1624FNIEMOP_STUB(iemOp_rdpmc);
1625/** Opcode 0x0f 0x34. */
1626FNIEMOP_STUB(iemOp_sysenter);
1627/** Opcode 0x0f 0x35. */
1628FNIEMOP_STUB(iemOp_sysexit);
1629/** Opcode 0x0f 0x37. */
1630FNIEMOP_STUB(iemOp_getsec);
1631/** Opcode 0x0f 0x38. */
1632FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1633/** Opcode 0x0f 0x3a. */
1634FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1635
1636
1637/**
1638 * Implements a conditional move.
1639 *
1640 * Wish there was an obvious way to do this where we could share and reduce
1641 * code bloat.
1642 *
1643 * @param a_Cnd The conditional "microcode" operation.
1644 */
1645#define CMOV_X(a_Cnd) \
1646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1647 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1648 { \
1649 switch (pVCpu->iem.s.enmEffOpSize) \
1650 { \
1651 case IEMMODE_16BIT: \
1652 IEM_MC_BEGIN(0, 1); \
1653 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1654 a_Cnd { \
1655 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1656 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1657 } IEM_MC_ENDIF(); \
1658 IEM_MC_ADVANCE_RIP(); \
1659 IEM_MC_END(); \
1660 return VINF_SUCCESS; \
1661 \
1662 case IEMMODE_32BIT: \
1663 IEM_MC_BEGIN(0, 1); \
1664 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1665 a_Cnd { \
1666 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1667 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1668 } IEM_MC_ELSE() { \
1669 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1670 } IEM_MC_ENDIF(); \
1671 IEM_MC_ADVANCE_RIP(); \
1672 IEM_MC_END(); \
1673 return VINF_SUCCESS; \
1674 \
1675 case IEMMODE_64BIT: \
1676 IEM_MC_BEGIN(0, 1); \
1677 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1678 a_Cnd { \
1679 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1680 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1681 } IEM_MC_ENDIF(); \
1682 IEM_MC_ADVANCE_RIP(); \
1683 IEM_MC_END(); \
1684 return VINF_SUCCESS; \
1685 \
1686 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1687 } \
1688 } \
1689 else \
1690 { \
1691 switch (pVCpu->iem.s.enmEffOpSize) \
1692 { \
1693 case IEMMODE_16BIT: \
1694 IEM_MC_BEGIN(0, 2); \
1695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1696 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1698 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1699 a_Cnd { \
1700 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1701 } IEM_MC_ENDIF(); \
1702 IEM_MC_ADVANCE_RIP(); \
1703 IEM_MC_END(); \
1704 return VINF_SUCCESS; \
1705 \
1706 case IEMMODE_32BIT: \
1707 IEM_MC_BEGIN(0, 2); \
1708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1709 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1711 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1712 a_Cnd { \
1713 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1714 } IEM_MC_ELSE() { \
1715 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1716 } IEM_MC_ENDIF(); \
1717 IEM_MC_ADVANCE_RIP(); \
1718 IEM_MC_END(); \
1719 return VINF_SUCCESS; \
1720 \
1721 case IEMMODE_64BIT: \
1722 IEM_MC_BEGIN(0, 2); \
1723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1724 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1726 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1727 a_Cnd { \
1728 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1729 } IEM_MC_ENDIF(); \
1730 IEM_MC_ADVANCE_RIP(); \
1731 IEM_MC_END(); \
1732 return VINF_SUCCESS; \
1733 \
1734 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1735 } \
1736 } do {} while (0)
1737
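/*
 * [Editorial example] The subtle point encoded by the IEM_MC_ELSE() branches
 * in the 32-bit cases above: in 64-bit mode a 32-bit cmov zero-extends the
 * destination register even when the condition is false.  A C model of that
 * semantics, illustrative only:
 */
#if 0
# include <stdint.h>
static uint64_t cmov32Example(uint64_t uDst, uint32_t uSrc, int fCond)
{
    return fCond ? (uint64_t)uSrc            /* condition true:  move + zero-extend */
                 : (uint64_t)(uint32_t)uDst; /* condition false: still zero-extends */
}
#endif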
1738
1739
1740/** Opcode 0x0f 0x40. */
1741FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1742{
1743 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
1744 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1745}
1746
1747
1748/** Opcode 0x0f 0x41. */
1749FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1750{
1751 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
1752 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1753}
1754
1755
1756/** Opcode 0x0f 0x42. */
1757FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
1758{
1759 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
1760 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
1761}
1762
1763
1764/** Opcode 0x0f 0x43. */
1765FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
1766{
1767 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
1768 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
1769}
1770
1771
1772/** Opcode 0x0f 0x44. */
1773FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
1774{
1775 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
1776 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
1777}
1778
1779
1780/** Opcode 0x0f 0x45. */
1781FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
1782{
1783 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
1784 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
1785}
1786
1787
1788/** Opcode 0x0f 0x46. */
1789FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
1790{
1791 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
1792 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1793}
1794
1795
1796/** Opcode 0x0f 0x47. */
1797FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
1798{
1799 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
1800 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1801}
1802
1803
1804/** Opcode 0x0f 0x48. */
1805FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
1806{
1807 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
1808 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
1809}
1810
1811
1812/** Opcode 0x0f 0x49. */
1813FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
1814{
1815 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
1816 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
1817}
1818
1819
1820/** Opcode 0x0f 0x4a. */
1821FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
1822{
1823 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
1824 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
1825}
1826
1827
1828/** Opcode 0x0f 0x4b. */
1829FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
1830{
1831 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
1832 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
1833}
1834
1835
1836/** Opcode 0x0f 0x4c. */
1837FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
1838{
1839 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
1840 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
1841}
1842
1843
1844/** Opcode 0x0f 0x4d. */
1845FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
1846{
1847 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
1848 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
1849}
1850
1851
1852/** Opcode 0x0f 0x4e. */
1853FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
1854{
1855 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
1856 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1857}
1858
1859
1860/** Opcode 0x0f 0x4f. */
1861FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
1862{
1863 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
1864 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1865}
1866
1867#undef CMOV_X
1868
1869/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
1870FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1871/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
1872FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1873/* Opcode 0xf3 0x0f 0x50 - invalid */
1874/* Opcode 0xf2 0x0f 0x50 - invalid */
1875
1876/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
1877FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1878/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
1879FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1880/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
1881FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1882/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1883FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1884
1885/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
1886FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1887/* Opcode 0x66 0x0f 0x52 - invalid */
1888/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
1889FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1890/* Opcode 0xf2 0x0f 0x52 - invalid */
1891
1892/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
1893FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1894/* Opcode 0x66 0x0f 0x53 - invalid */
1895/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
1896FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1897/* Opcode 0xf2 0x0f 0x53 - invalid */
1898
1899/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
1900FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1901/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
1902FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1903/* Opcode 0xf3 0x0f 0x54 - invalid */
1904/* Opcode 0xf2 0x0f 0x54 - invalid */
1905
1906/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
1907FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1908/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
1909FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1910/* Opcode 0xf3 0x0f 0x55 - invalid */
1911/* Opcode 0xf2 0x0f 0x55 - invalid */
1912
1913/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
1914FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1915/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
1916FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1917/* Opcode 0xf3 0x0f 0x56 - invalid */
1918/* Opcode 0xf2 0x0f 0x56 - invalid */
1919
1920/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
1921FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
1922/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
1923FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
1924/* Opcode 0xf3 0x0f 0x57 - invalid */
1925/* Opcode 0xf2 0x0f 0x57 - invalid */
1926
1927/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
1928FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
1929/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
1930FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
1931/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
1932FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
1933/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
1934FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
1935
1936/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
1937FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
1938/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
1939FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
1940/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
1941FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
1942/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
1943FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
1944
1945/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
1946FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
1947/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
1948FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
1949/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
1950FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
1951/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
1952FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
1953
1954/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
1955FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
1956/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
1957FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
1958/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
1959FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
1960/* Opcode 0xf2 0x0f 0x5b - invalid */
1961
1962/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
1963FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
1964/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
1965FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
1966/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
1967FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
1968/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
1969FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
1970
1971/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
1972FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
1973/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
1974FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
1975/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
1976FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
1977/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
1978FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
1979
1980/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
1981FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
1982/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
1983FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
1984/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
1985FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
1986/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
1987FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
1988
1989/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
1990FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
1991/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
1992FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
1993/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
1994FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
1995/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
1996FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
1997
1998/**
1999 * Common worker for MMX instructions on the forms:
2000 * pxxxx mm1, mm2/mem32
2001 *
2002 * The 2nd operand is the first half of a register, which in the memory case
2003 * means a 32-bit memory access.
2005 *
2006 * Exceptions type 4.
2007 */
2008FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2009{
2010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2011 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2012 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2013 {
2014 /*
2015 * Register, register.
2016 */
2017 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2018 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2020 IEM_MC_BEGIN(2, 0);
2021 IEM_MC_ARG(uint64_t *, pDst, 0);
2022 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2023 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2024 IEM_MC_PREPARE_FPU_USAGE();
2025 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2026 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2027 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2028 IEM_MC_ADVANCE_RIP();
2029 IEM_MC_END();
2030 }
2031 else
2032 {
2033 /*
2034 * Register, memory.
2035 */
2036 IEM_MC_BEGIN(2, 2);
2037 IEM_MC_ARG(uint64_t *, pDst, 0);
2038 IEM_MC_LOCAL(uint32_t, uSrc);
2039 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2041
2042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2044 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2045 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2046
2047 IEM_MC_PREPARE_FPU_USAGE();
2048 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2049 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2050
2051 IEM_MC_ADVANCE_RIP();
2052 IEM_MC_END();
2053 }
2054 return VINF_SUCCESS;
2055}
2056
2057
2058/**
2059 * Common worker for SSE2 instructions on the forms:
2060 *     pxxxx xmm1, xmm2/mem128
2061 *
2062 * The 2nd operand is the first half of a register, which in the memory case
2063 * means a 64-bit memory access that must be 128-bit aligned; only the lower
2064 * 64 bits of the source are used.
2065 *
2066 * Proper alignment of the 128-bit operand is enforced.
2067 * Exceptions type 4.
2068 */
2069FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2070{
2071 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2072 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2073 {
2074 /*
2075 * Register, register.
2076 */
2077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2078 IEM_MC_BEGIN(2, 0);
2079 IEM_MC_ARG(uint128_t *, pDst, 0);
2080 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2081 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2082 IEM_MC_PREPARE_SSE_USAGE();
2083 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2084 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2085 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2086 IEM_MC_ADVANCE_RIP();
2087 IEM_MC_END();
2088 }
2089 else
2090 {
2091 /*
2092 * Register, memory.
2093 */
2094 IEM_MC_BEGIN(2, 2);
2095 IEM_MC_ARG(uint128_t *, pDst, 0);
2096 IEM_MC_LOCAL(uint64_t, uSrc);
2097 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2102 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2103 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2104
2105 IEM_MC_PREPARE_SSE_USAGE();
2106 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2107 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2108
2109 IEM_MC_ADVANCE_RIP();
2110 IEM_MC_END();
2111 }
2112 return VINF_SUCCESS;
2113}
2114
2115
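/*
 * The punpckl* handlers below route the plain MMX form and the 0x66-prefixed
 * SSE2 form of each opcode through the two common workers above; only the
 * g_iemAImpl_* implementation table entry differs.
 */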
2116/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2117FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2118{
2119 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2120 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2121}
2122
2123/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2124FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2125{
2126 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2127 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2128}
2129
2130/* Opcode 0xf3 0x0f 0x60 - invalid */
2131
2132
2133/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2134FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2135{
2136 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
2137 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2138}
2139
2140/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2141FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2142{
2143 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2144 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2145}
2146
2147/* Opcode 0xf3 0x0f 0x61 - invalid */
2148
2149
2150/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2151FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2152{
2153 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2154 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2155}
2156
2157/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2158FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2159{
2160 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2161 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2162}
2163
2164/* Opcode 0xf3 0x0f 0x62 - invalid */
2165
2166
2167
2168/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2169FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2170/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2171FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2172/* Opcode 0xf3 0x0f 0x63 - invalid */
2173
2174/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2175FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2176/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2177FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2178/* Opcode 0xf3 0x0f 0x64 - invalid */
2179
2180/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2181FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2182/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2183FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2184/* Opcode 0xf3 0x0f 0x65 - invalid */
2185
2186/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2187FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2188/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2189FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2190/* Opcode 0xf3 0x0f 0x66 - invalid */
2191
2192/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2193FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2194/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2195FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2196/* Opcode 0xf3 0x0f 0x67 - invalid */
2197
2198
2199/**
2200 * Common worker for MMX instructions on the form:
2201 * pxxxx mm1, mm2/mem64
2202 *
2203 * The 2nd operand is the second half of a register, which in the memory
2204 * case means a 64-bit memory access.
2206 *
2207 * Exceptions type 4.
2208 */
2209FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2210{
2211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2212 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2213 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2214 {
2215 /*
2216 * Register, register.
2217 */
2218 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2219 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2221 IEM_MC_BEGIN(2, 0);
2222 IEM_MC_ARG(uint64_t *, pDst, 0);
2223 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2224 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2225 IEM_MC_PREPARE_FPU_USAGE();
2226 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2227 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2228 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2229 IEM_MC_ADVANCE_RIP();
2230 IEM_MC_END();
2231 }
2232 else
2233 {
2234 /*
2235 * Register, memory.
2236 */
2237 IEM_MC_BEGIN(2, 2);
2238 IEM_MC_ARG(uint64_t *, pDst, 0);
2239 IEM_MC_LOCAL(uint64_t, uSrc);
2240 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2242
2243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2245 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2246 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2247
2248 IEM_MC_PREPARE_FPU_USAGE();
2249 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2250 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2251
2252 IEM_MC_ADVANCE_RIP();
2253 IEM_MC_END();
2254 }
2255 return VINF_SUCCESS;
2256}
2257
2258
2259/**
2260 * Common worker for SSE2 instructions on the form:
2261 * pxxxx xmm1, xmm2/mem128
2262 *
2263 * The 2nd operand is the second half of a register, which in the memory case
2264 * means a 128-bit aligned access where the implementation may read the full
2265 * 128 bits or only the upper 64 bits.
2266 *
2267 * Exceptions type 4.
2268 */
2269FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2270{
2271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2272 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2273 {
2274 /*
2275 * Register, register.
2276 */
2277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2278 IEM_MC_BEGIN(2, 0);
2279 IEM_MC_ARG(uint128_t *, pDst, 0);
2280 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2281 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2282 IEM_MC_PREPARE_SSE_USAGE();
2283 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2284 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2285 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2286 IEM_MC_ADVANCE_RIP();
2287 IEM_MC_END();
2288 }
2289 else
2290 {
2291 /*
2292 * Register, memory.
2293 */
2294 IEM_MC_BEGIN(2, 2);
2295 IEM_MC_ARG(uint128_t *, pDst, 0);
2296 IEM_MC_LOCAL(uint128_t, uSrc);
2297 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2299
2300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2302 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2303 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2304
2305 IEM_MC_PREPARE_SSE_USAGE();
2306 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2307 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2308
2309 IEM_MC_ADVANCE_RIP();
2310 IEM_MC_END();
2311 }
2312 return VINF_SUCCESS;
2313}
2314
2315
2316/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2317FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2318{
2319 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2320 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2321}
2322
2323/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2324FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2325{
2326 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2327 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2328}
2329/* Opcode 0xf3 0x0f 0x68 - invalid */
2330
2331
2332/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2333FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2334{
2335 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2336 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2337}
2338
2339/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2340FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2341{
2342 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2343 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2345}
2346/* Opcode 0xf3 0x0f 0x69 - invalid */
2347
2348
2349/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2350FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2351{
2352 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2353 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2354}
2355
2356/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2357FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2358{
2359 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2360 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2361}
2362/* Opcode 0xf3 0x0f 0x6a - invalid */
2363
2364
2365/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2366FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2367/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2368FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2369/* Opcode 0xf3 0x0f 0x6b - invalid */
2370
2371
2372/* Opcode 0x0f 0x6c - invalid */
2373
2374/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2375FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2376{
2377 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2378 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2379}
2380
2381/* Opcode 0xf3 0x0f 0x6c - invalid */
2382/* Opcode 0xf2 0x0f 0x6c - invalid */
2383
2384
2385/* Opcode 0x0f 0x6d - invalid */
2386
2387/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2388FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2389{
2390 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2391 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2392}
2393
2394/* Opcode 0xf3 0x0f 0x6d - invalid */
2395
2396
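/*
 * Note: without REX.W the 0x6e load moves 32 bits and zero-extends them into
 * the MMX/XMM destination; with REX.W it moves the full 64 bits.
 */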
2397/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2398FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2399{
2400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2401 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2402 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2403 else
2404 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2405 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2406 {
2407 /* MMX, greg */
2408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2409 IEM_MC_BEGIN(0, 1);
2410 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2411 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2412 IEM_MC_LOCAL(uint64_t, u64Tmp);
2413 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2414 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2415 else
2416 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2417 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2418 IEM_MC_ADVANCE_RIP();
2419 IEM_MC_END();
2420 }
2421 else
2422 {
2423 /* MMX, [mem] */
2424 IEM_MC_BEGIN(0, 2);
2425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2426 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2429 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2430 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2431 {
2432 IEM_MC_LOCAL(uint64_t, u64Tmp);
2433 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2434 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2435 }
2436 else
2437 {
2438 IEM_MC_LOCAL(uint32_t, u32Tmp);
2439 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2440 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2441 }
2442 IEM_MC_ADVANCE_RIP();
2443 IEM_MC_END();
2444 }
2445 return VINF_SUCCESS;
2446}
2447
2448/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2449FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2450{
2451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2452 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2453 IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
2454 else
2455 IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2457 {
2458 /* XMM, greg*/
2459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2460 IEM_MC_BEGIN(0, 1);
2461 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2462 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2463 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2464 {
2465 IEM_MC_LOCAL(uint64_t, u64Tmp);
2466 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2467 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2468 }
2469 else
2470 {
2471 IEM_MC_LOCAL(uint32_t, u32Tmp);
2472 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2473 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2474 }
2475 IEM_MC_ADVANCE_RIP();
2476 IEM_MC_END();
2477 }
2478 else
2479 {
2480 /* XMM, [mem] */
2481 IEM_MC_BEGIN(0, 2);
2482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2483 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2487 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2488 {
2489 IEM_MC_LOCAL(uint64_t, u64Tmp);
2490 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2491 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2492 }
2493 else
2494 {
2495 IEM_MC_LOCAL(uint32_t, u32Tmp);
2496 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2497 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2498 }
2499 IEM_MC_ADVANCE_RIP();
2500 IEM_MC_END();
2501 }
2502 return VINF_SUCCESS;
2503}
2504
2505/* Opcode 0xf3 0x0f 0x6e - invalid */
2506
2507
2508/** Opcode 0x0f 0x6f - movq Pq, Qq */
2509FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2510{
2511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2512 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2513 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2514 {
2515 /*
2516 * Register, register.
2517 */
2518 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2519 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2521 IEM_MC_BEGIN(0, 1);
2522 IEM_MC_LOCAL(uint64_t, u64Tmp);
2523 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2524 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2525 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2526 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2527 IEM_MC_ADVANCE_RIP();
2528 IEM_MC_END();
2529 }
2530 else
2531 {
2532 /*
2533 * Register, memory.
2534 */
2535 IEM_MC_BEGIN(0, 2);
2536 IEM_MC_LOCAL(uint64_t, u64Tmp);
2537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2538
2539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2541 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2542 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2543 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2544 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2545
2546 IEM_MC_ADVANCE_RIP();
2547 IEM_MC_END();
2548 }
2549 return VINF_SUCCESS;
2550}
2551
2552/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2553FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2554{
2555 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2556 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2557 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2558 {
2559 /*
2560 * Register, register.
2561 */
2562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2563 IEM_MC_BEGIN(0, 0);
2564 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2565 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2566 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2567 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2568 IEM_MC_ADVANCE_RIP();
2569 IEM_MC_END();
2570 }
2571 else
2572 {
2573 /*
2574 * Register, memory.
2575 */
2576 IEM_MC_BEGIN(0, 2);
2577 IEM_MC_LOCAL(uint128_t, u128Tmp);
2578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2579
2580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2582 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2583 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2584 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2585 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2586
2587 IEM_MC_ADVANCE_RIP();
2588 IEM_MC_END();
2589 }
2590 return VINF_SUCCESS;
2591}
2592
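/*
 * Unlike movdqa above, movdqu below places no alignment requirement on the
 * memory operand (plain IEM_MC_FETCH_MEM_U128 rather than the _ALIGN_SSE
 * variant).
 */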
2593/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2594FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2595{
2596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2597 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2599 {
2600 /*
2601 * Register, register.
2602 */
2603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2604 IEM_MC_BEGIN(0, 0);
2605 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2607 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2608 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2609 IEM_MC_ADVANCE_RIP();
2610 IEM_MC_END();
2611 }
2612 else
2613 {
2614 /*
2615 * Register, memory.
2616 */
2617 IEM_MC_BEGIN(0, 2);
2618 IEM_MC_LOCAL(uint128_t, u128Tmp);
2619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2620
2621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2623 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2624 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2625 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2626 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2627
2628 IEM_MC_ADVANCE_RIP();
2629 IEM_MC_END();
2630 }
2631 return VINF_SUCCESS;
2632}
2633
2634
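/*
 * pshufw requires SSE or the AMD MMX extensions, hence the
 * IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT test in the
 * handler below instead of the plain MMX check.
 */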
2635/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2636FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2637{
2638 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2641 {
2642 /*
2643 * Register, register.
2644 */
2645 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2647
2648 IEM_MC_BEGIN(3, 0);
2649 IEM_MC_ARG(uint64_t *, pDst, 0);
2650 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2651 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2652 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2653 IEM_MC_PREPARE_FPU_USAGE();
2654 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2655 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2656 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2657 IEM_MC_ADVANCE_RIP();
2658 IEM_MC_END();
2659 }
2660 else
2661 {
2662 /*
2663 * Register, memory.
2664 */
2665 IEM_MC_BEGIN(3, 2);
2666 IEM_MC_ARG(uint64_t *, pDst, 0);
2667 IEM_MC_LOCAL(uint64_t, uSrc);
2668 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2670
2671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
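 /* The shuffle immediate trails the ModRM, SIB and displacement bytes,
    so it can only be fetched after the effective address calculation. */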
2672 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2673 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2675 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2676
2677 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2678 IEM_MC_PREPARE_FPU_USAGE();
2679 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2680 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2681
2682 IEM_MC_ADVANCE_RIP();
2683 IEM_MC_END();
2684 }
2685 return VINF_SUCCESS;
2686}
2687
2688/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2689FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2690{
2691 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2693 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2694 {
2695 /*
2696 * Register, register.
2697 */
2698 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700
2701 IEM_MC_BEGIN(3, 0);
2702 IEM_MC_ARG(uint128_t *, pDst, 0);
2703 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2704 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2705 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2706 IEM_MC_PREPARE_SSE_USAGE();
2707 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2708 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2709 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2710 IEM_MC_ADVANCE_RIP();
2711 IEM_MC_END();
2712 }
2713 else
2714 {
2715 /*
2716 * Register, memory.
2717 */
2718 IEM_MC_BEGIN(3, 2);
2719 IEM_MC_ARG(uint128_t *, pDst, 0);
2720 IEM_MC_LOCAL(uint128_t, uSrc);
2721 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2723
2724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2725 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2726 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2728 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2729
2730 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2731 IEM_MC_PREPARE_SSE_USAGE();
2732 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2733 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2734
2735 IEM_MC_ADVANCE_RIP();
2736 IEM_MC_END();
2737 }
2738 return VINF_SUCCESS;
2739}
2740
2741/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
2742FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2743{
2744 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2746 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2747 {
2748 /*
2749 * Register, register.
2750 */
2751 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2753
2754 IEM_MC_BEGIN(3, 0);
2755 IEM_MC_ARG(uint128_t *, pDst, 0);
2756 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2757 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2758 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2759 IEM_MC_PREPARE_SSE_USAGE();
2760 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2761 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2762 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2763 IEM_MC_ADVANCE_RIP();
2764 IEM_MC_END();
2765 }
2766 else
2767 {
2768 /*
2769 * Register, memory.
2770 */
2771 IEM_MC_BEGIN(3, 2);
2772 IEM_MC_ARG(uint128_t *, pDst, 0);
2773 IEM_MC_LOCAL(uint128_t, uSrc);
2774 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2776
2777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2778 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2779 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2781 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2782
2783 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2784 IEM_MC_PREPARE_SSE_USAGE();
2785 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2786 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2787
2788 IEM_MC_ADVANCE_RIP();
2789 IEM_MC_END();
2790 }
2791 return VINF_SUCCESS;
2792}
2793
2794/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
2795FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2796{
2797 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2799 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2800 {
2801 /*
2802 * Register, register.
2803 */
2804 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2806
2807 IEM_MC_BEGIN(3, 0);
2808 IEM_MC_ARG(uint128_t *, pDst, 0);
2809 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2810 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2811 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2812 IEM_MC_PREPARE_SSE_USAGE();
2813 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2814 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2815 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2816 IEM_MC_ADVANCE_RIP();
2817 IEM_MC_END();
2818 }
2819 else
2820 {
2821 /*
2822 * Register, memory.
2823 */
2824 IEM_MC_BEGIN(3, 2);
2825 IEM_MC_ARG(uint128_t *, pDst, 0);
2826 IEM_MC_LOCAL(uint128_t, uSrc);
2827 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2829
2830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2831 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2832 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2834 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2835
2836 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2837 IEM_MC_PREPARE_SSE_USAGE();
2838 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2839 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2840
2841 IEM_MC_ADVANCE_RIP();
2842 IEM_MC_END();
2843 }
2844 return VINF_SUCCESS;
2845}
2846
2847
2848/** Opcode 0x0f 0x71 11/2. */
2849FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2850
2851/** Opcode 0x66 0x0f 0x71 11/2. */
2852FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2853
2854/** Opcode 0x0f 0x71 11/4. */
2855FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2856
2857/** Opcode 0x66 0x0f 0x71 11/4. */
2858FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2859
2860/** Opcode 0x0f 0x71 11/6. */
2861FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2862
2863/** Opcode 0x66 0x0f 0x71 11/6. */
2864FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2865
2866
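/*
 * Groups 12, 13 and 14 (opcodes 0x71, 0x72 and 0x73) dispatch on the ModRM
 * reg field: it selects the shift operation, the mandatory prefix selects
 * the MMX (no prefix) or SSE (0x66) register form, and every memory form
 * (mod != 3) is invalid.
 */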
2867/** Opcode 0x0f 0x71. */
2868FNIEMOP_DEF(iemOp_Grp12)
2869{
2870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2871 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2872 return IEMOP_RAISE_INVALID_OPCODE();
2873 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2874 {
2875 case 0: case 1: case 3: case 5: case 7:
2876 return IEMOP_RAISE_INVALID_OPCODE();
2877 case 2:
2878 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2879 {
2880 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2881 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2882 default: return IEMOP_RAISE_INVALID_OPCODE();
2883 }
2884 case 4:
2885 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2886 {
2887 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2888 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2889 default: return IEMOP_RAISE_INVALID_OPCODE();
2890 }
2891 case 6:
2892 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2893 {
2894 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2895 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2896 default: return IEMOP_RAISE_INVALID_OPCODE();
2897 }
2898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2899 }
2900}
2901
2902
2903/** Opcode 0x0f 0x72 11/2. */
2904FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
2905
2906/** Opcode 0x66 0x0f 0x72 11/2. */
2907FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
2908
2909/** Opcode 0x0f 0x72 11/4. */
2910FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
2911
2912/** Opcode 0x66 0x0f 0x72 11/4. */
2913FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
2914
2915/** Opcode 0x0f 0x72 11/6. */
2916FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
2917
2918/** Opcode 0x66 0x0f 0x72 11/6. */
2919FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2920
2921
2922/** Opcode 0x0f 0x72. */
2923FNIEMOP_DEF(iemOp_Grp13)
2924{
2925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2926 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2927 return IEMOP_RAISE_INVALID_OPCODE();
2928 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2929 {
2930 case 0: case 1: case 3: case 5: case 7:
2931 return IEMOP_RAISE_INVALID_OPCODE();
2932 case 2:
2933 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2934 {
2935 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2936 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2937 default: return IEMOP_RAISE_INVALID_OPCODE();
2938 }
2939 case 4:
2940 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2941 {
2942 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2943 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2944 default: return IEMOP_RAISE_INVALID_OPCODE();
2945 }
2946 case 6:
2947 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2948 {
2949 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2950 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2951 default: return IEMOP_RAISE_INVALID_OPCODE();
2952 }
2953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2954 }
2955}
2956
2957
2958/** Opcode 0x0f 0x73 11/2. */
2959FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
2960
2961/** Opcode 0x66 0x0f 0x73 11/2. */
2962FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
2963
2964/** Opcode 0x66 0x0f 0x73 11/3. */
2965FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
2966
2967/** Opcode 0x0f 0x73 11/6. */
2968FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
2969
2970/** Opcode 0x66 0x0f 0x73 11/6. */
2971FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
2972
2973/** Opcode 0x66 0x0f 0x73 11/7. */
2974FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2975
2976
2977/** Opcode 0x0f 0x73. */
2978FNIEMOP_DEF(iemOp_Grp14)
2979{
2980 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2981 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2982 return IEMOP_RAISE_INVALID_OPCODE();
2983 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2984 {
2985 case 0: case 1: case 4: case 5:
2986 return IEMOP_RAISE_INVALID_OPCODE();
2987 case 2:
2988 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2989 {
2990 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2991 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2992 default: return IEMOP_RAISE_INVALID_OPCODE();
2993 }
2994 case 3:
2995 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2996 {
2997 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2998 default: return IEMOP_RAISE_INVALID_OPCODE();
2999 }
3000 case 6:
3001 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3002 {
3003 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3004 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3005 default: return IEMOP_RAISE_INVALID_OPCODE();
3006 }
3007 case 7:
3008 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3009 {
3010 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3011 default: return IEMOP_RAISE_INVALID_OPCODE();
3012 }
3013 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3014 }
3015}
3016
3017
3018/**
3019 * Common worker for MMX instructions on the form:
3020 * pxxx mm1, mm2/mem64
3021 */
3022FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3023{
3024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3025 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3026 {
3027 /*
3028 * Register, register.
3029 */
3030 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3031 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3033 IEM_MC_BEGIN(2, 0);
3034 IEM_MC_ARG(uint64_t *, pDst, 0);
3035 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3036 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3037 IEM_MC_PREPARE_FPU_USAGE();
3038 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3039 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3040 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3041 IEM_MC_ADVANCE_RIP();
3042 IEM_MC_END();
3043 }
3044 else
3045 {
3046 /*
3047 * Register, memory.
3048 */
3049 IEM_MC_BEGIN(2, 2);
3050 IEM_MC_ARG(uint64_t *, pDst, 0);
3051 IEM_MC_LOCAL(uint64_t, uSrc);
3052 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3054
3055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3057 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3058 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3059
3060 IEM_MC_PREPARE_FPU_USAGE();
3061 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3062 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3063
3064 IEM_MC_ADVANCE_RIP();
3065 IEM_MC_END();
3066 }
3067 return VINF_SUCCESS;
3068}
3069
3070
3071/**
3072 * Common worker for SSE2 instructions on the forms:
3073 * pxxx xmm1, xmm2/mem128
3074 *
3075 * Proper alignment of the 128-bit operand is enforced.
3076 * Exceptions type 4. SSE2 cpuid checks.
3077 */
3078FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3079{
3080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3081 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3082 {
3083 /*
3084 * Register, register.
3085 */
3086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3087 IEM_MC_BEGIN(2, 0);
3088 IEM_MC_ARG(uint128_t *, pDst, 0);
3089 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3090 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3091 IEM_MC_PREPARE_SSE_USAGE();
3092 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3093 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3094 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3095 IEM_MC_ADVANCE_RIP();
3096 IEM_MC_END();
3097 }
3098 else
3099 {
3100 /*
3101 * Register, memory.
3102 */
3103 IEM_MC_BEGIN(2, 2);
3104 IEM_MC_ARG(uint128_t *, pDst, 0);
3105 IEM_MC_LOCAL(uint128_t, uSrc);
3106 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3108
3109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3111 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3112 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3113
3114 IEM_MC_PREPARE_SSE_USAGE();
3115 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3116 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3117
3118 IEM_MC_ADVANCE_RIP();
3119 IEM_MC_END();
3120 }
3121 return VINF_SUCCESS;
3122}
3123
3124
3125/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3126FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3127{
3128 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3129 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3130}
3131
3132/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3133FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3134{
3135 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3136 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3137}
3138
3139/* Opcode 0xf3 0x0f 0x74 - invalid */
3140/* Opcode 0xf2 0x0f 0x74 - invalid */
3141
3142
3143/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3144FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3145{
3146 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3147 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3148}
3149
3150/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3151FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3152{
3153 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3154 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3155}
3156
3157/* Opcode 0xf3 0x0f 0x75 - invalid */
3158/* Opcode 0xf2 0x0f 0x75 - invalid */
3159
3160
3161/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3162FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3163{
3164 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3165 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3166}
3167
3168/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3169FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3170{
3171 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3172 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3173}
3174
3175/* Opcode 0xf3 0x0f 0x76 - invalid */
3176/* Opcode 0xf2 0x0f 0x76 - invalid */
3177
3178
3179/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3180FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3181/* Opcode 0x66 0x0f 0x77 - invalid */
3182/* Opcode 0xf3 0x0f 0x77 - invalid */
3183/* Opcode 0xf2 0x0f 0x77 - invalid */
3184
3185/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3186FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3187/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3188FNIEMOP_STUB(iemOp_AmdGrp17);
3189/* Opcode 0xf3 0x0f 0x78 - invalid */
3190/* Opcode 0xf2 0x0f 0x78 - invalid */
3191
3192/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3193FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3194/* Opcode 0x66 0x0f 0x79 - invalid */
3195/* Opcode 0xf3 0x0f 0x79 - invalid */
3196/* Opcode 0xf2 0x0f 0x79 - invalid */
3197
3198/* Opcode 0x0f 0x7a - invalid */
3199/* Opcode 0x66 0x0f 0x7a - invalid */
3200/* Opcode 0xf3 0x0f 0x7a - invalid */
3201/* Opcode 0xf2 0x0f 0x7a - invalid */
3202
3203/* Opcode 0x0f 0x7b - invalid */
3204/* Opcode 0x66 0x0f 0x7b - invalid */
3205/* Opcode 0xf3 0x0f 0x7b - invalid */
3206/* Opcode 0xf2 0x0f 0x7b - invalid */
3207
3208/* Opcode 0x0f 0x7c - invalid */
3209/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3210FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3211/* Opcode 0xf3 0x0f 0x7c - invalid */
3212/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3213FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3214
3215/* Opcode 0x0f 0x7d - invalid */
3216/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3217FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3218/* Opcode 0xf3 0x0f 0x7d - invalid */
3219/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3220FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3221
3222
3223/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3224FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3225{
3226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3227 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3228 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3229 else
3230 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3231 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3232 {
3233 /* greg, MMX */
3234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3235 IEM_MC_BEGIN(0, 1);
3236 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3237 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3238 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3239 {
3240 IEM_MC_LOCAL(uint64_t, u64Tmp);
3241 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3242 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3243 }
3244 else
3245 {
3246 IEM_MC_LOCAL(uint32_t, u32Tmp);
3247 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3248 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3249 }
3250 IEM_MC_ADVANCE_RIP();
3251 IEM_MC_END();
3252 }
3253 else
3254 {
3255 /* [mem], MMX */
3256 IEM_MC_BEGIN(0, 2);
3257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3258 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3262 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3263 {
3264 IEM_MC_LOCAL(uint64_t, u64Tmp);
3265 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3266 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3267 }
3268 else
3269 {
3270 IEM_MC_LOCAL(uint32_t, u32Tmp);
3271 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3272 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3273 }
3274 IEM_MC_ADVANCE_RIP();
3275 IEM_MC_END();
3276 }
3277 return VINF_SUCCESS;
3278}
3279
3280/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3281FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3282{
3283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3284 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3285 IEMOP_MNEMONIC(vmovq_Eq_Vq, "vmovq Eq,Vq");
3286 else
3287 IEMOP_MNEMONIC(vmovd_Ed_Vd, "vmovd Ed,Vd");
3288 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3289 {
3290 /* greg, XMM */
3291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3292 IEM_MC_BEGIN(0, 1);
3293 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3294 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3295 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3296 {
3297 IEM_MC_LOCAL(uint64_t, u64Tmp);
3298 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3299 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3300 }
3301 else
3302 {
3303 IEM_MC_LOCAL(uint32_t, u32Tmp);
3304 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3305 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3306 }
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 else
3311 {
3312 /* [mem], XMM */
3313 IEM_MC_BEGIN(0, 2);
3314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3315 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3318 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3319 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3320 {
3321 IEM_MC_LOCAL(uint64_t, u64Tmp);
3322 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3323 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3324 }
3325 else
3326 {
3327 IEM_MC_LOCAL(uint32_t, u32Tmp);
3328 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3329 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3330 }
3331 IEM_MC_ADVANCE_RIP();
3332 IEM_MC_END();
3333 }
3334 return VINF_SUCCESS;
3335}
3336
3337/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3338FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3339/* Opcode 0xf2 0x0f 0x7e - invalid */
3340
3341
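/*
 * The 0x7f encodings are the store counterparts of 0x6f, i.e.
 * movq/movdqa/movdqu with the operands reversed.
 */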
3342/** Opcode 0x0f 0x7f - movq Qq, Pq */
3343FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3344{
3345 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3346 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3347 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3348 {
3349 /*
3350 * Register, register.
3351 */
3352 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3353 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3355 IEM_MC_BEGIN(0, 1);
3356 IEM_MC_LOCAL(uint64_t, u64Tmp);
3357 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3358 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3359 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3360 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3361 IEM_MC_ADVANCE_RIP();
3362 IEM_MC_END();
3363 }
3364 else
3365 {
3366 /*
3367 * Register, memory.
3368 */
3369 IEM_MC_BEGIN(0, 2);
3370 IEM_MC_LOCAL(uint64_t, u64Tmp);
3371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3372
3373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3375 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3376 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3377
3378 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3379 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3380
3381 IEM_MC_ADVANCE_RIP();
3382 IEM_MC_END();
3383 }
3384 return VINF_SUCCESS;
3385}
3386
3387/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3388FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3389{
3390 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3392 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3393 {
3394 /*
3395 * Register, register.
3396 */
3397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3398 IEM_MC_BEGIN(0, 0);
3399 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3401 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3402 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3403 IEM_MC_ADVANCE_RIP();
3404 IEM_MC_END();
3405 }
3406 else
3407 {
3408 /*
3409 * Register, memory.
3410 */
3411 IEM_MC_BEGIN(0, 2);
3412 IEM_MC_LOCAL(uint128_t, u128Tmp);
3413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3414
3415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3417 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3418 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3419
3420 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3421 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3422
3423 IEM_MC_ADVANCE_RIP();
3424 IEM_MC_END();
3425 }
3426 return VINF_SUCCESS;
3427}
3428
3429/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3430FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3431{
3432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3433 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

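        /* movdqu permits unaligned accesses, so the plain (alignment check
           free) 128-bit store is used here. */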
        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf2 0x0f 0x7f - invalid */



/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
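    /* In 64-bit mode the operand size defaults to 64 bits, so the 16-bit
       path below is only taken with an explicit 0x66 prefix; the 64-bit
       form reuses the sign-extended 32-bit displacement of the else branch. */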
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
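        /* Branch when SF != OF, i.e. signed less than. */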
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
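        /* Branch when ZF is set or SF != OF, i.e. signed less than or equal. */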
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

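    /* setcc always writes a single byte, 0 or 1; the operand size prefix
       has no effect on the access width. */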
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common 'push segment-register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
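    /* push es/cs/ss/ds (opcodes 06h, 0eh, 16h and 1eh) are invalid in
       64-bit mode, whereas push fs/gs (0f a0h, 0f a8h) remain valid. */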
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
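            /* Note: at least some CPUs are documented to only write the low
               16 bits of the stack slot for a 32-bit 'push sreg'; the
               dedicated SREG push variant presumably exists to mirror that
               quirk. */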
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}


/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* Not all 486 models implement CPUID. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}


/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
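    /* CF ends up holding the selected bit; the flags listed above are
       architecturally undefined after the BT family of instructions. */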

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int16_t, i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
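                /* The bit offset in u16Src is signed: an arithmetic right
                   shift by 4 yields the signed word index, shifting that left
                   by 1 turns it into a byte offset to add to the effective
                   address, and the low 4 bits left in u16Src select the bit
                   within the addressed word. */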
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int64_t, i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

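                /* The trailing argument of 1 tells the effective address
                   calculator that one immediate byte still follows, which is
                   what keeps RIP-relative addressing right in 64-bit mode. */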
5070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5071 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5072 IEM_MC_ASSIGN(cShiftArg, cShift);
5073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5074 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5075 IEM_MC_FETCH_EFLAGS(EFlags);
5076 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5077 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5078
5079 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5080 IEM_MC_COMMIT_EFLAGS(EFlags);
5081 IEM_MC_ADVANCE_RIP();
5082 IEM_MC_END();
5083 return VINF_SUCCESS;
5084
5085 case IEMMODE_32BIT:
5086 IEM_MC_BEGIN(4, 2);
5087 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5088 IEM_MC_ARG(uint32_t, u32Src, 1);
5089 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5090 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5092
5093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5094 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5095 IEM_MC_ASSIGN(cShiftArg, cShift);
5096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5097 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5098 IEM_MC_FETCH_EFLAGS(EFlags);
5099 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5100 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5101
5102 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5103 IEM_MC_COMMIT_EFLAGS(EFlags);
5104 IEM_MC_ADVANCE_RIP();
5105 IEM_MC_END();
5106 return VINF_SUCCESS;
5107
5108 case IEMMODE_64BIT:
5109 IEM_MC_BEGIN(4, 2);
5110 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5111 IEM_MC_ARG(uint64_t, u64Src, 1);
5112 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5113 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5115
5116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5117 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5118 IEM_MC_ASSIGN(cShiftArg, cShift);
5119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5120 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5121 IEM_MC_FETCH_EFLAGS(EFlags);
5122 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5123 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5124
5125 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5126 IEM_MC_COMMIT_EFLAGS(EFlags);
5127 IEM_MC_ADVANCE_RIP();
5128 IEM_MC_END();
5129 return VINF_SUCCESS;
5130
5131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5132 }
5133 }
5134}
5135
5136
5137/**
5138 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5139 */
5140FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5141{
5142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5143 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5144
5145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5146 {
5147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5148
5149 switch (pVCpu->iem.s.enmEffOpSize)
5150 {
5151 case IEMMODE_16BIT:
5152 IEM_MC_BEGIN(4, 0);
5153 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5154 IEM_MC_ARG(uint16_t, u16Src, 1);
5155 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5156 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5157
5158 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5159 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5160 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5161 IEM_MC_REF_EFLAGS(pEFlags);
5162 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5163
5164 IEM_MC_ADVANCE_RIP();
5165 IEM_MC_END();
5166 return VINF_SUCCESS;
5167
5168 case IEMMODE_32BIT:
5169 IEM_MC_BEGIN(4, 0);
5170 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5171 IEM_MC_ARG(uint32_t, u32Src, 1);
5172 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5173 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5174
5175 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5176 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5177 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5178 IEM_MC_REF_EFLAGS(pEFlags);
5179 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5180
5181 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5182 IEM_MC_ADVANCE_RIP();
5183 IEM_MC_END();
5184 return VINF_SUCCESS;
5185
5186 case IEMMODE_64BIT:
5187 IEM_MC_BEGIN(4, 0);
5188 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5189 IEM_MC_ARG(uint64_t, u64Src, 1);
5190 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5191 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5192
5193 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5194 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5195 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5196 IEM_MC_REF_EFLAGS(pEFlags);
5197 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5198
5199 IEM_MC_ADVANCE_RIP();
5200 IEM_MC_END();
5201 return VINF_SUCCESS;
5202
5203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5204 }
5205 }
5206 else
5207 {
5208 switch (pVCpu->iem.s.enmEffOpSize)
5209 {
5210 case IEMMODE_16BIT:
5211 IEM_MC_BEGIN(4, 2);
5212 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5213 IEM_MC_ARG(uint16_t, u16Src, 1);
5214 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5215 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5217
5218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5220 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5221 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5222 IEM_MC_FETCH_EFLAGS(EFlags);
5223 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5224 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5225
5226 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5227 IEM_MC_COMMIT_EFLAGS(EFlags);
5228 IEM_MC_ADVANCE_RIP();
5229 IEM_MC_END();
5230 return VINF_SUCCESS;
5231
5232 case IEMMODE_32BIT:
5233 IEM_MC_BEGIN(4, 2);
5234 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5235 IEM_MC_ARG(uint32_t, u32Src, 1);
5236 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5237 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5239
5240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5242 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5243 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5244 IEM_MC_FETCH_EFLAGS(EFlags);
5245 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5246 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5247
5248 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5249 IEM_MC_COMMIT_EFLAGS(EFlags);
5250 IEM_MC_ADVANCE_RIP();
5251 IEM_MC_END();
5252 return VINF_SUCCESS;
5253
5254 case IEMMODE_64BIT:
5255 IEM_MC_BEGIN(4, 2);
5256 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5257 IEM_MC_ARG(uint64_t, u64Src, 1);
5258 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5259 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5261
5262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5264 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5265 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5266 IEM_MC_FETCH_EFLAGS(EFlags);
5267 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5268 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5269
5270 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5271 IEM_MC_COMMIT_EFLAGS(EFlags);
5272 IEM_MC_ADVANCE_RIP();
5273 IEM_MC_END();
5274 return VINF_SUCCESS;
5275
5276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5277 }
5278 }
5279}
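
/*
 * A hedged sketch of what the pImpl->pfnNormalU32 callback used by the
 * SHLD/SHRD-by-CL worker above is expected to compute for SHLD.  The real
 * worker lives outside this file and also sets the remaining arithmetic
 * flags; iemSketchShldU32 is a hypothetical name, purely illustrative.
 */
#if 0 /* documentation sketch, not built */
static void iemSketchShldU32(uint32_t *puDst, uint32_t uSrc, uint8_t cShift, uint32_t *pfEFlags)
{
    cShift &= 31;                       /* the CPU masks the shift count modulo the operand width */
    if (cShift)
    {
        uint32_t const uDstOld = *puDst;
        *puDst = (uDstOld << cShift) | (uSrc >> (32 - cShift));
        /* CF receives the last bit shifted out of the destination. */
        if ((uDstOld >> (32 - cShift)) & 1)
            *pfEFlags |= X86_EFL_CF;
        else
            *pfEFlags &= ~X86_EFL_CF;
        /* SF/ZF/PF follow the result; OF is only defined for 1-bit shifts. */
    }
}
#endif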
5280
5281
5282
5283/** Opcode 0x0f 0xa4. */
5284FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5285{
5286 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5287 IEMOP_HLP_MIN_386();
5288 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5289}
5290
5291
5292/** Opcode 0x0f 0xa5. */
5293FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5294{
5295 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5296 IEMOP_HLP_MIN_386();
5297 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5298}
5299
5300
5301/** Opcode 0x0f 0xa8. */
5302FNIEMOP_DEF(iemOp_push_gs)
5303{
5304 IEMOP_MNEMONIC(push_gs, "push gs");
5305 IEMOP_HLP_MIN_386();
5306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5307 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5308}
5309
5310
5311/** Opcode 0x0f 0xa9. */
5312FNIEMOP_DEF(iemOp_pop_gs)
5313{
5314 IEMOP_MNEMONIC(pop_gs, "pop gs");
5315 IEMOP_HLP_MIN_386();
5316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5317 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5318}
5319
5320
5321/** Opcode 0x0f 0xaa. */
5322FNIEMOP_STUB(iemOp_rsm);
5323//IEMOP_HLP_MIN_386();
5324
5325
5326/** Opcode 0x0f 0xab. */
5327FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5328{
5329 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5330 IEMOP_HLP_MIN_386();
5331 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5332}
5333
5334
5335/** Opcode 0x0f 0xac. */
5336FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5337{
5338 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5339 IEMOP_HLP_MIN_386();
5340 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5341}
5342
5343
5344/** Opcode 0x0f 0xad. */
5345FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5346{
5347 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5348 IEMOP_HLP_MIN_386();
5349 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5350}
5351
5352
5353/** Opcode 0x0f 0xae mem/0. */
5354FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5355{
5356 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5357 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5358 return IEMOP_RAISE_INVALID_OPCODE();
5359
5360 IEM_MC_BEGIN(3, 1);
5361 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5362 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5363 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5366 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5367 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5368 IEM_MC_END();
5369 return VINF_SUCCESS;
5370}
5371
5372
5373/** Opcode 0x0f 0xae mem/1. */
5374FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5375{
5376 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5377 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5378 return IEMOP_RAISE_INVALID_OPCODE();
5379
5380 IEM_MC_BEGIN(3, 1);
5381 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5382 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5383 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5386 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5387 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5388 IEM_MC_END();
5389 return VINF_SUCCESS;
5390}
5391
5392
5393/** Opcode 0x0f 0xae mem/2. */
5394FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5395
5396/** Opcode 0x0f 0xae mem/3. */
5397FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5398
5399/** Opcode 0x0f 0xae mem/4. */
5400FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5401
5402/** Opcode 0x0f 0xae mem/5. */
5403FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5404
5405/** Opcode 0x0f 0xae mem/6. */
5406FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5407
5408/** Opcode 0x0f 0xae mem/7. */
5409FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5410
5411
5412/** Opcode 0x0f 0xae 11b/5. */
5413FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5414{
5415 RT_NOREF_PV(bRm);
5416 IEMOP_MNEMONIC(lfence, "lfence");
5417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5418 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5419 return IEMOP_RAISE_INVALID_OPCODE();
5420
5421 IEM_MC_BEGIN(0, 0);
5422 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5423 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5424 else
5425 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5426 IEM_MC_ADVANCE_RIP();
5427 IEM_MC_END();
5428 return VINF_SUCCESS;
5429}
5430
5431
5432/** Opcode 0x0f 0xae 11b/6. */
5433FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5434{
5435 RT_NOREF_PV(bRm);
5436 IEMOP_MNEMONIC(mfence, "mfence");
5437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5438 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5439 return IEMOP_RAISE_INVALID_OPCODE();
5440
5441 IEM_MC_BEGIN(0, 0);
5442 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5443 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5444 else
5445 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5446 IEM_MC_ADVANCE_RIP();
5447 IEM_MC_END();
5448 return VINF_SUCCESS;
5449}
5450
5451
5452/** Opcode 0x0f 0xae 11b/7. */
5453FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5454{
5455 RT_NOREF_PV(bRm);
5456 IEMOP_MNEMONIC(sfence, "sfence");
5457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5458 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5459 return IEMOP_RAISE_INVALID_OPCODE();
5460
5461 IEM_MC_BEGIN(0, 0);
5462 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5463 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5464 else
5465 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5466 IEM_MC_ADVANCE_RIP();
5467 IEM_MC_END();
5468 return VINF_SUCCESS;
5469}
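
/*
 * The three fence workers above fall back to iemAImpl_alt_mem_fence on hosts
 * without SSE2.  Below is a hedged sketch of one common way such a fallback
 * can be implemented, since a locked read-modify-write is fully serializing
 * on x86; the actual routine is in assembly and may differ.
 */
#if 0 /* documentation sketch, not built */
static void iemSketchAltMemFence(void)
{
    int32_t volatile iTmp = 0;
    __asm__ __volatile__("lock; addl $0, %0" : "+m" (iTmp) : : "memory"); /* GCC syntax */
}
#endif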
5470
5471
5472/** Opcode 0xf3 0x0f 0xae 11b/0. */
5473FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5474
5475/** Opcode 0xf3 0x0f 0xae 11b/1. */
5476FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5477
5478/** Opcode 0xf3 0x0f 0xae 11b/2. */
5479FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5480
5481/** Opcode 0xf3 0x0f 0xae 11b/3. */
5482FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5483
5484
5485/** Opcode 0x0f 0xae. */
5486FNIEMOP_DEF(iemOp_Grp15)
5487{
5488 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
5489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5490 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5491 {
5492 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5493 {
5494 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5495 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5496 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5497 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5498 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5499 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5500 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5501 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5503 }
5504 }
5505 else
5506 {
5507 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5508 {
5509 case 0:
5510 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5511 {
5512 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5513 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5514 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5515 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5516 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5517 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5518 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5519 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5521 }
5522 break;
5523
5524 case IEM_OP_PRF_REPZ:
5525 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5526 {
5527 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5528 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5529 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5530 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5531 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5532 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5533 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5534 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5536 }
5537 break;
5538
5539 default:
5540 return IEMOP_RAISE_INVALID_OPCODE();
5541 }
5542 }
5543}
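
/*
 * For reference, the ModR/M byte fields the Grp15 dispatcher above switches
 * on; the X86_MODRM_* masks and shifts encode this same layout.  Hypothetical
 * helper name, for illustration only.
 */
#if 0 /* documentation sketch, not built */
static void iemSketchModRmFields(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = bRm >> 6;          /* mod: 11b selects the register form */
    *pbReg = (bRm >> 3) & 7;    /* reg: the opcode extension for group instructions */
    *pbRm  = bRm & 7;           /* r/m: register or memory operand */
}
#endif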
5544
5545
5546/** Opcode 0x0f 0xaf. */
5547FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5548{
5549 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5550 IEMOP_HLP_MIN_386();
5551 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5552 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5553}
5554
5555
5556/** Opcode 0x0f 0xb0. */
5557FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5558{
5559 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5560 IEMOP_HLP_MIN_486();
5561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5562
5563 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5564 {
5565 IEMOP_HLP_DONE_DECODING();
5566 IEM_MC_BEGIN(4, 0);
5567 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5568 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5569 IEM_MC_ARG(uint8_t, u8Src, 2);
5570 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5571
5572 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5573 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5574 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5575 IEM_MC_REF_EFLAGS(pEFlags);
5576 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5577 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5578 else
5579 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5580
5581 IEM_MC_ADVANCE_RIP();
5582 IEM_MC_END();
5583 }
5584 else
5585 {
5586 IEM_MC_BEGIN(4, 3);
5587 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5588 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5589 IEM_MC_ARG(uint8_t, u8Src, 2);
5590 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5592 IEM_MC_LOCAL(uint8_t, u8Al);
5593
5594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5595 IEMOP_HLP_DONE_DECODING();
5596 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5597 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5598 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5599 IEM_MC_FETCH_EFLAGS(EFlags);
5600 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5601 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5602 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5603 else
5604 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5605
5606 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5607 IEM_MC_COMMIT_EFLAGS(EFlags);
5608 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5609 IEM_MC_ADVANCE_RIP();
5610 IEM_MC_END();
5611 }
5612 return VINF_SUCCESS;
5613}
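
/*
 * A hedged model of the compare-and-exchange core behind iemAImpl_cmpxchg_u8
 * and its wider siblings (the real worker also sets the remaining arithmetic
 * flags from the implicit CMP).  Illustrative only, hypothetical name.
 */
#if 0 /* documentation sketch, not built */
static void iemSketchCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
{
    if (*puDst == *puAl)
    {
        *puDst = uSrc;              /* equal: the source is stored into the destination, */
        *pfEFlags |= X86_EFL_ZF;    /* and ZF=1 signals success. */
    }
    else
    {
        *puAl = *puDst;             /* not equal: AL receives the destination value, */
        *pfEFlags &= ~X86_EFL_ZF;   /* and ZF=0. */
    }
}
#endif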
5614
5615/** Opcode 0x0f 0xb1. */
5616FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5617{
5618 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5619 IEMOP_HLP_MIN_486();
5620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5621
5622 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5623 {
5624 IEMOP_HLP_DONE_DECODING();
5625 switch (pVCpu->iem.s.enmEffOpSize)
5626 {
5627 case IEMMODE_16BIT:
5628 IEM_MC_BEGIN(4, 0);
5629 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5630 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5631 IEM_MC_ARG(uint16_t, u16Src, 2);
5632 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5633
5634 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5635 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5636 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5637 IEM_MC_REF_EFLAGS(pEFlags);
5638 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5639 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5640 else
5641 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5642
5643 IEM_MC_ADVANCE_RIP();
5644 IEM_MC_END();
5645 return VINF_SUCCESS;
5646
5647 case IEMMODE_32BIT:
5648 IEM_MC_BEGIN(4, 0);
5649 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5650 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5651 IEM_MC_ARG(uint32_t, u32Src, 2);
5652 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5653
5654 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5655 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5656 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5657 IEM_MC_REF_EFLAGS(pEFlags);
5658 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5659 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5660 else
5661 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5662
5663 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5664 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5665 IEM_MC_ADVANCE_RIP();
5666 IEM_MC_END();
5667 return VINF_SUCCESS;
5668
5669 case IEMMODE_64BIT:
5670 IEM_MC_BEGIN(4, 0);
5671 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5672 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5673#ifdef RT_ARCH_X86
5674 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5675#else
5676 IEM_MC_ARG(uint64_t, u64Src, 2);
5677#endif
5678 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5679
5680 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5681 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5682 IEM_MC_REF_EFLAGS(pEFlags);
5683#ifdef RT_ARCH_X86
5684 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5685 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5686 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5687 else
5688 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5689#else
5690 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5691 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5692 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5693 else
5694 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5695#endif
5696
5697 IEM_MC_ADVANCE_RIP();
5698 IEM_MC_END();
5699 return VINF_SUCCESS;
5700
5701 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5702 }
5703 }
5704 else
5705 {
5706 switch (pVCpu->iem.s.enmEffOpSize)
5707 {
5708 case IEMMODE_16BIT:
5709 IEM_MC_BEGIN(4, 3);
5710 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5711 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5712 IEM_MC_ARG(uint16_t, u16Src, 2);
5713 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5715 IEM_MC_LOCAL(uint16_t, u16Ax);
5716
5717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5718 IEMOP_HLP_DONE_DECODING();
5719 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5720 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5721 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5722 IEM_MC_FETCH_EFLAGS(EFlags);
5723 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5724 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5725 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5726 else
5727 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5728
5729 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5730 IEM_MC_COMMIT_EFLAGS(EFlags);
5731 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5732 IEM_MC_ADVANCE_RIP();
5733 IEM_MC_END();
5734 return VINF_SUCCESS;
5735
5736 case IEMMODE_32BIT:
5737 IEM_MC_BEGIN(4, 3);
5738 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5739 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5740 IEM_MC_ARG(uint32_t, u32Src, 2);
5741 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5743 IEM_MC_LOCAL(uint32_t, u32Eax);
5744
5745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5746 IEMOP_HLP_DONE_DECODING();
5747 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5748 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5749 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5750 IEM_MC_FETCH_EFLAGS(EFlags);
5751 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5752 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5753 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5754 else
5755 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5756
5757 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5758 IEM_MC_COMMIT_EFLAGS(EFlags);
5759 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5760 IEM_MC_ADVANCE_RIP();
5761 IEM_MC_END();
5762 return VINF_SUCCESS;
5763
5764 case IEMMODE_64BIT:
5765 IEM_MC_BEGIN(4, 3);
5766 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5767 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5768#ifdef RT_ARCH_X86
5769 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5770#else
5771 IEM_MC_ARG(uint64_t, u64Src, 2);
5772#endif
5773 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5775 IEM_MC_LOCAL(uint64_t, u64Rax);
5776
5777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5778 IEMOP_HLP_DONE_DECODING();
5779 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5780 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5781 IEM_MC_FETCH_EFLAGS(EFlags);
5782 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5783#ifdef RT_ARCH_X86
5784 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5785 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5786 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5787 else
5788 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5789#else
5790 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5791 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5792 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5793 else
5794 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5795#endif
5796
5797 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5798 IEM_MC_COMMIT_EFLAGS(EFlags);
5799 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5800 IEM_MC_ADVANCE_RIP();
5801 IEM_MC_END();
5802 return VINF_SUCCESS;
5803
5804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5805 }
5806 }
5807}
5808
5809
5810FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5811{
5812 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5813 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5814
5815 switch (pVCpu->iem.s.enmEffOpSize)
5816 {
5817 case IEMMODE_16BIT:
5818 IEM_MC_BEGIN(5, 1);
5819 IEM_MC_ARG(uint16_t, uSel, 0);
5820 IEM_MC_ARG(uint16_t, offSeg, 1);
5821 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5822 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5823 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5824 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5827 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5828 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5829 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5830 IEM_MC_END();
5831 return VINF_SUCCESS;
5832
5833 case IEMMODE_32BIT:
5834 IEM_MC_BEGIN(5, 1);
5835 IEM_MC_ARG(uint16_t, uSel, 0);
5836 IEM_MC_ARG(uint32_t, offSeg, 1);
5837 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5838 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5839 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5840 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5843 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5844 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5845 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5846 IEM_MC_END();
5847 return VINF_SUCCESS;
5848
5849 case IEMMODE_64BIT:
5850 IEM_MC_BEGIN(5, 1);
5851 IEM_MC_ARG(uint16_t, uSel, 0);
5852 IEM_MC_ARG(uint64_t, offSeg, 1);
5853 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5854 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5855 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5856 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5859 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
5860 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5861 else
5862 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5863 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5864 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5865 IEM_MC_END();
5866 return VINF_SUCCESS;
5867
5868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5869 }
5870}
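
/*
 * Hedged sketch of the far-pointer memory layout the common worker above
 * decodes: the offset comes first and the 16-bit selector follows it, which
 * is why the selector is fetched with a displacement of 2, 4 or 8 bytes.
 * Hypothetical type, 32-bit operand size shown.
 */
#if 0 /* documentation sketch, not built */
#pragma pack(1)
typedef struct IEMSKETCHFARPTR32
{
    uint32_t off;       /* bytes 0..3: offset, loaded into the general register */
    uint16_t uSel;      /* bytes 4..5: selector, loaded into SS/FS/GS */
} IEMSKETCHFARPTR32;
#pragma pack()
#endif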
5871
5872
5873/** Opcode 0x0f 0xb2. */
5874FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5875{
5876 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5877 IEMOP_HLP_MIN_386();
5878 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5879 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5880 return IEMOP_RAISE_INVALID_OPCODE();
5881 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5882}
5883
5884
5885/** Opcode 0x0f 0xb3. */
5886FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5887{
5888 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5889 IEMOP_HLP_MIN_386();
5890 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5891}
5892
5893
5894/** Opcode 0x0f 0xb4. */
5895FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5896{
5897 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5898 IEMOP_HLP_MIN_386();
5899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5900 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5901 return IEMOP_RAISE_INVALID_OPCODE();
5902 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5903}
5904
5905
5906/** Opcode 0x0f 0xb5. */
5907FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5908{
5909 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5910 IEMOP_HLP_MIN_386();
5911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5912 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5913 return IEMOP_RAISE_INVALID_OPCODE();
5914 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5915}
5916
5917
5918/** Opcode 0x0f 0xb6. */
5919FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5920{
5921 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5922 IEMOP_HLP_MIN_386();
5923
5924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5925
5926 /*
5927 * If rm is denoting a register, no more instruction bytes.
5928 */
5929 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5930 {
5931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5932 switch (pVCpu->iem.s.enmEffOpSize)
5933 {
5934 case IEMMODE_16BIT:
5935 IEM_MC_BEGIN(0, 1);
5936 IEM_MC_LOCAL(uint16_t, u16Value);
5937 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5938 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5939 IEM_MC_ADVANCE_RIP();
5940 IEM_MC_END();
5941 return VINF_SUCCESS;
5942
5943 case IEMMODE_32BIT:
5944 IEM_MC_BEGIN(0, 1);
5945 IEM_MC_LOCAL(uint32_t, u32Value);
5946 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5947 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5948 IEM_MC_ADVANCE_RIP();
5949 IEM_MC_END();
5950 return VINF_SUCCESS;
5951
5952 case IEMMODE_64BIT:
5953 IEM_MC_BEGIN(0, 1);
5954 IEM_MC_LOCAL(uint64_t, u64Value);
5955 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5956 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
5957 IEM_MC_ADVANCE_RIP();
5958 IEM_MC_END();
5959 return VINF_SUCCESS;
5960
5961 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5962 }
5963 }
5964 else
5965 {
5966 /*
5967 * We're loading a register from memory.
5968 */
5969 switch (pVCpu->iem.s.enmEffOpSize)
5970 {
5971 case IEMMODE_16BIT:
5972 IEM_MC_BEGIN(0, 2);
5973 IEM_MC_LOCAL(uint16_t, u16Value);
5974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5977 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5978 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5979 IEM_MC_ADVANCE_RIP();
5980 IEM_MC_END();
5981 return VINF_SUCCESS;
5982
5983 case IEMMODE_32BIT:
5984 IEM_MC_BEGIN(0, 2);
5985 IEM_MC_LOCAL(uint32_t, u32Value);
5986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5989 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5990 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5991 IEM_MC_ADVANCE_RIP();
5992 IEM_MC_END();
5993 return VINF_SUCCESS;
5994
5995 case IEMMODE_64BIT:
5996 IEM_MC_BEGIN(0, 2);
5997 IEM_MC_LOCAL(uint64_t, u64Value);
5998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6001 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6002 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6003 IEM_MC_ADVANCE_RIP();
6004 IEM_MC_END();
6005 return VINF_SUCCESS;
6006
6007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6008 }
6009 }
6010}
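
/*
 * The ZX micro-ops above simply zero-extend; the SX variants used by movsx
 * further down sign-extend instead.  A minimal side-by-side sketch with
 * hypothetical names:
 */
#if 0 /* documentation sketch, not built */
static uint32_t iemSketchZxU8ToU32(uint8_t u8) { return u8; }                            /* movzx: high bits cleared */
static uint32_t iemSketchSxU8ToU32(uint8_t u8) { return (uint32_t)(int32_t)(int8_t)u8; } /* movsx: high bits copy bit 7 */
#endif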
6011
6012
6013/** Opcode 0x0f 0xb7. */
6014FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6015{
6016 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6017 IEMOP_HLP_MIN_386();
6018
6019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6020
6021 /** @todo Not entirely sure how the operand-size prefix is handled here;
6022 * assuming it will be ignored. Would be nice to have a few
6023 * tests for this. */
6024 /*
6025 * If rm is denoting a register, no more instruction bytes.
6026 */
6027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6028 {
6029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6030 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6031 {
6032 IEM_MC_BEGIN(0, 1);
6033 IEM_MC_LOCAL(uint32_t, u32Value);
6034 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6035 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6036 IEM_MC_ADVANCE_RIP();
6037 IEM_MC_END();
6038 }
6039 else
6040 {
6041 IEM_MC_BEGIN(0, 1);
6042 IEM_MC_LOCAL(uint64_t, u64Value);
6043 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6044 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6045 IEM_MC_ADVANCE_RIP();
6046 IEM_MC_END();
6047 }
6048 }
6049 else
6050 {
6051 /*
6052 * We're loading a register from memory.
6053 */
6054 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6055 {
6056 IEM_MC_BEGIN(0, 2);
6057 IEM_MC_LOCAL(uint32_t, u32Value);
6058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6061 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6062 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6063 IEM_MC_ADVANCE_RIP();
6064 IEM_MC_END();
6065 }
6066 else
6067 {
6068 IEM_MC_BEGIN(0, 2);
6069 IEM_MC_LOCAL(uint64_t, u64Value);
6070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6073 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6074 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6075 IEM_MC_ADVANCE_RIP();
6076 IEM_MC_END();
6077 }
6078 }
6079 return VINF_SUCCESS;
6080}
6081
6082
6083/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6084FNIEMOP_UD_STUB(iemOp_jmpe);
6085/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6086FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6087
6088
6089/** Opcode 0x0f 0xb9. */
6090FNIEMOP_DEF(iemOp_Grp10)
6091{
6092 Log(("iemOp_Grp10 -> #UD\n"));
6093 return IEMOP_RAISE_INVALID_OPCODE();
6094}
6095
6096
6097/** Opcode 0x0f 0xba. */
6098FNIEMOP_DEF(iemOp_Grp8)
6099{
6100 IEMOP_HLP_MIN_386();
6101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6102 PCIEMOPBINSIZES pImpl;
6103 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6104 {
6105 case 0: case 1: case 2: case 3:
6106 return IEMOP_RAISE_INVALID_OPCODE();
6107 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6108 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6109 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6110 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6112 }
6113 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6114
6115 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6116 {
6117 /* register destination. */
6118 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6120
6121 switch (pVCpu->iem.s.enmEffOpSize)
6122 {
6123 case IEMMODE_16BIT:
6124 IEM_MC_BEGIN(3, 0);
6125 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6126 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6127 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6128
6129 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6130 IEM_MC_REF_EFLAGS(pEFlags);
6131 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6132
6133 IEM_MC_ADVANCE_RIP();
6134 IEM_MC_END();
6135 return VINF_SUCCESS;
6136
6137 case IEMMODE_32BIT:
6138 IEM_MC_BEGIN(3, 0);
6139 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6140 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6141 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6142
6143 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6144 IEM_MC_REF_EFLAGS(pEFlags);
6145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6146
6147 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6148 IEM_MC_ADVANCE_RIP();
6149 IEM_MC_END();
6150 return VINF_SUCCESS;
6151
6152 case IEMMODE_64BIT:
6153 IEM_MC_BEGIN(3, 0);
6154 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6155 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6156 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6157
6158 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6159 IEM_MC_REF_EFLAGS(pEFlags);
6160 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6161
6162 IEM_MC_ADVANCE_RIP();
6163 IEM_MC_END();
6164 return VINF_SUCCESS;
6165
6166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6167 }
6168 }
6169 else
6170 {
6171 /* memory destination. */
6172
6173 uint32_t fAccess;
6174 if (pImpl->pfnLockedU16)
6175 fAccess = IEM_ACCESS_DATA_RW;
6176 else /* BT */
6177 fAccess = IEM_ACCESS_DATA_R;
6178
6179 /** @todo test negative bit offsets! */
6180 switch (pVCpu->iem.s.enmEffOpSize)
6181 {
6182 case IEMMODE_16BIT:
6183 IEM_MC_BEGIN(3, 1);
6184 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6185 IEM_MC_ARG(uint16_t, u16Src, 1);
6186 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6188
6189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6190 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6191 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6192 if (pImpl->pfnLockedU16)
6193 IEMOP_HLP_DONE_DECODING();
6194 else
6195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6196 IEM_MC_FETCH_EFLAGS(EFlags);
6197 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6198 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6199 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6200 else
6201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6202 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6203
6204 IEM_MC_COMMIT_EFLAGS(EFlags);
6205 IEM_MC_ADVANCE_RIP();
6206 IEM_MC_END();
6207 return VINF_SUCCESS;
6208
6209 case IEMMODE_32BIT:
6210 IEM_MC_BEGIN(3, 1);
6211 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6212 IEM_MC_ARG(uint32_t, u32Src, 1);
6213 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6215
6216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6217 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6218 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6219 if (pImpl->pfnLockedU16)
6220 IEMOP_HLP_DONE_DECODING();
6221 else
6222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6223 IEM_MC_FETCH_EFLAGS(EFlags);
6224 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6225 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6226 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6227 else
6228 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6229 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6230
6231 IEM_MC_COMMIT_EFLAGS(EFlags);
6232 IEM_MC_ADVANCE_RIP();
6233 IEM_MC_END();
6234 return VINF_SUCCESS;
6235
6236 case IEMMODE_64BIT:
6237 IEM_MC_BEGIN(3, 1);
6238 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6239 IEM_MC_ARG(uint64_t, u64Src, 1);
6240 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6242
6243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6244 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6245 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6246 if (pImpl->pfnLockedU16)
6247 IEMOP_HLP_DONE_DECODING();
6248 else
6249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6250 IEM_MC_FETCH_EFLAGS(EFlags);
6251 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6252 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6253 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6254 else
6255 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6256 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6257
6258 IEM_MC_COMMIT_EFLAGS(EFlags);
6259 IEM_MC_ADVANCE_RIP();
6260 IEM_MC_END();
6261 return VINF_SUCCESS;
6262
6263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6264 }
6265 }
6266
6267}
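
/*
 * A hedged model of the immediate-form bit test core behind the pfnNormalUxx
 * callbacks above, using BTC as the example; BT only updates CF, while BTS
 * and BTR set or clear the selected bit respectively.  The immediate has
 * already been masked to the operand width (u8Bit & 0x1f for 32-bit).
 */
#if 0 /* documentation sketch, not built */
static void iemSketchBtcU32(uint32_t *puDst, uint32_t iBit, uint32_t *pfEFlags)
{
    if ((*puDst >> iBit) & 1)           /* CF receives the selected bit, */
        *pfEFlags |= X86_EFL_CF;
    else
        *pfEFlags &= ~X86_EFL_CF;
    *puDst ^= RT_BIT_32(iBit);          /* ...and BTC then complements it. */
}
#endif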
6268
6269
6270/** Opcode 0x0f 0xbb. */
6271FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6272{
6273 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6274 IEMOP_HLP_MIN_386();
6275 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6276}
6277
6278
6279/** Opcode 0x0f 0xbc. */
6280FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6281{
6282 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6283 IEMOP_HLP_MIN_386();
6284 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6285 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6286}
6287
6288
6289/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6290FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6291
6292
6293/** Opcode 0x0f 0xbd. */
6294FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6295{
6296 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6297 IEMOP_HLP_MIN_386();
6298 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6299 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6300}
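
/*
 * Hedged sketch of the bit scan behaviour behind g_iemAImpl_bsf: ZF=1 and
 * the destination left alone (architecturally undefined) for a zero source,
 * otherwise ZF=0 and the index of the lowest set bit.  BSR is the mirror
 * image, scanning from the top.  Hypothetical name, illustrative only.
 */
#if 0 /* documentation sketch, not built */
static void iemSketchBsfU32(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
{
    if (!uSrc)
        *pfEFlags |= X86_EFL_ZF;
    else
    {
        uint32_t iBit = 0;
        while (!(uSrc & 1))
        {
            uSrc >>= 1;
            iBit++;
        }
        *puDst = iBit;
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
#endif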
6301
6302
6303/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6304FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6305
6306
6307/** Opcode 0x0f 0xbe. */
6308FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6309{
6310 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6311 IEMOP_HLP_MIN_386();
6312
6313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6314
6315 /*
6316 * If rm is denoting a register, no more instruction bytes.
6317 */
6318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6319 {
6320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6321 switch (pVCpu->iem.s.enmEffOpSize)
6322 {
6323 case IEMMODE_16BIT:
6324 IEM_MC_BEGIN(0, 1);
6325 IEM_MC_LOCAL(uint16_t, u16Value);
6326 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6327 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6328 IEM_MC_ADVANCE_RIP();
6329 IEM_MC_END();
6330 return VINF_SUCCESS;
6331
6332 case IEMMODE_32BIT:
6333 IEM_MC_BEGIN(0, 1);
6334 IEM_MC_LOCAL(uint32_t, u32Value);
6335 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6336 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6337 IEM_MC_ADVANCE_RIP();
6338 IEM_MC_END();
6339 return VINF_SUCCESS;
6340
6341 case IEMMODE_64BIT:
6342 IEM_MC_BEGIN(0, 1);
6343 IEM_MC_LOCAL(uint64_t, u64Value);
6344 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6345 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6346 IEM_MC_ADVANCE_RIP();
6347 IEM_MC_END();
6348 return VINF_SUCCESS;
6349
6350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6351 }
6352 }
6353 else
6354 {
6355 /*
6356 * We're loading a register from memory.
6357 */
6358 switch (pVCpu->iem.s.enmEffOpSize)
6359 {
6360 case IEMMODE_16BIT:
6361 IEM_MC_BEGIN(0, 2);
6362 IEM_MC_LOCAL(uint16_t, u16Value);
6363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6366 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6367 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6368 IEM_MC_ADVANCE_RIP();
6369 IEM_MC_END();
6370 return VINF_SUCCESS;
6371
6372 case IEMMODE_32BIT:
6373 IEM_MC_BEGIN(0, 2);
6374 IEM_MC_LOCAL(uint32_t, u32Value);
6375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6378 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6379 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6380 IEM_MC_ADVANCE_RIP();
6381 IEM_MC_END();
6382 return VINF_SUCCESS;
6383
6384 case IEMMODE_64BIT:
6385 IEM_MC_BEGIN(0, 2);
6386 IEM_MC_LOCAL(uint64_t, u64Value);
6387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6390 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6391 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6392 IEM_MC_ADVANCE_RIP();
6393 IEM_MC_END();
6394 return VINF_SUCCESS;
6395
6396 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6397 }
6398 }
6399}
6400
6401
6402/** Opcode 0x0f 0xbf. */
6403FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6404{
6405 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6406 IEMOP_HLP_MIN_386();
6407
6408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6409
6410 /** @todo Not entirely sure how the operand-size prefix is handled here;
6411 * assuming it will be ignored. Would be nice to have a few
6412 * tests for this. */
6413 /*
6414 * If rm is denoting a register, no more instruction bytes.
6415 */
6416 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6417 {
6418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6419 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6420 {
6421 IEM_MC_BEGIN(0, 1);
6422 IEM_MC_LOCAL(uint32_t, u32Value);
6423 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6424 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6425 IEM_MC_ADVANCE_RIP();
6426 IEM_MC_END();
6427 }
6428 else
6429 {
6430 IEM_MC_BEGIN(0, 1);
6431 IEM_MC_LOCAL(uint64_t, u64Value);
6432 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6433 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6434 IEM_MC_ADVANCE_RIP();
6435 IEM_MC_END();
6436 }
6437 }
6438 else
6439 {
6440 /*
6441 * We're loading a register from memory.
6442 */
6443 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6444 {
6445 IEM_MC_BEGIN(0, 2);
6446 IEM_MC_LOCAL(uint32_t, u32Value);
6447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6450 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6451 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6452 IEM_MC_ADVANCE_RIP();
6453 IEM_MC_END();
6454 }
6455 else
6456 {
6457 IEM_MC_BEGIN(0, 2);
6458 IEM_MC_LOCAL(uint64_t, u64Value);
6459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6463 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6464 IEM_MC_ADVANCE_RIP();
6465 IEM_MC_END();
6466 }
6467 }
6468 return VINF_SUCCESS;
6469}
6470
6471
6472/** Opcode 0x0f 0xc0. */
6473FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6474{
6475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6476 IEMOP_HLP_MIN_486();
6477 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6478
6479 /*
6480 * If rm is denoting a register, no more instruction bytes.
6481 */
6482 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6483 {
6484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6485
6486 IEM_MC_BEGIN(3, 0);
6487 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6488 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6489 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6490
6491 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6492 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6493 IEM_MC_REF_EFLAGS(pEFlags);
6494 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6495
6496 IEM_MC_ADVANCE_RIP();
6497 IEM_MC_END();
6498 }
6499 else
6500 {
6501 /*
6502 * We're accessing memory.
6503 */
6504 IEM_MC_BEGIN(3, 3);
6505 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6506 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6507 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6508 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6510
6511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6512 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6513 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6514 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6515 IEM_MC_FETCH_EFLAGS(EFlags);
6516 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6517 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6518 else
6519 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6520
6521 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6522 IEM_MC_COMMIT_EFLAGS(EFlags);
6523 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6524 IEM_MC_ADVANCE_RIP();
6525 IEM_MC_END();
6526 return VINF_SUCCESS;
6527 }
6528 return VINF_SUCCESS;
6529}
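
/*
 * A hedged model of the exchange-and-add core behind iemAImpl_xadd_u8 and
 * friends: the destination receives the sum and the register operand the
 * old destination value.  The real worker also updates the arithmetic flags
 * as for ADD, which this sketch omits.  Hypothetical name.
 */
#if 0 /* documentation sketch, not built */
static void iemSketchXaddU8(uint8_t *puDst, uint8_t *puReg, uint32_t *pfEFlags)
{
    uint8_t const uDstOld = *puDst;
    *puDst = uDstOld + *puReg;      /* destination = old destination + register */
    *puReg = uDstOld;               /* register    = old destination */
    (void)pfEFlags;                 /* ADD-style OF/SF/ZF/AF/PF/CF updates omitted here */
}
#endif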
6530
6531
6532/** Opcode 0x0f 0xc1. */
6533FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6534{
6535 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6536 IEMOP_HLP_MIN_486();
6537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6538
6539 /*
6540 * If rm is denoting a register, no more instruction bytes.
6541 */
6542 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6543 {
6544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6545
6546 switch (pVCpu->iem.s.enmEffOpSize)
6547 {
6548 case IEMMODE_16BIT:
6549 IEM_MC_BEGIN(3, 0);
6550 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6551 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6552 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6553
6554 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6555 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6556 IEM_MC_REF_EFLAGS(pEFlags);
6557 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6558
6559 IEM_MC_ADVANCE_RIP();
6560 IEM_MC_END();
6561 return VINF_SUCCESS;
6562
6563 case IEMMODE_32BIT:
6564 IEM_MC_BEGIN(3, 0);
6565 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6566 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6567 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6568
6569 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6570 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6571 IEM_MC_REF_EFLAGS(pEFlags);
6572 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6573
6574 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6575 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6576 IEM_MC_ADVANCE_RIP();
6577 IEM_MC_END();
6578 return VINF_SUCCESS;
6579
6580 case IEMMODE_64BIT:
6581 IEM_MC_BEGIN(3, 0);
6582 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6583 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6584 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6585
6586 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6587 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6588 IEM_MC_REF_EFLAGS(pEFlags);
6589 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6590
6591 IEM_MC_ADVANCE_RIP();
6592 IEM_MC_END();
6593 return VINF_SUCCESS;
6594
6595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6596 }
6597 }
6598 else
6599 {
6600 /*
6601 * We're accessing memory.
6602 */
6603 switch (pVCpu->iem.s.enmEffOpSize)
6604 {
6605 case IEMMODE_16BIT:
6606 IEM_MC_BEGIN(3, 3);
6607 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6608 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6609 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6610 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6612
6613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6614 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6615 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6616 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6617 IEM_MC_FETCH_EFLAGS(EFlags);
6618 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6619 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6620 else
6621 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6622
6623 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6624 IEM_MC_COMMIT_EFLAGS(EFlags);
6625 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6626 IEM_MC_ADVANCE_RIP();
6627 IEM_MC_END();
6628 return VINF_SUCCESS;
6629
6630 case IEMMODE_32BIT:
6631 IEM_MC_BEGIN(3, 3);
6632 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6633 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6634 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6635 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6637
6638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6639 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6640 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6641 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6642 IEM_MC_FETCH_EFLAGS(EFlags);
6643 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6644 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6645 else
6646 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6647
6648 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6649 IEM_MC_COMMIT_EFLAGS(EFlags);
6650 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6651 IEM_MC_ADVANCE_RIP();
6652 IEM_MC_END();
6653 return VINF_SUCCESS;
6654
6655 case IEMMODE_64BIT:
6656 IEM_MC_BEGIN(3, 3);
6657 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6658 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6659 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6660 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6662
6663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6664 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6665 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6666 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6667 IEM_MC_FETCH_EFLAGS(EFlags);
6668 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6669 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6670 else
6671 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6672
6673 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6674 IEM_MC_COMMIT_EFLAGS(EFlags);
6675 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6676 IEM_MC_ADVANCE_RIP();
6677 IEM_MC_END();
6678 return VINF_SUCCESS;
6679
6680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6681 }
6682 }
6683}
6684
6685
6686/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
6687FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
6688/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
6689FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
6690/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
6691FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
6692/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
6693FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
6694
6695
6696/** Opcode 0x0f 0xc3. */
6697FNIEMOP_DEF(iemOp_movnti_My_Gy)
6698{
6699 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6700
6701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6702
6703 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6704 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6705 {
6706 switch (pVCpu->iem.s.enmEffOpSize)
6707 {
6708 case IEMMODE_32BIT:
6709 IEM_MC_BEGIN(0, 2);
6710 IEM_MC_LOCAL(uint32_t, u32Value);
6711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6712
6713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6715 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6716 return IEMOP_RAISE_INVALID_OPCODE();
6717
6718 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6719 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6720 IEM_MC_ADVANCE_RIP();
6721 IEM_MC_END();
6722 break;
6723
6724 case IEMMODE_64BIT:
6725 IEM_MC_BEGIN(0, 2);
6726 IEM_MC_LOCAL(uint64_t, u64Value);
6727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6728
6729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6731 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6732 return IEMOP_RAISE_INVALID_OPCODE();
6733
6734 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6735 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6736 IEM_MC_ADVANCE_RIP();
6737 IEM_MC_END();
6738 break;
6739
6740 case IEMMODE_16BIT:
6741 /** @todo check this form. */
6742 return IEMOP_RAISE_INVALID_OPCODE();
6743 }
6744 }
6745 else
6746 return IEMOP_RAISE_INVALID_OPCODE();
6747 return VINF_SUCCESS;
6748}
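
/*
 * MOVNTI's non-temporal hint has no architecturally visible effect, so IEM
 * emulates it as a plain store above.  For contrast, a hedged sketch of how
 * native code could emit the actual non-temporal store via the SSE2
 * intrinsic:
 */
#if 0 /* documentation sketch, not built */
#include <emmintrin.h>
static void iemSketchMovntiU32(uint32_t *pu32Dst, uint32_t u32Value)
{
    _mm_stream_si32((int *)pu32Dst, (int)u32Value); /* compiles to movnti */
}
#endif
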
6749/* Opcode 0x66 0x0f 0xc3 - invalid */
6750/* Opcode 0xf3 0x0f 0xc3 - invalid */
6751/* Opcode 0xf2 0x0f 0xc3 - invalid */
6752
6753/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
6754FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
6755/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6756FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
6757/* Opcode 0xf3 0x0f 0xc4 - invalid */
6758/* Opcode 0xf2 0x0f 0xc4 - invalid */
6759
6760/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
6761FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
6762/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
6763FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
6764/* Opcode 0xf3 0x0f 0xc5 - invalid */
6765/* Opcode 0xf2 0x0f 0xc5 - invalid */
6766
6767/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
6768FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
6769/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6770FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
6771/* Opcode 0xf3 0x0f 0xc6 - invalid */
6772/* Opcode 0xf2 0x0f 0xc6 - invalid */
6773
6774
6775/** Opcode 0x0f 0xc7 !11/1. */
6776FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6777{
6778 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6779
6780 IEM_MC_BEGIN(4, 3);
6781 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6782 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6783 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6784 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6785 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6786 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6788
6789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6790 IEMOP_HLP_DONE_DECODING();
6791 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6792
6793 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6794 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6795 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6796
6797 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6798 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6799 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6800
6801 IEM_MC_FETCH_EFLAGS(EFlags);
6802 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6803 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6804 else
6805 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6806
6807 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6808 IEM_MC_COMMIT_EFLAGS(EFlags);
6809 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6810 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6811 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6812 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6813 IEM_MC_ENDIF();
6814 IEM_MC_ADVANCE_RIP();
6815
6816 IEM_MC_END();
6817 return VINF_SUCCESS;
6818}
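
/*
 * A hedged model of the 8-byte compare-and-exchange behind
 * iemAImpl_cmpxchg8b: EDX:EAX is compared with the memory operand; on a
 * match ECX:EBX is stored and ZF set, otherwise EDX:EAX receives the memory
 * value and ZF is cleared.  Illustrative only, hypothetical name.
 */
#if 0 /* documentation sketch, not built */
static void iemSketchCmpXchg8b(uint64_t *puDst, uint64_t *puEaxEdx, uint64_t uEbxEcx, uint32_t *pfEFlags)
{
    if (*puDst == *puEaxEdx)
    {
        *puDst = uEbxEcx;
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *puEaxEdx = *puDst;
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
#endif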
6819
6820
6821/** Opcode REX.W 0x0f 0xc7 !11/1. */
6822FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6823{
6824 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6825 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6826 {
6827#if 0
6828 RT_NOREF(bRm);
6829 IEMOP_BITCH_ABOUT_STUB();
6830 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6831#else
6832 IEM_MC_BEGIN(4, 3);
6833 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6834 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6835 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6836 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6837 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6838 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6840
6841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6842 IEMOP_HLP_DONE_DECODING();
6843 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6844 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6845
6846 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6847 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6848 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6849
6850 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6851 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6852 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6853
6854 IEM_MC_FETCH_EFLAGS(EFlags);
6855# ifdef RT_ARCH_AMD64
6856 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6857 {
6858 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6859 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6860 else
6861 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6862 }
6863 else
6864# endif
6865 {
 6866 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
 6867 accesses and is not at all atomic, which works fine in a UNI CPU guest
 6868 configuration (ignoring DMA). If guest SMP is active we have no choice
 6869 but to use a rendezvous callback here. Sigh. */
6870 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6871 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6872 else
6873 {
6874 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6875 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6876 }
6877 }
6878
6879 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6880 IEM_MC_COMMIT_EFLAGS(EFlags);
6881 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6882 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6883 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6884 IEM_MC_ENDIF();
6885 IEM_MC_ADVANCE_RIP();
6886
6887 IEM_MC_END();
6888 return VINF_SUCCESS;
6889#endif
6890 }
6891 Log(("cmpxchg16b -> #UD\n"));
6892 return IEMOP_RAISE_INVALID_OPCODE();
6893}
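
/* Illustration (not part of the original file): what the non-atomic fallback
   above effectively does, using the RTUINT128U accessors seen earlier. Without
   hardware CMPXCHG16B the compare and the store are separate memory accesses,
   which is only safe while a single guest CPU can touch the location (hence
   the rendezvous wrapper for SMP guests). Names are hypothetical. */
#if 0
static void iemRefCmpXchg16bNonAtomic(PRTUINT128U pu128Mem, PRTUINT128U pu128RaxRdx,
                                      PCRTUINT128U pu128RbxRcx, bool *pfZf)
{
    if (   pu128Mem->s.Lo == pu128RaxRdx->s.Lo
        && pu128Mem->s.Hi == pu128RaxRdx->s.Hi)
    {
        *pu128Mem = *pu128RbxRcx;   /* not atomic wrt the compare above! */
        *pfZf     = true;
    }
    else
    {
        *pu128RaxRdx = *pu128Mem;
        *pfZf        = false;
    }
}
#endif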
6894
6895
6896/** Opcode 0x0f 0xc7 11/6. */
6897FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6898
6899/** Opcode 0x0f 0xc7 !11/6. */
6900FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6901
6902/** Opcode 0x66 0x0f 0xc7 !11/6. */
6903FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6904
6905/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6906FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6907
6908/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6909FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6910
6911
6912/** Opcode 0x0f 0xc7. */
6913FNIEMOP_DEF(iemOp_Grp9)
6914{
6915 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6917 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6918 {
6919 case 0: case 2: case 3: case 4: case 5:
6920 return IEMOP_RAISE_INVALID_OPCODE();
6921 case 1:
6922 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6923 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6924 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6925 return IEMOP_RAISE_INVALID_OPCODE();
6926 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6927 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6928 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6929 case 6:
6930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6931 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6932 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6933 {
6934 case 0:
6935 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6936 case IEM_OP_PRF_SIZE_OP:
6937 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6938 case IEM_OP_PRF_REPZ:
6939 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6940 default:
6941 return IEMOP_RAISE_INVALID_OPCODE();
6942 }
6943 case 7:
6944 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6945 {
6946 case 0:
6947 case IEM_OP_PRF_REPZ:
6948 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6949 default:
6950 return IEMOP_RAISE_INVALID_OPCODE();
6951 }
6952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6953 }
6954}
6955
6956
6957/**
6958 * Common 'bswap register' helper.
6959 */
6960FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6961{
6962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6963 switch (pVCpu->iem.s.enmEffOpSize)
6964 {
6965 case IEMMODE_16BIT:
6966 IEM_MC_BEGIN(1, 0);
6967 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6968 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6969 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6970 IEM_MC_ADVANCE_RIP();
6971 IEM_MC_END();
6972 return VINF_SUCCESS;
6973
6974 case IEMMODE_32BIT:
6975 IEM_MC_BEGIN(1, 0);
6976 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6977 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6978 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6979 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6980 IEM_MC_ADVANCE_RIP();
6981 IEM_MC_END();
6982 return VINF_SUCCESS;
6983
6984 case IEMMODE_64BIT:
6985 IEM_MC_BEGIN(1, 0);
6986 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6987 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6988 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6989 IEM_MC_ADVANCE_RIP();
6990 IEM_MC_END();
6991 return VINF_SUCCESS;
6992
6993 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6994 }
6995}
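
/* Illustration (not part of the original file): a plain-C reference for the
   byte swapping done by the iemAImpl_bswap_u32/u64 workers (assumed semantics;
   the 16-bit form is skipped since its result is architecturally undefined). */
#if 0
static uint32_t iemRefBswapU32(uint32_t u)
{
    return (u << 24)
         | ((u & UINT32_C(0x0000ff00)) << 8)
         | ((u >> 8) & UINT32_C(0x0000ff00))
         | (u >> 24);
}

static uint64_t iemRefBswapU64(uint64_t u)
{
    return ((uint64_t)iemRefBswapU32((uint32_t)u) << 32)
         | iemRefBswapU32((uint32_t)(u >> 32));
}
#endif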
6996
6997
6998/** Opcode 0x0f 0xc8. */
6999FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7000{
7001 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
 7002 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
 7003 prefix. It appears REX.B is the correct prefix. For a parallel
 7004 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7005 IEMOP_HLP_MIN_486();
7006 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7007}
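
/* Example (informational): 0f c8 encodes "bswap eax"; adding REX.B (41 0f c8)
   selects r8d, and REX.W+REX.B (49 0f c8) yields "bswap r8". */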
7008
7009
7010/** Opcode 0x0f 0xc9. */
7011FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7012{
7013 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7014 IEMOP_HLP_MIN_486();
7015 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7016}
7017
7018
7019/** Opcode 0x0f 0xca. */
7020FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7021{
 7022 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7023 IEMOP_HLP_MIN_486();
7024 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7025}
7026
7027
7028/** Opcode 0x0f 0xcb. */
7029FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7030{
 7031 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7032 IEMOP_HLP_MIN_486();
7033 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7034}
7035
7036
7037/** Opcode 0x0f 0xcc. */
7038FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7039{
7040 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7041 IEMOP_HLP_MIN_486();
7042 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7043}
7044
7045
7046/** Opcode 0x0f 0xcd. */
7047FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7048{
7049 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7050 IEMOP_HLP_MIN_486();
7051 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7052}
7053
7054
7055/** Opcode 0x0f 0xce. */
7056FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7057{
7058 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7059 IEMOP_HLP_MIN_486();
7060 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7061}
7062
7063
7064/** Opcode 0x0f 0xcf. */
7065FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7066{
7067 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7068 IEMOP_HLP_MIN_486();
7069 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7070}
7071
7072
7073/* Opcode 0x0f 0xd0 - invalid */
7074/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7075FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7076/* Opcode 0xf3 0x0f 0xd0 - invalid */
7077/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7078FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7079
7080/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7081FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7082/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7083FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7084/* Opcode 0xf3 0x0f 0xd1 - invalid */
7085/* Opcode 0xf2 0x0f 0xd1 - invalid */
7086
7087/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7088FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7089/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7090FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7091/* Opcode 0xf3 0x0f 0xd2 - invalid */
7092/* Opcode 0xf2 0x0f 0xd2 - invalid */
7093
7094/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7095FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7096/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7097FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7098/* Opcode 0xf3 0x0f 0xd3 - invalid */
7099/* Opcode 0xf2 0x0f 0xd3 - invalid */
7100
7101/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7102FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7103/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7104FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7105/* Opcode 0xf3 0x0f 0xd4 - invalid */
7106/* Opcode 0xf2 0x0f 0xd4 - invalid */
7107
7108/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7109FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7110/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7111FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7112/* Opcode 0xf3 0x0f 0xd5 - invalid */
7113/* Opcode 0xf2 0x0f 0xd5 - invalid */
7114
7115/* Opcode 0x0f 0xd6 - invalid */
7116/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7117FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7118/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7119FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7120/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7121FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7122#if 0
7123FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7124{
 7125 /* The docs say register only. */
7126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7127
7128 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7129 {
7130 case IEM_OP_PRF_SIZE_OP: /* SSE */
7131 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7132 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7133 IEM_MC_BEGIN(2, 0);
7134 IEM_MC_ARG(uint64_t *, pDst, 0);
7135 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7136 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7137 IEM_MC_PREPARE_SSE_USAGE();
7138 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7139 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7140 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7141 IEM_MC_ADVANCE_RIP();
7142 IEM_MC_END();
7143 return VINF_SUCCESS;
7144
7145 case 0: /* MMX */
 7146 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7147 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7148 IEM_MC_BEGIN(2, 0);
7149 IEM_MC_ARG(uint64_t *, pDst, 0);
7150 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7151 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7152 IEM_MC_PREPARE_FPU_USAGE();
7153 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7154 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7155 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7156 IEM_MC_ADVANCE_RIP();
7157 IEM_MC_END();
7158 return VINF_SUCCESS;
7159
7160 default:
7161 return IEMOP_RAISE_INVALID_OPCODE();
7162 }
7163}
7164#endif
7165
7166
7167/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7168FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7169{
 7170 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
 7171 /** @todo testcase: Check that the instruction implicitly clears the high
 7172 * bits in 64-bit mode. REX.W first becomes necessary when VLMAX > 256
 7173 * and opcode modifications are made to work with the whole width (not
 7174 * just 128). */
 7175 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
 7176 /* The docs say register only. */
7177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7178 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7179 {
7180 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7181 IEM_MC_BEGIN(2, 0);
7182 IEM_MC_ARG(uint64_t *, pDst, 0);
7183 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7184 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7185 IEM_MC_PREPARE_FPU_USAGE();
7186 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7187 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7188 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7189 IEM_MC_ADVANCE_RIP();
7190 IEM_MC_END();
7191 return VINF_SUCCESS;
7192 }
7193 return IEMOP_RAISE_INVALID_OPCODE();
7194}
7195
7196/** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7197FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7198{
 7199 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
 7200 /** @todo testcase: Check that the instruction implicitly clears the high
 7201 * bits in 64-bit mode. REX.W first becomes necessary when VLMAX > 256
 7202 * and opcode modifications are made to work with the whole width (not
 7203 * just 128). */
 7204 IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
 7205 /* The docs say register only. */
7206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7207 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7208 {
7209 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7210 IEM_MC_BEGIN(2, 0);
7211 IEM_MC_ARG(uint64_t *, pDst, 0);
7212 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7213 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7214 IEM_MC_PREPARE_SSE_USAGE();
7215 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7216 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7217 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7218 IEM_MC_ADVANCE_RIP();
7219 IEM_MC_END();
7220 return VINF_SUCCESS;
7221 }
7222 return IEMOP_RAISE_INVALID_OPCODE();
7223}
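
/* Illustration (not part of the original file): assumed reference semantics of
   pmovmskb on a 64-bit MMX register -- gather the most significant bit of each
   packed byte into the low bits of the destination. The 128-bit SSE form above
   does the same over 16 bytes. */
#if 0
static uint64_t iemRefPMovMskBU64(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif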
7224
7225/* Opcode 0xf3 0x0f 0xd7 - invalid */
7226/* Opcode 0xf2 0x0f 0xd7 - invalid */
7227
7228
7229/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7230FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7231/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7232FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7233/* Opcode 0xf3 0x0f 0xd8 - invalid */
7234/* Opcode 0xf2 0x0f 0xd8 - invalid */
7235
7236/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7237FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7238/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7239FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7240/* Opcode 0xf3 0x0f 0xd9 - invalid */
7241/* Opcode 0xf2 0x0f 0xd9 - invalid */
7242
7243/** Opcode 0x0f 0xda - pminub Pq, Qq */
7244FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7245/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7246FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7247/* Opcode 0xf3 0x0f 0xda - invalid */
7248/* Opcode 0xf2 0x0f 0xda - invalid */
7249
7250/** Opcode 0x0f 0xdb - pand Pq, Qq */
7251FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7252/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7253FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7254/* Opcode 0xf3 0x0f 0xdb - invalid */
7255/* Opcode 0xf2 0x0f 0xdb - invalid */
7256
7257/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7258FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7259/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7260FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7261/* Opcode 0xf3 0x0f 0xdc - invalid */
7262/* Opcode 0xf2 0x0f 0xdc - invalid */
7263
7264/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7265FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7266/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7267FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7268/* Opcode 0xf3 0x0f 0xdd - invalid */
7269/* Opcode 0xf2 0x0f 0xdd - invalid */
7270
7271/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7272FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7273/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7274FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7275/* Opcode 0xf3 0x0f 0xde - invalid */
7276/* Opcode 0xf2 0x0f 0xde - invalid */
7277
7278/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7279FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7280/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7281FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7282/* Opcode 0xf3 0x0f 0xdf - invalid */
7283/* Opcode 0xf2 0x0f 0xdf - invalid */
7284
7285/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7286FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7287/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7288FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7289/* Opcode 0xf3 0x0f 0xe0 - invalid */
7290/* Opcode 0xf2 0x0f 0xe0 - invalid */
7291
7292/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7293FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7294/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7295FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7296/* Opcode 0xf3 0x0f 0xe1 - invalid */
7297/* Opcode 0xf2 0x0f 0xe1 - invalid */
7298
7299/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7300FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7301/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7302FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7303/* Opcode 0xf3 0x0f 0xe2 - invalid */
7304/* Opcode 0xf2 0x0f 0xe2 - invalid */
7305
7306/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7307FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7308/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7309FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7310/* Opcode 0xf3 0x0f 0xe3 - invalid */
7311/* Opcode 0xf2 0x0f 0xe3 - invalid */
7312
7313/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7314FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7315/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7316FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7317/* Opcode 0xf3 0x0f 0xe4 - invalid */
7318/* Opcode 0xf2 0x0f 0xe4 - invalid */
7319
7320/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7321FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7322/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7323FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7324/* Opcode 0xf3 0x0f 0xe5 - invalid */
7325/* Opcode 0xf2 0x0f 0xe5 - invalid */
7326
7327/* Opcode 0x0f 0xe6 - invalid */
7328/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7329FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7330/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7331FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7332/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7333FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7334
7335
7336/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7337FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7338{
7339 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7341 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7342 {
7343 /* Register, memory. */
7344 IEM_MC_BEGIN(0, 2);
7345 IEM_MC_LOCAL(uint64_t, uSrc);
7346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7347
7348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7350 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7351 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7352
7353 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7354 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7355
7356 IEM_MC_ADVANCE_RIP();
7357 IEM_MC_END();
7358 return VINF_SUCCESS;
7359 }
7360 /* The register, register encoding is invalid. */
7361 return IEMOP_RAISE_INVALID_OPCODE();
7362}
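
/* Note (informational): the non-temporal hint of movntq only concerns the cache
   hierarchy, so an interpreter may treat it as a plain 64-bit store, as the
   code above does. */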
7363
7364/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7365FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7366{
7367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7368 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7369 {
7370 /* Register, memory. */
7371 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7372 IEM_MC_BEGIN(0, 2);
7373 IEM_MC_LOCAL(uint128_t, uSrc);
7374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7375
7376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7378 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7380
7381 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7382 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7383
7384 IEM_MC_ADVANCE_RIP();
7385 IEM_MC_END();
7386 return VINF_SUCCESS;
7387 }
7388
7389 /* The register, register encoding is invalid. */
7390 return IEMOP_RAISE_INVALID_OPCODE();
7391}
7392
7393/* Opcode 0xf3 0x0f 0xe7 - invalid */
7394/* Opcode 0xf2 0x0f 0xe7 - invalid */
7395
7396
7397/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7398FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7399/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7400FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7401/* Opcode 0xf3 0x0f 0xe8 - invalid */
7402/* Opcode 0xf2 0x0f 0xe8 - invalid */
7403
7404/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7405FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7406/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7407FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7408/* Opcode 0xf3 0x0f 0xe9 - invalid */
7409/* Opcode 0xf2 0x0f 0xe9 - invalid */
7410
7411/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7412FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7413/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7414FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7415/* Opcode 0xf3 0x0f 0xea - invalid */
7416/* Opcode 0xf2 0x0f 0xea - invalid */
7417
7418/** Opcode 0x0f 0xeb - por Pq, Qq */
7419FNIEMOP_STUB(iemOp_por_Pq_Qq);
7420/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7421FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7422/* Opcode 0xf3 0x0f 0xeb - invalid */
7423/* Opcode 0xf2 0x0f 0xeb - invalid */
7424
7425/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7426FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7427/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7428FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7429/* Opcode 0xf3 0x0f 0xec - invalid */
7430/* Opcode 0xf2 0x0f 0xec - invalid */
7431
7432/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7433FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7434/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7435FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7436/* Opcode 0xf3 0x0f 0xed - invalid */
7437/* Opcode 0xf2 0x0f 0xed - invalid */
7438
7439/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7440FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7441/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7442FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7443/* Opcode 0xf3 0x0f 0xee - invalid */
7444/* Opcode 0xf2 0x0f 0xee - invalid */
7445
7446
7447/** Opcode 0x0f 0xef - pxor Pq, Qq */
7448FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7449{
7450 IEMOP_MNEMONIC(pxor, "pxor");
7451 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7452}
7453
7454/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7455FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7456{
7457 IEMOP_MNEMONIC(vpxor, "vpxor");
7458 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7459}
7460
7461/* Opcode 0xf3 0x0f 0xef - invalid */
7462/* Opcode 0xf2 0x0f 0xef - invalid */
7463
7464/* Opcode 0x0f 0xf0 - invalid */
7465/* Opcode 0x66 0x0f 0xf0 - invalid */
7466/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7467FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7468
7469/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7470FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7471/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7472FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7473/* Opcode 0xf2 0x0f 0xf1 - invalid */
7474
7475/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7476FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7477/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7478FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7479/* Opcode 0xf2 0x0f 0xf2 - invalid */
7480
7481/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7482FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7483/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7484FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7485/* Opcode 0xf2 0x0f 0xf3 - invalid */
7486
7487/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7488FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7489/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7490FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7491/* Opcode 0xf2 0x0f 0xf4 - invalid */
7492
7493/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7494FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7495/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7496FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7497/* Opcode 0xf2 0x0f 0xf5 - invalid */
7498
7499/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7500FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7501/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7502FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7503/* Opcode 0xf2 0x0f 0xf6 - invalid */
7504
7505/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7506FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7507/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7508FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7509/* Opcode 0xf2 0x0f 0xf7 - invalid */
7510
7511/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7512FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7513/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7514FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7515/* Opcode 0xf2 0x0f 0xf8 - invalid */
7516
7517/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7518FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7519/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7520FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7521/* Opcode 0xf2 0x0f 0xf9 - invalid */
7522
7523/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7524FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7525/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7526FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7527/* Opcode 0xf2 0x0f 0xfa - invalid */
7528
7529/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7530FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7531/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7532FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7533/* Opcode 0xf2 0x0f 0xfb - invalid */
7534
7535/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7536FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7537/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7538FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7539/* Opcode 0xf2 0x0f 0xfc - invalid */
7540
7541/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7542FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7543/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7544FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7545/* Opcode 0xf2 0x0f 0xfd - invalid */
7546
7547/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7548FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7549/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7550FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7551/* Opcode 0xf2 0x0f 0xfe - invalid */
7552
7553
7554/** Opcode **** 0x0f 0xff - UD0 */
7555FNIEMOP_DEF(iemOp_ud0)
7556{
7557 IEMOP_MNEMONIC(ud0, "ud0");
7558 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7559 {
7560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7561#ifndef TST_IEM_CHECK_MC
7562 RTGCPTR GCPtrEff;
7563 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7564 if (rcStrict != VINF_SUCCESS)
7565 return rcStrict;
7566#endif
7567 IEMOP_HLP_DONE_DECODING();
7568 }
7569 return IEMOP_RAISE_INVALID_OPCODE();
7570}
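
/* Note (informational): Intel CPUs consume a ModR/M byte (and any displacement
   it implies) for UD0 before raising #UD, which is why the effective address
   is calculated above for the Intel vendor only. */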
7571
7572
7573
7574/** Repeats a_fn four times. For decoding tables. */
7575#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
7576
7577IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7578{
 7579 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
7580 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7581 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7582 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7583 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7584 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7585 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7586 /* 0x06 */ IEMOP_X4(iemOp_clts),
7587 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7588 /* 0x08 */ IEMOP_X4(iemOp_invd),
7589 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7590 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7591 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7592 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7593 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7594 /* 0x0e */ IEMOP_X4(iemOp_femms),
7595 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7596
7597 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7598 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7599 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7600 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7601 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7602 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7603 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7604 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7605 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7606 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7607 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7608 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7609 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7610 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7611 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7612 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7613
7614 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7615 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7616 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7617 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7618 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7619 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7620 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7621 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7622 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7623 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
7624 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7625 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7626 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7627 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7628 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7629 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7630
7631 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7632 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7633 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7634 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7635 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7636 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7637 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7638 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7639 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7640 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7641 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7642 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7643 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7644 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7645 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7646 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7647
7648 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7649 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7650 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7651 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7652 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7653 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7654 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7655 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7656 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7657 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7658 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7659 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7660 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7661 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7662 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7663 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7664
7665 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7666 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7667 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7668 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7669 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7670 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7671 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7672 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7673 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7674 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7675 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7676 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7677 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7678 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7679 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7680 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7681
7682 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7683 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7684 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7685 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7686 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7687 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7688 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7689 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7690 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7691 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7692 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7693 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7694 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7695 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7696 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7697 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7698
7699 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7700 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7701 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7702 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7703 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7704 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7705 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7706 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7707
7708 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7709 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7710 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7711 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7712 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7713 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7714 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7715 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7716
7717 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7718 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7719 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7720 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7721 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7722 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7723 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7724 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7725 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7726 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7727 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7728 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7729 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7730 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7731 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7732 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7733
7734 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7735 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7736 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7737 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7738 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7739 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7740 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7741 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7742 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7743 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7744 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7745 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7746 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7747 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7748 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7749 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7750
7751 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7752 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7753 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7754 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7755 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7756 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7757 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7758 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7759 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7760 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7761 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7762 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7763 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7764 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7765 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7766 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7767
7768 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7769 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7770 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7771 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7772 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7773 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7774 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7775 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7776 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7777 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7778 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7779 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7780 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7781 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7782 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7783 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7784
7785 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7786 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7787 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7788 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7789 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7790 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7791 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7792 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7793 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7794 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7795 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7796 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7797 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7798 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7799 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7800 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7801
7802 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7803 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7804 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7805 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7806 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7807 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7808 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7809 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7810 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7811 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7812 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7813 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7814 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7815 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7816 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7817 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7818
7819 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7820 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7821 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7822 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7823 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7824 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7825 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7826 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7827 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7828 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7829 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7830 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7831 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7832 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7833 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7834 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7835
7836 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7837 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7838 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7839 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7840 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7841 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7842 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7843 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7844 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7845 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7846 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7847 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7848 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7849 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7850 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7851 /* 0xff */ IEMOP_X4(iemOp_ud0),
7852};
7853AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
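
/* Illustration (not part of the original file): assumed lookup scheme for the
   1024-entry map above -- four handlers per opcode byte, one per mandatory
   prefix column (none, 066h, 0f3h, 0f2h). idxPrefix is a hypothetical 0..3
   column index derived from the decoded prefixes. */
#if 0
DECLINLINE(PFNIEMOP) iemLookupTwoByteHandler(uint8_t bOpcode, unsigned idxPrefix)
{
    Assert(idxPrefix < 4);
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif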
7854/** @} */
7855
7856