VirtualBox — vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ r66474
Last change in r66474 (vboxsync): IEM: Stubbed the three byte opcode tables.
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66474 2017-04-07 13:18:29Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR and VERW, opcode 0x0f 0x00 /4 and /5. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
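
/*
 * Editor's note: an illustrative, self-contained sketch of the ModR/M split
 * used by the group dispatchers in this file.  The shift/mask values below
 * are spelled out as plain constants matching the conventional encoding
 * (mod in bits 7:6, reg in bits 5:3, r/m in bits 2:0); the real code uses
 * the X86_MODRM_* definitions instead.  Illustration only, not part of the
 * original file.
 */
#if 0 /* illustration only */
static void iemSketchDecodeModRM(uint8_t bRm)
{
    uint8_t const iMod = (bRm >> 6) & 3; /* 3 means register operand, 0..2 a memory form */
    uint8_t const iReg = (bRm >> 3) & 7; /* selects the /0../7 member, cf. g_apfnGroup6  */
    uint8_t const iRm  =  bRm       & 7; /* register index, or base/index addressing form */
    NOREF(iMod); NOREF(iReg); NOREF(iRm);
}
#endif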


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
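
/*
 * Editor's note: an illustrative sketch of the MSW fix-ups applied above,
 * in plain C rather than IEM_MC microcode (assumed semantics; illustration
 * only, not part of the original file).  On a 286 the bits above TS read as
 * ones, on a 386 only ET (bit 4) is additionally implemented, and later
 * CPUs return the low word of CR0 unmodified.
 */
#if 0 /* illustration only */
static uint16_t iemSketchSmswValue(uint32_t uCr0, unsigned uTargetCpu /* 286, 386, 486, ... */)
{
    uint16_t uMsw = (uint16_t)uCr0;
    if (uTargetCpu < 386)
        uMsw |= 0xfff0;     /* 286: bits 4..15 read as ones. */
    else if (uTargetCpu == 386)
        uMsw |= 0xffe0;     /* 386: bits 5..15 read as ones, ET (bit 4) is real. */
    return uMsw;
}
#endif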


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
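
/*
 * Editor's note: the mod=3 sub-decode of 0x0f 0x01 implemented above,
 * flattened into a name lookup for readability (illustration only, not
 * part of the original file; mirrors the switch in iemOp_Grp7, with "#UD"
 * standing for IEMOP_RAISE_INVALID_OPCODE).
 */
#if 0 /* illustration only */
static const char *iemSketchGrp7RegFormName(uint8_t iReg, uint8_t iRm)
{
    static const char * const s_apszSvm[8] =
    { "vmrun", "vmmcall", "vmload", "vmsave", "stgi", "clgi", "skinit", "invlpga" };
    switch (iReg)
    {
        case 0:  return iRm == 1 ? "vmcall" : iRm == 2 ? "vmlaunch"
                      : iRm == 3 ? "vmresume" : iRm == 4 ? "vmxoff" : "#UD";
        case 1:  return iRm == 0 ? "monitor" : iRm == 1 ? "mwait"  : "#UD";
        case 2:  return iRm == 0 ? "xgetbv"  : iRm == 1 ? "xsetbv" : "#UD";
        case 3:  return s_apszSvm[iRm];      /* AMD SVM row, all eight used. */
        case 4:  return "smsw";              /* any r/m value */
        case 6:  return "lmsw";              /* any r/m value */
        case 7:  return iRm == 0 ? "swapgs" : iRm == 1 ? "rdtscp" : "#UD";
        default: return "#UD";
    }
}
#endif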

/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/** Opcode 0x0f 0x10 - movups Vps, Wps */
FNIEMOP_STUB(iemOp_movups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - movupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_movupd_Vpd_Wpd);


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
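
/*
 * Editor's note: an illustrative summary of the two MOVSS load paths above,
 * in plain C (assumed semantics; illustration only, not part of the original
 * file).  Register-to-register only replaces the low dword of the destination,
 * whereas a load from memory zero-extends through bit 127 — the difference
 * between IEM_MC_STORE_XREG_U32 and IEM_MC_STORE_XREG_U32_ZX_U128 above.
 */
#if 0 /* illustration only */
static void iemSketchMovssLoad(uint32_t au32Dst[4], uint32_t u32Src, bool fFromMemory)
{
    au32Dst[0] = u32Src;
    if (fFromMemory)
        au32Dst[1] = au32Dst[2] = au32Dst[3] = 0;
    /* else: dwords 1..3 of the destination XMM register are preserved. */
}
#endif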


/** Opcode 0xf2 0x0f 0x10 - movsd Vx, Wsd */
FNIEMOP_STUB(iemOp_movsd_Vx_Wsd);


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
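
/*
 * Editor's note: an illustrative expansion of the MOVSLDUP operation invoked
 * above via iemAImpl_movsldup, in plain C (assumed semantics, consistent with
 * the @optest values; illustration only, not part of the original file).
 * Each even-indexed source dword is duplicated into a pair.
 */
#if 0 /* illustration only */
static void iemSketchMovSlDup(uint32_t au32Dst[4], uint32_t const au32Src[4])
{
    au32Dst[0] = au32Src[0];
    au32Dst[1] = au32Src[0];
    au32Dst[2] = au32Src[2];
    au32Dst[3] = au32Src[2];
}
#endif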


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
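
/*
 * Editor's note: an illustrative expansion of MOVDDUP as invoked above via
 * iemAImpl_movddup, in plain C (assumed semantics, consistent with the
 * @optest values; illustration only, not part of the original file).  The
 * low source qword is duplicated into both halves of the destination.
 */
#if 0 /* illustration only */
static void iemSketchMovDDup(uint64_t au64Dst[2], uint64_t u64SrcLo)
{
    au64Dst[0] = u64SrcLo;
    au64Dst[1] = u64SrcLo;
}
#endif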


/** Opcode 0x0f 0x13 - movlps Mq, Vq */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - movlpd Mq, Vq */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq / movlhps Vdq, Uq */
FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
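
/*
 * Editor's note: an illustrative sketch of how the control register index is
 * assembled above (assumed encoding rules, plain C; illustration only, not
 * part of the original file).  REX.R supplies bit 3 in long mode, and on
 * CPUs with the AMD alternative-encoding feature a LOCK prefix makes CR8
 * reachable from 32-bit code; #UD takes precedence over #GP otherwise.
 */
#if 0 /* illustration only */
static int iemSketchCrIndex(uint8_t bRm, bool fRexR, bool fLock, bool fHasAltMovCr8)
{
    int iCrReg = ((bRm >> 3) & 7) | (fRexR ? 8 : 0);
    if (fLock)
    {
        if (!fHasAltMovCr8)
            return -1; /* #UD */
        iCrReg |= 8;   /* LOCK 0F 20 /r selects CR8 on such CPUs. */
    }
    return iCrReg;
}
#endif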
1638
1639
1640/** Opcode 0x0f 0x21. */
1641FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1642{
1643 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1644 IEMOP_HLP_MIN_386();
1645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1647 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1648 return IEMOP_RAISE_INVALID_OPCODE();
1649 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1650 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1651 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1652}
1653
1654
1655/** Opcode 0x0f 0x22. */
1656FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1657{
1658 /* mod is ignored, as is operand size overrides. */
1659 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1660 IEMOP_HLP_MIN_386();
1661 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1662 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1663 else
1664 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1665
1666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1667 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1668 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1669 {
1670 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1671 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1672 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1673 iCrReg |= 8;
1674 }
1675 switch (iCrReg)
1676 {
1677 case 0: case 2: case 3: case 4: case 8:
1678 break;
1679 default:
1680 return IEMOP_RAISE_INVALID_OPCODE();
1681 }
1682 IEMOP_HLP_DONE_DECODING();
1683
1684 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1685}
1686
1687
1688/** Opcode 0x0f 0x23. */
1689FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1690{
1691 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1692 IEMOP_HLP_MIN_386();
1693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1695 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1696 return IEMOP_RAISE_INVALID_OPCODE();
1697 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1698 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1699 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1700}
1701
1702
1703/** Opcode 0x0f 0x24. */
1704FNIEMOP_DEF(iemOp_mov_Rd_Td)
1705{
1706 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1707 /** @todo works on 386 and 486. */
1708 /* The RM byte is not considered, see testcase. */
1709 return IEMOP_RAISE_INVALID_OPCODE();
1710}
1711
1712
1713/** Opcode 0x0f 0x26. */
1714FNIEMOP_DEF(iemOp_mov_Td_Rd)
1715{
1716 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1717 /** @todo works on 386 and 486. */
1718 /* The RM byte is not considered, see testcase. */
1719 return IEMOP_RAISE_INVALID_OPCODE();
1720}
1721
1722
1723/** Opcode 0x0f 0x28 - movaps Vps, Wps */
1724FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
1725{
1726 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1728 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1729 {
1730 /*
1731 * Register, register.
1732 */
1733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1734 IEM_MC_BEGIN(0, 0);
1735 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1736 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1737 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1738 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1739 IEM_MC_ADVANCE_RIP();
1740 IEM_MC_END();
1741 }
1742 else
1743 {
1744 /*
1745 * Register, memory.
1746 */
1747 IEM_MC_BEGIN(0, 2);
1748 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1750
1751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1753 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1754 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1755
1756 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1757 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1758
1759 IEM_MC_ADVANCE_RIP();
1760 IEM_MC_END();
1761 }
1762 return VINF_SUCCESS;
1763}
1764
1765/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
1766FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
1767{
1768 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1770 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1771 {
1772 /*
1773 * Register, register.
1774 */
1775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1776 IEM_MC_BEGIN(0, 0);
1777 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1778 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1779 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1780 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1781 IEM_MC_ADVANCE_RIP();
1782 IEM_MC_END();
1783 }
1784 else
1785 {
1786 /*
1787 * Register, memory.
1788 */
1789 IEM_MC_BEGIN(0, 2);
1790 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1792
1793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1795 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1796 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1797
1798 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1799 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1800
1801 IEM_MC_ADVANCE_RIP();
1802 IEM_MC_END();
1803 }
1804 return VINF_SUCCESS;
1805}
1806
1807/* Opcode 0xf3 0x0f 0x28 - invalid */
1808/* Opcode 0xf2 0x0f 0x28 - invalid */
1809
1810/** Opcode 0x0f 0x29 - movaps Wps, Vps */
1811FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
1812{
1813 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1815 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1816 {
1817 /*
1818 * Register, register.
1819 */
1820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1821 IEM_MC_BEGIN(0, 0);
1822 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1823 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1824 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1825 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1826 IEM_MC_ADVANCE_RIP();
1827 IEM_MC_END();
1828 }
1829 else
1830 {
1831 /*
1832 * Memory, register.
1833 */
1834 IEM_MC_BEGIN(0, 2);
1835 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1837
1838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1840 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1841 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1842
1843 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1844 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1845
1846 IEM_MC_ADVANCE_RIP();
1847 IEM_MC_END();
1848 }
1849 return VINF_SUCCESS;
1850}
1851
1852/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
1853FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
1854{
1855 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1857 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1858 {
1859 /*
1860 * Register, register.
1861 */
1862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1863 IEM_MC_BEGIN(0, 0);
1864 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1867 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1868 IEM_MC_ADVANCE_RIP();
1869 IEM_MC_END();
1870 }
1871 else
1872 {
1873 /*
1874 * Memory, register.
1875 */
1876 IEM_MC_BEGIN(0, 2);
1877 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1879
1880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1882 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1883 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1884
1885 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1886 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1887
1888 IEM_MC_ADVANCE_RIP();
1889 IEM_MC_END();
1890 }
1891 return VINF_SUCCESS;
1892}
1893
1894/* Opcode 0xf3 0x0f 0x29 - invalid */
1895/* Opcode 0xf2 0x0f 0x29 - invalid */
1896
1897
1898/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1899FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1900/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1901FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1902/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
1903FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
1904/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
1905FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
1906
1907
1908/** Opcode 0x0f 0x2b - movntps Mps, Vps */
1909FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
1910{
1911 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1913 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1914 {
1915 /*
1916 * memory, register.
1917 */
1918 IEM_MC_BEGIN(0, 2);
1919 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1921
1922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1924 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1925 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1926
1927 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1928 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1929
1930 IEM_MC_ADVANCE_RIP();
1931 IEM_MC_END();
1932 }
1933 /* The register, register encoding is invalid. */
1934 else
1935 return IEMOP_RAISE_INVALID_OPCODE();
1936 return VINF_SUCCESS;
1937}
1938
1939/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
1940FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
1941{
1942 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1944 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1945 {
1946 /*
1947 * Memory, register.
1948 */
1949 IEM_MC_BEGIN(0, 2);
1950 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1952
1953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1955 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1956 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1957
1958 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1959 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1960
1961 IEM_MC_ADVANCE_RIP();
1962 IEM_MC_END();
1963 }
1964 /* The register, register encoding is invalid. */
1965 else
1966 return IEMOP_RAISE_INVALID_OPCODE();
1967 return VINF_SUCCESS;
1968}
1969/* Opcode 0xf3 0x0f 0x2b - invalid */
1970/* Opcode 0xf2 0x0f 0x2b - invalid */
1971
1972
1973/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1974FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1975/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1976FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1977/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
1978FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
1979/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
1980FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
1981
1982/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1983FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1984/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1985FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1986/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
1987FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
1988/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
1989FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
1990
1991/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
1992FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
1993/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
1994FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
1995/* Opcode 0xf3 0x0f 0x2e - invalid */
1996/* Opcode 0xf2 0x0f 0x2e - invalid */
1997
1998/** Opcode 0x0f 0x2f - comiss Vss, Wss */
1999FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2000/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2001FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2002/* Opcode 0xf3 0x0f 0x2f - invalid */
2003/* Opcode 0xf2 0x0f 0x2f - invalid */
2004
2005/** Opcode 0x0f 0x30. */
2006FNIEMOP_DEF(iemOp_wrmsr)
2007{
2008 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2011}
2012
2013
2014/** Opcode 0x0f 0x31. */
2015FNIEMOP_DEF(iemOp_rdtsc)
2016{
2017 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2019 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2020}
2021
2022
2023/** Opcode 0x0f 0x32. */
2024FNIEMOP_DEF(iemOp_rdmsr)
2025{
2026 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2028 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2029}
2030
2031
2032/** Opcode 0x0f 0x33. */
2033FNIEMOP_STUB(iemOp_rdpmc);
2034/** Opcode 0x0f 0x34. */
2035FNIEMOP_STUB(iemOp_sysenter);
2036/** Opcode 0x0f 0x35. */
2037FNIEMOP_STUB(iemOp_sysexit);
2038/** Opcode 0x0f 0x37. */
2039FNIEMOP_STUB(iemOp_getsec);
2040
2041
2042/** Opcode 0x0f 0x38. */
2043FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2044{
2045#ifdef IEM_WITH_THREE_0F_38
2046 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2047 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2048#else
2049 IEMOP_BITCH_ABOUT_STUB();
2050 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2051#endif
2052}
2053
2054
2055/** Opcode 0x0f 0x3a. */
2056FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2057{
2058#ifdef IEM_WITH_THREE_0F_3A
2059 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2060 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2061#else
2062 IEMOP_BITCH_ABOUT_STUB();
2063 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2064#endif
2065}
2066
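/*
 * Illustrative sketch, assuming the table layout used throughout this file:
 * four entries per opcode byte, ordered by mandatory-prefix slot (0 = none,
 * 1 = 0x66, 2 = 0xf3, 3 = 0xf2, matching the columns of the group tables
 * further down).  The helper name is made up.
 */
#if 0 /* example only, compiled out */
static uintptr_t iemExampleThreeByteIndex(uint8_t bOpcode, uint8_t idxPrefix)
{
    return (uintptr_t)bOpcode * 4 + idxPrefix;
}
#endif
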
2067
2068/**
2069 * Implements a conditional move.
2070 *
2071 * Wish there was an obvious way to do this where we could share and reduce
2072 * code bloat.
2073 *
2074 * @param a_Cnd The conditional "microcode" operation.
2075 */
2076#define CMOV_X(a_Cnd) \
2077 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2079 { \
2080 switch (pVCpu->iem.s.enmEffOpSize) \
2081 { \
2082 case IEMMODE_16BIT: \
2083 IEM_MC_BEGIN(0, 1); \
2084 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2085 a_Cnd { \
2086 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2087 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2088 } IEM_MC_ENDIF(); \
2089 IEM_MC_ADVANCE_RIP(); \
2090 IEM_MC_END(); \
2091 return VINF_SUCCESS; \
2092 \
2093 case IEMMODE_32BIT: \
2094 IEM_MC_BEGIN(0, 1); \
2095 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2096 a_Cnd { \
2097 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2098 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2099 } IEM_MC_ELSE() { \
2100 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2101 } IEM_MC_ENDIF(); \
2102 IEM_MC_ADVANCE_RIP(); \
2103 IEM_MC_END(); \
2104 return VINF_SUCCESS; \
2105 \
2106 case IEMMODE_64BIT: \
2107 IEM_MC_BEGIN(0, 1); \
2108 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2109 a_Cnd { \
2110 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2111 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2112 } IEM_MC_ENDIF(); \
2113 IEM_MC_ADVANCE_RIP(); \
2114 IEM_MC_END(); \
2115 return VINF_SUCCESS; \
2116 \
2117 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2118 } \
2119 } \
2120 else \
2121 { \
2122 switch (pVCpu->iem.s.enmEffOpSize) \
2123 { \
2124 case IEMMODE_16BIT: \
2125 IEM_MC_BEGIN(0, 2); \
2126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2127 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2129 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2130 a_Cnd { \
2131 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2132 } IEM_MC_ENDIF(); \
2133 IEM_MC_ADVANCE_RIP(); \
2134 IEM_MC_END(); \
2135 return VINF_SUCCESS; \
2136 \
2137 case IEMMODE_32BIT: \
2138 IEM_MC_BEGIN(0, 2); \
2139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2140 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2142 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2143 a_Cnd { \
2144 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2145 } IEM_MC_ELSE() { \
2146 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2147 } IEM_MC_ENDIF(); \
2148 IEM_MC_ADVANCE_RIP(); \
2149 IEM_MC_END(); \
2150 return VINF_SUCCESS; \
2151 \
2152 case IEMMODE_64BIT: \
2153 IEM_MC_BEGIN(0, 2); \
2154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2155 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2157 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2158 a_Cnd { \
2159 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2160 } IEM_MC_ENDIF(); \
2161 IEM_MC_ADVANCE_RIP(); \
2162 IEM_MC_END(); \
2163 return VINF_SUCCESS; \
2164 \
2165 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2166 } \
2167 } do {} while (0)
2168
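/*
 * Hedged model of why the 32-bit cases above need the IEM_MC_ELSE() branch:
 * in 64-bit mode a 32-bit CMOVcc zero-extends the destination even when the
 * condition is false, like any other 32-bit GPR write.  The helper name is
 * made up for illustration.
 */
#if 0 /* example only, compiled out */
static uint64_t iemExampleCmov32(uint64_t uDst, uint32_t uSrc, bool fCond)
{
    /* Taken: take the source; not taken: keep the low dword of the
       destination.  The upper dword ends up zero either way. */
    return fCond ? (uint64_t)uSrc : (uint64_t)(uint32_t)uDst;
}
#endif
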
2169
2170
2171/** Opcode 0x0f 0x40. */
2172FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2173{
2174 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2175 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2176}
2177
2178
2179/** Opcode 0x0f 0x41. */
2180FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2181{
2182 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2183 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2184}
2185
2186
2187/** Opcode 0x0f 0x42. */
2188FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2189{
2190 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2191 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2192}
2193
2194
2195/** Opcode 0x0f 0x43. */
2196FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2197{
2198 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2199 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2200}
2201
2202
2203/** Opcode 0x0f 0x44. */
2204FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2205{
2206 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2207 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2208}
2209
2210
2211/** Opcode 0x0f 0x45. */
2212FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2213{
2214 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2215 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2216}
2217
2218
2219/** Opcode 0x0f 0x46. */
2220FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2221{
2222 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2223 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2224}
2225
2226
2227/** Opcode 0x0f 0x47. */
2228FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2229{
2230 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2231 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2232}
2233
2234
2235/** Opcode 0x0f 0x48. */
2236FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2237{
2238 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2239 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2240}
2241
2242
2243/** Opcode 0x0f 0x49. */
2244FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2245{
2246 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2247 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2248}
2249
2250
2251/** Opcode 0x0f 0x4a. */
2252FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2253{
2254 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2255 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2256}
2257
2258
2259/** Opcode 0x0f 0x4b. */
2260FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2261{
2262 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2263 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2264}
2265
2266
2267/** Opcode 0x0f 0x4c. */
2268FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2269{
2270 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2271 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2272}
2273
2274
2275/** Opcode 0x0f 0x4d. */
2276FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2277{
2278 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2279 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2280}
2281
2282
2283/** Opcode 0x0f 0x4e. */
2284FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2285{
2286 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2287 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2288}
2289
2290
2291/** Opcode 0x0f 0x4f. */
2292FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2293{
2294 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2295 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2296}
2297
2298#undef CMOV_X
2299
2300/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2301FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2302/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2303FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2304/* Opcode 0xf3 0x0f 0x50 - invalid */
2305/* Opcode 0xf2 0x0f 0x50 - invalid */
2306
2307/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2308FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2309/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2310FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2311/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2312FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2313/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2314FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2315
2316/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2317FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2318/* Opcode 0x66 0x0f 0x52 - invalid */
2319/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2320FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2321/* Opcode 0xf2 0x0f 0x52 - invalid */
2322
2323/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2324FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2325/* Opcode 0x66 0x0f 0x53 - invalid */
2326/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2327FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2328/* Opcode 0xf2 0x0f 0x53 - invalid */
2329
2330/** Opcode 0x0f 0x54 - andps Vps, Wps */
2331FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2332/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2333FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2334/* Opcode 0xf3 0x0f 0x54 - invalid */
2335/* Opcode 0xf2 0x0f 0x54 - invalid */
2336
2337/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2338FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2339/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2340FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2341/* Opcode 0xf3 0x0f 0x55 - invalid */
2342/* Opcode 0xf2 0x0f 0x55 - invalid */
2343
2344/** Opcode 0x0f 0x56 - orps Vps, Wps */
2345FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2346/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2347FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2348/* Opcode 0xf3 0x0f 0x56 - invalid */
2349/* Opcode 0xf2 0x0f 0x56 - invalid */
2350
2351/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2352FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2353/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2354FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2355/* Opcode 0xf3 0x0f 0x57 - invalid */
2356/* Opcode 0xf2 0x0f 0x57 - invalid */
2357
2358/** Opcode 0x0f 0x58 - addps Vps, Wps */
2359FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2360/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2361FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2362/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2363FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2364/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2365FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2366
2367/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2368FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2369/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2370FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2371/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2372FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2373/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2374FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2375
2376/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2377FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2378/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2379FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2380/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2381FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2382/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2383FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2384
2385/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2386FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2387/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2388FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2389/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2390FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2391/* Opcode 0xf2 0x0f 0x5b - invalid */
2392
2393/** Opcode 0x0f 0x5c - subps Vps, Wps */
2394FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2395/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2396FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2397/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2398FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2399/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2400FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2401
2402/** Opcode 0x0f 0x5d - minps Vps, Wps */
2403FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2404/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2405FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2406/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2407FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2408/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2409FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2410
2411/** Opcode 0x0f 0x5e - divps Vps, Wps */
2412FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2413/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2414FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2415/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2416FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2417/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2418FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2419
2420/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2421FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2422/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2423FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2424/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2425FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2426/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2427FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2428
2429/**
2430 * Common worker for SSE2 instructions on the forms:
2431 * pxxxx xmm1, xmm2/mem128
2432 *
2433 * The 2nd operand is the first half of a register, which in the memory case
2434 * means a 64-bit memory access that must be 128-bit aligned; only the low
2435 * quadword of the 128-bit operand is used.
2436 *
2437 * Exceptions type 4.
2438 */
2439FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2440{
2441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2442 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2443 {
2444 /*
2445 * Register, register.
2446 */
2447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2448 IEM_MC_BEGIN(2, 0);
2449 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2450 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2452 IEM_MC_PREPARE_SSE_USAGE();
2453 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2454 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2455 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2456 IEM_MC_ADVANCE_RIP();
2457 IEM_MC_END();
2458 }
2459 else
2460 {
2461 /*
2462 * Register, memory.
2463 */
2464 IEM_MC_BEGIN(2, 2);
2465 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2466 IEM_MC_LOCAL(uint64_t, uSrc);
2467 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2469
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2472 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2473 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2474
2475 IEM_MC_PREPARE_SSE_USAGE();
2476 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2477 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2478
2479 IEM_MC_ADVANCE_RIP();
2480 IEM_MC_END();
2481 }
2482 return VINF_SUCCESS;
2483}
2484
2485
2486/**
2487 * Common worker for MMX instructions on the forms:
2488 * pxxxx mm1, mm2/mem32
2489 *
2490 * The 2nd operand is the first half of a register, which in the memory case
2491 * means a 32-bit memory access.  Raises the invalid opcode exception when
2492 * the implementation table supplies no MMX (pfnU64) worker.
2493 *
2494 * Exceptions type 4.
2495 */
2496FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2497{
2498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2499 if (!pImpl->pfnU64)
2500 return IEMOP_RAISE_INVALID_OPCODE();
2501 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2502 {
2503 /*
2504 * Register, register.
2505 */
2506 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2507 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2509 IEM_MC_BEGIN(2, 0);
2510 IEM_MC_ARG(uint64_t *, pDst, 0);
2511 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2512 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2513 IEM_MC_PREPARE_FPU_USAGE();
2514 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2515 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2516 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2517 IEM_MC_ADVANCE_RIP();
2518 IEM_MC_END();
2519 }
2520 else
2521 {
2522 /*
2523 * Register, memory.
2524 */
2525 IEM_MC_BEGIN(2, 2);
2526 IEM_MC_ARG(uint64_t *, pDst, 0);
2527 IEM_MC_LOCAL(uint32_t, uSrc);
2528 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2530
2531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2533 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2534 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2535
2536 IEM_MC_PREPARE_FPU_USAGE();
2537 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2538 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2539
2540 IEM_MC_ADVANCE_RIP();
2541 IEM_MC_END();
2542 }
2543 return VINF_SUCCESS;
2544}
2545
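/*
 * Illustrative model of the "low halves" interleave the two workers above
 * feed, shown for punpcklbw; the helper name is made up and the loop is a
 * sketch, not how the AIMPL workers are written.
 */
#if 0 /* example only, compiled out */
static void iemExamplePunpcklbw(uint8_t abDst[16], uint8_t const abSrc[16])
{
    uint8_t abRes[16];
    for (unsigned i = 0; i < 8; i++)
    {
        abRes[i * 2]     = abDst[i]; /* even result bytes from the destination */
        abRes[i * 2 + 1] = abSrc[i]; /* odd result bytes from the source */
    }
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = abRes[i];
}
#endif
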
2546
2547/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2548FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2549{
2550 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2551 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2552}
2553
2554/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
2555FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
2556{
2557 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
2558 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2559}
2560
2561/* Opcode 0xf3 0x0f 0x60 - invalid */
2562
2563
2564/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2565FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2566{
2567 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
2568 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2569}
2570
2571/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2572FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2573{
2574 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2575 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2576}
2577
2578/* Opcode 0xf3 0x0f 0x61 - invalid */
2579
2580
2581/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2582FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2583{
2584 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2585 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2586}
2587
2588/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2589FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2590{
2591 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2592 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2593}
2594
2595/* Opcode 0xf3 0x0f 0x62 - invalid */
2596
2597
2598
2599/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2600FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2601/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2602FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2603/* Opcode 0xf3 0x0f 0x63 - invalid */
2604
2605/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2606FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2607/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2608FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2609/* Opcode 0xf3 0x0f 0x64 - invalid */
2610
2611/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2612FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2613/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2614FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2615/* Opcode 0xf3 0x0f 0x65 - invalid */
2616
2617/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2618FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2619/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
2620FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
2621/* Opcode 0xf3 0x0f 0x66 - invalid */
2622
2623/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2624FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2625/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
2626FNIEMOP_STUB(iemOp_packuswb_Vx_W);
2627/* Opcode 0xf3 0x0f 0x67 - invalid */
2628
2629
2630/**
2631 * Common worker for MMX instructions on the form:
2632 * pxxxx mm1, mm2/mem64
2633 *
2634 * The 2nd operand is the second half of a register, which in the memory
2635 * case means a 64-bit memory access (the SSE2 counterpart below instead
2636 * does a 128-bit aligned access).
2637 *
2638 * Exceptions type 4.
2639 */
2640FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2641{
2642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2643 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2644 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2645 {
2646 /*
2647 * Register, register.
2648 */
2649 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2650 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2652 IEM_MC_BEGIN(2, 0);
2653 IEM_MC_ARG(uint64_t *, pDst, 0);
2654 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2655 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2656 IEM_MC_PREPARE_FPU_USAGE();
2657 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2658 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2659 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2660 IEM_MC_ADVANCE_RIP();
2661 IEM_MC_END();
2662 }
2663 else
2664 {
2665 /*
2666 * Register, memory.
2667 */
2668 IEM_MC_BEGIN(2, 2);
2669 IEM_MC_ARG(uint64_t *, pDst, 0);
2670 IEM_MC_LOCAL(uint64_t, uSrc);
2671 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2673
2674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2677 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2678
2679 IEM_MC_PREPARE_FPU_USAGE();
2680 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2681 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2682
2683 IEM_MC_ADVANCE_RIP();
2684 IEM_MC_END();
2685 }
2686 return VINF_SUCCESS;
2687}
2688
2689
2690/**
2691 * Common worker for SSE2 instructions on the form:
2692 * pxxxx xmm1, xmm2/mem128
2693 *
2694 * The 2nd operand is the second half of a register, which in the memory
2695 * case means a 128-bit aligned access of which the implementation may read
2696 * the full 128 bits or only the upper 64 bits.
2697 *
2698 * Exceptions type 4.
2699 */
2700FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2701{
2702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2703 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2704 {
2705 /*
2706 * Register, register.
2707 */
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 IEM_MC_BEGIN(2, 0);
2710 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2711 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2712 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2713 IEM_MC_PREPARE_SSE_USAGE();
2714 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2715 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2716 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2717 IEM_MC_ADVANCE_RIP();
2718 IEM_MC_END();
2719 }
2720 else
2721 {
2722 /*
2723 * Register, memory.
2724 */
2725 IEM_MC_BEGIN(2, 2);
2726 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2727 IEM_MC_LOCAL(RTUINT128U, uSrc);
2728 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2730
2731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2733 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2734 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2735
2736 IEM_MC_PREPARE_SSE_USAGE();
2737 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2738 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2739
2740 IEM_MC_ADVANCE_RIP();
2741 IEM_MC_END();
2742 }
2743 return VINF_SUCCESS;
2744}
2745
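/*
 * Illustrative model of the matching "high halves" interleave, shown for
 * punpckhbw; helper name made up, sketch only.
 */
#if 0 /* example only, compiled out */
static void iemExamplePunpckhbw(uint8_t abDst[16], uint8_t const abSrc[16])
{
    uint8_t abRes[16];
    for (unsigned i = 0; i < 8; i++)
    {
        abRes[i * 2]     = abDst[8 + i]; /* upper half of the destination */
        abRes[i * 2 + 1] = abSrc[8 + i]; /* upper half of the source */
    }
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = abRes[i];
}
#endif
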
2746
2747/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2748FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2749{
2750 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2751 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2752}
2753
2754/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
2755FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
2756{
2757 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
2758 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2759}
2760/* Opcode 0xf3 0x0f 0x68 - invalid */
2761
2762
2763/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2764FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2765{
2766 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2767 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2768}
2769
2770/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
2771FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
2772{
2773 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
2774 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2775}
2776
2777/* Opcode 0xf3 0x0f 0x69 - invalid */
2778
2779
2780/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2781FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2782{
2783 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2784 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2785}
2786
2787/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
2788FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
2789{
2790 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
2791 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2792}
2793/* Opcode 0xf3 0x0f 0x6a - invalid */
2794
2795
2796/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2797FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2798/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
2799FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
2800/* Opcode 0xf3 0x0f 0x6b - invalid */
2801
2802
2803/* Opcode 0x0f 0x6c - invalid */
2804
2805/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
2806FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
2807{
2808 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
2809 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2810}
2811
2812/* Opcode 0xf3 0x0f 0x6c - invalid */
2813/* Opcode 0xf2 0x0f 0x6c - invalid */
2814
2815
2816/* Opcode 0x0f 0x6d - invalid */
2817
2818/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
2819FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
2820{
2821 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
2822 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2823}
2824
2825/* Opcode 0xf3 0x0f 0x6d - invalid */
2826
2827
2828/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2829FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2830{
2831 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2832 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2833 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2834 else
2835 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2836 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2837 {
2838 /* MMX, greg */
2839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2840 IEM_MC_BEGIN(0, 1);
2841 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2842 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2843 IEM_MC_LOCAL(uint64_t, u64Tmp);
2844 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2845 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2846 else
2847 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2848 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2849 IEM_MC_ADVANCE_RIP();
2850 IEM_MC_END();
2851 }
2852 else
2853 {
2854 /* MMX, [mem] */
2855 IEM_MC_BEGIN(0, 2);
2856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2857 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2860 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2861 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2862 {
2863 IEM_MC_LOCAL(uint64_t, u64Tmp);
2864 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2865 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2866 }
2867 else
2868 {
2869 IEM_MC_LOCAL(uint32_t, u32Tmp);
2870 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2871 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2872 }
2873 IEM_MC_ADVANCE_RIP();
2874 IEM_MC_END();
2875 }
2876 return VINF_SUCCESS;
2877}
2878
2879/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
2880FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
2881{
2882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2883 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2884 IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
2885 else
2886 IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
2887 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2888 {
2889 /* XMM, greg*/
2890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2891 IEM_MC_BEGIN(0, 1);
2892 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2893 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2894 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2895 {
2896 IEM_MC_LOCAL(uint64_t, u64Tmp);
2897 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2898 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2899 }
2900 else
2901 {
2902 IEM_MC_LOCAL(uint32_t, u32Tmp);
2903 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2904 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2905 }
2906 IEM_MC_ADVANCE_RIP();
2907 IEM_MC_END();
2908 }
2909 else
2910 {
2911 /* XMM, [mem] */
2912 IEM_MC_BEGIN(0, 2);
2913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2917 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2918 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2919 {
2920 IEM_MC_LOCAL(uint64_t, u64Tmp);
2921 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2922 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2923 }
2924 else
2925 {
2926 IEM_MC_LOCAL(uint32_t, u32Tmp);
2927 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2928 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2929 }
2930 IEM_MC_ADVANCE_RIP();
2931 IEM_MC_END();
2932 }
2933 return VINF_SUCCESS;
2934}
2935
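/*
 * Illustrative model of the zero extension IEM_MC_STORE_XREG_U32_ZX_U128 /
 * _U64_ZX_U128 perform above: a movd/movq into an XMM register clears all
 * higher bits.  Helper name made up; RTUINT128U is the IPRT 128-bit union.
 */
#if 0 /* example only, compiled out */
static void iemExampleMovdToXmm(RTUINT128U *puDst, uint32_t uSrc)
{
    puDst->s.Lo = uSrc; /* low dword from the source, bits 32..63 zeroed */
    puDst->s.Hi = 0;    /* high qword zeroed too */
}
#endif
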
2936/* Opcode 0xf3 0x0f 0x6e - invalid */
2937
2938
2939/** Opcode 0x0f 0x6f - movq Pq, Qq */
2940FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2941{
2942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2943 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2944 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2945 {
2946 /*
2947 * Register, register.
2948 */
2949 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2950 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2952 IEM_MC_BEGIN(0, 1);
2953 IEM_MC_LOCAL(uint64_t, u64Tmp);
2954 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2955 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2956 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2957 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2958 IEM_MC_ADVANCE_RIP();
2959 IEM_MC_END();
2960 }
2961 else
2962 {
2963 /*
2964 * Register, memory.
2965 */
2966 IEM_MC_BEGIN(0, 2);
2967 IEM_MC_LOCAL(uint64_t, u64Tmp);
2968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2969
2970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2972 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2973 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2974 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2975 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2976
2977 IEM_MC_ADVANCE_RIP();
2978 IEM_MC_END();
2979 }
2980 return VINF_SUCCESS;
2981}
2982
2983/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
2984FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
2985{
2986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2987 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2988 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2989 {
2990 /*
2991 * Register, register.
2992 */
2993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2994 IEM_MC_BEGIN(0, 0);
2995 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2996 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2997 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2998 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2999 IEM_MC_ADVANCE_RIP();
3000 IEM_MC_END();
3001 }
3002 else
3003 {
3004 /*
3005 * Register, memory.
3006 */
3007 IEM_MC_BEGIN(0, 2);
3008 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3010
3011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3013 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3014 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3015 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3016 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3017
3018 IEM_MC_ADVANCE_RIP();
3019 IEM_MC_END();
3020 }
3021 return VINF_SUCCESS;
3022}
3023
3024/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3025FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3026{
3027 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3028 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3029 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3030 {
3031 /*
3032 * Register, register.
3033 */
3034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3035 IEM_MC_BEGIN(0, 0);
3036 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3037 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3038 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3039 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3040 IEM_MC_ADVANCE_RIP();
3041 IEM_MC_END();
3042 }
3043 else
3044 {
3045 /*
3046 * Register, memory.
3047 */
3048 IEM_MC_BEGIN(0, 2);
3049 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3051
3052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3054 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3055 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3056 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3057 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3058
3059 IEM_MC_ADVANCE_RIP();
3060 IEM_MC_END();
3061 }
3062 return VINF_SUCCESS;
3063}
3064
3065
3066/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3067FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3068{
3069 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3072 {
3073 /*
3074 * Register, register.
3075 */
3076 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3078
3079 IEM_MC_BEGIN(3, 0);
3080 IEM_MC_ARG(uint64_t *, pDst, 0);
3081 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3082 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3083 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3084 IEM_MC_PREPARE_FPU_USAGE();
3085 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3086 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3087 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3088 IEM_MC_ADVANCE_RIP();
3089 IEM_MC_END();
3090 }
3091 else
3092 {
3093 /*
3094 * Register, memory.
3095 */
3096 IEM_MC_BEGIN(3, 2);
3097 IEM_MC_ARG(uint64_t *, pDst, 0);
3098 IEM_MC_LOCAL(uint64_t, uSrc);
3099 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3101
3102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3103 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3104 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3106 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3107
3108 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3109 IEM_MC_PREPARE_FPU_USAGE();
3110 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3111 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3112
3113 IEM_MC_ADVANCE_RIP();
3114 IEM_MC_END();
3115 }
3116 return VINF_SUCCESS;
3117}
3118
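/*
 * Illustrative model of the word shuffle iemAImpl_pshufw implements: result
 * word i is source word (bOrder >> (2 * i)) & 3.  Helper name made up,
 * sketch only.
 */
#if 0 /* example only, compiled out */
static uint64_t iemExamplePshufw(uint64_t uSrc, uint8_t bOrder)
{
    uint64_t uRes = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iWord = (bOrder >> (i * 2)) & 3;
        uRes |= ((uSrc >> (iWord * 16)) & UINT64_C(0xffff)) << (i * 16);
    }
    return uRes;
}
#endif
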
3119/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3120FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3121{
3122 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3124 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3125 {
3126 /*
3127 * Register, register.
3128 */
3129 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3131
3132 IEM_MC_BEGIN(3, 0);
3133 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3134 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3135 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3136 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3137 IEM_MC_PREPARE_SSE_USAGE();
3138 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3139 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3140 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3141 IEM_MC_ADVANCE_RIP();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /*
3147 * Register, memory.
3148 */
3149 IEM_MC_BEGIN(3, 2);
3150 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3151 IEM_MC_LOCAL(RTUINT128U, uSrc);
3152 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3154
3155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3156 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3157 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3159 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3160
3161 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3162 IEM_MC_PREPARE_SSE_USAGE();
3163 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3164 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3165
3166 IEM_MC_ADVANCE_RIP();
3167 IEM_MC_END();
3168 }
3169 return VINF_SUCCESS;
3170}
3171
3172/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3173FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3174{
3175 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3177 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3178 {
3179 /*
3180 * Register, register.
3181 */
3182 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3184
3185 IEM_MC_BEGIN(3, 0);
3186 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3187 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3188 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3189 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3190 IEM_MC_PREPARE_SSE_USAGE();
3191 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3192 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3193 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3194 IEM_MC_ADVANCE_RIP();
3195 IEM_MC_END();
3196 }
3197 else
3198 {
3199 /*
3200 * Register, memory.
3201 */
3202 IEM_MC_BEGIN(3, 2);
3203 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3204 IEM_MC_LOCAL(RTUINT128U, uSrc);
3205 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3207
3208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3209 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3210 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3212 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3213
3214 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3215 IEM_MC_PREPARE_SSE_USAGE();
3216 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3217 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3218
3219 IEM_MC_ADVANCE_RIP();
3220 IEM_MC_END();
3221 }
3222 return VINF_SUCCESS;
3223}
3224
3225/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3226FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3227{
3228 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3230 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3231 {
3232 /*
3233 * Register, register.
3234 */
3235 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3237
3238 IEM_MC_BEGIN(3, 0);
3239 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3240 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3241 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3242 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3243 IEM_MC_PREPARE_SSE_USAGE();
3244 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3245 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3246 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3247 IEM_MC_ADVANCE_RIP();
3248 IEM_MC_END();
3249 }
3250 else
3251 {
3252 /*
3253 * Register, memory.
3254 */
3255 IEM_MC_BEGIN(3, 2);
3256 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3257 IEM_MC_LOCAL(RTUINT128U, uSrc);
3258 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3260
3261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3262 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3263 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3265 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3266
3267 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3268 IEM_MC_PREPARE_SSE_USAGE();
3269 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3270 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3271
3272 IEM_MC_ADVANCE_RIP();
3273 IEM_MC_END();
3274 }
3275 return VINF_SUCCESS;
3276}
3277
3278
3279/** Opcode 0x0f 0x71 11/2. */
3280FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3281
3282/** Opcode 0x66 0x0f 0x71 11/2. */
3283FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3284
3285/** Opcode 0x0f 0x71 11/4. */
3286FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3287
3288/** Opcode 0x66 0x0f 0x71 11/4. */
3289FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3290
3291/** Opcode 0x0f 0x71 11/6. */
3292FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3293
3294/** Opcode 0x66 0x0f 0x71 11/6. */
3295FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3296
3297
3298/**
3299 * Group 12 jump table for register variant.
3300 */
3301IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3302{
3303 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3304 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3305 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3306 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3307 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3308 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3309 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3310 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3311};
3312AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3313
3314
3315/** Opcode 0x0f 0x71. */
3316FNIEMOP_DEF(iemOp_Grp12)
3317{
3318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3319 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3320 /* register, register */
3321 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3322 + pVCpu->iem.s.idxPrefix], bRm);
3323 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3324}
3325
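/*
 * Illustrative sketch of the group-table lookup used by iemOp_Grp12 above
 * (and Grp13/Grp14 below): the ModR/M reg field selects the row, the
 * mandatory-prefix slot the column.  Helper name made up.
 */
#if 0 /* example only, compiled out */
static unsigned iemExampleGroupTableIndex(uint8_t bRm, uint8_t idxPrefix)
{
    unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
    return iReg * 4 + idxPrefix; /* four prefix slots per /r value */
}
#endif
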
3326
3327/** Opcode 0x0f 0x72 11/2. */
3328FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3329
3330/** Opcode 0x66 0x0f 0x72 11/2. */
3331FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3332
3333/** Opcode 0x0f 0x72 11/4. */
3334FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3335
3336/** Opcode 0x66 0x0f 0x72 11/4. */
3337FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3338
3339/** Opcode 0x0f 0x72 11/6. */
3340FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3341
3342/** Opcode 0x66 0x0f 0x72 11/6. */
3343FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3344
3345
3346/**
3347 * Group 13 jump table for register variant.
3348 */
3349IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3350{
3351 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3352 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3353 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3354 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3355 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3356 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3357 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3358 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3359};
3360AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3361
3362/** Opcode 0x0f 0x72. */
3363FNIEMOP_DEF(iemOp_Grp13)
3364{
3365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3366 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3367 /* register, register */
3368 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3369 + pVCpu->iem.s.idxPrefix], bRm);
3370 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3371}
3372
3373
3374/** Opcode 0x0f 0x73 11/2. */
3375FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3376
3377/** Opcode 0x66 0x0f 0x73 11/2. */
3378FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3379
3380/** Opcode 0x66 0x0f 0x73 11/3. */
3381FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3382
3383/** Opcode 0x0f 0x73 11/6. */
3384FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3385
3386/** Opcode 0x66 0x0f 0x73 11/6. */
3387FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3388
3389/** Opcode 0x66 0x0f 0x73 11/7. */
3390FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3391
3392/**
3393 * Group 14 jump table for register variant.
3394 */
3395IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3396{
3397 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3398 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3399 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3400 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3401 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3402 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3403 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3404 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3405};
3406AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3407
3408
3409/** Opcode 0x0f 0x73. */
3410FNIEMOP_DEF(iemOp_Grp14)
3411{
3412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3413 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3414 /* register, register */
3415 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3416 + pVCpu->iem.s.idxPrefix], bRm);
3417 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3418}
3419
3420
3421/**
3422 * Common worker for MMX instructions on the form:
3423 * pxxx mm1, mm2/mem64
3424 */
3425FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3426{
3427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3428 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3429 {
3430 /*
3431 * Register, register.
3432 */
3433 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3434 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3436 IEM_MC_BEGIN(2, 0);
3437 IEM_MC_ARG(uint64_t *, pDst, 0);
3438 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3439 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3440 IEM_MC_PREPARE_FPU_USAGE();
3441 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3442 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3443 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3444 IEM_MC_ADVANCE_RIP();
3445 IEM_MC_END();
3446 }
3447 else
3448 {
3449 /*
3450 * Register, memory.
3451 */
3452 IEM_MC_BEGIN(2, 2);
3453 IEM_MC_ARG(uint64_t *, pDst, 0);
3454 IEM_MC_LOCAL(uint64_t, uSrc);
3455 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3457
3458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3460 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3461 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3462
3463 IEM_MC_PREPARE_FPU_USAGE();
3464 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3465 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3466
3467 IEM_MC_ADVANCE_RIP();
3468 IEM_MC_END();
3469 }
3470 return VINF_SUCCESS;
3471}
3472
3473
3474/**
3475 * Common worker for SSE2 instructions on the forms:
3476 * pxxx xmm1, xmm2/mem128
3477 *
3478 * Proper alignment of the 128-bit operand is enforced.
3479 * Exceptions type 4. SSE2 cpuid checks.
3480 */
3481FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3482{
3483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3484 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3485 {
3486 /*
3487 * Register, register.
3488 */
3489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3490 IEM_MC_BEGIN(2, 0);
3491 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3492 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3493 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3494 IEM_MC_PREPARE_SSE_USAGE();
3495 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3496 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3497 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3498 IEM_MC_ADVANCE_RIP();
3499 IEM_MC_END();
3500 }
3501 else
3502 {
3503 /*
3504 * Register, memory.
3505 */
3506 IEM_MC_BEGIN(2, 2);
3507 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3508 IEM_MC_LOCAL(RTUINT128U, uSrc);
3509 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3511
3512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3514 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3515 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3516
3517 IEM_MC_PREPARE_SSE_USAGE();
3518 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3519 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3520
3521 IEM_MC_ADVANCE_RIP();
3522 IEM_MC_END();
3523 }
3524 return VINF_SUCCESS;
3525}
3526
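/*
 * Illustrative model of the element compare dispatched through the two
 * workers above, shown for pcmpeqb; helper name made up, sketch only.
 */
#if 0 /* example only, compiled out */
static void iemExamplePcmpeqb(uint8_t abDst[16], uint8_t const abSrc[16])
{
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = abDst[i] == abSrc[i] ? 0xff : 0x00; /* all ones on match */
}
#endif
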
3527
3528/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3529FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3530{
3531 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3532 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3533}
3534
3535/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3536FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3537{
3538 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3539 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3540}
3541
3542/* Opcode 0xf3 0x0f 0x74 - invalid */
3543/* Opcode 0xf2 0x0f 0x74 - invalid */
3544
3545
3546/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3547FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3548{
3549 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3550 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3551}
3552
3553/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3554FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3555{
3556 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3557 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3558}
3559
3560/* Opcode 0xf3 0x0f 0x75 - invalid */
3561/* Opcode 0xf2 0x0f 0x75 - invalid */
3562
3563
3564/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3565FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3566{
3567 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3568 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3569}
3570
3571/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3572FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3573{
3574 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
3575 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3576}
3577
3578/* Opcode 0xf3 0x0f 0x76 - invalid */
3579/* Opcode 0xf2 0x0f 0x76 - invalid */
3580
3581
3582/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3583FNIEMOP_STUB(iemOp_emms);
3584/* Opcode 0x66 0x0f 0x77 - invalid */
3585/* Opcode 0xf3 0x0f 0x77 - invalid */
3586/* Opcode 0xf2 0x0f 0x77 - invalid */
3587
3588/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3589FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3590/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3591FNIEMOP_STUB(iemOp_AmdGrp17);
3592/* Opcode 0xf3 0x0f 0x78 - invalid */
3593/* Opcode 0xf2 0x0f 0x78 - invalid */
3594
3595/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3596FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3597/* Opcode 0x66 0x0f 0x79 - invalid */
3598/* Opcode 0xf3 0x0f 0x79 - invalid */
3599/* Opcode 0xf2 0x0f 0x79 - invalid */
3600
3601/* Opcode 0x0f 0x7a - invalid */
3602/* Opcode 0x66 0x0f 0x7a - invalid */
3603/* Opcode 0xf3 0x0f 0x7a - invalid */
3604/* Opcode 0xf2 0x0f 0x7a - invalid */
3605
3606/* Opcode 0x0f 0x7b - invalid */
3607/* Opcode 0x66 0x0f 0x7b - invalid */
3608/* Opcode 0xf3 0x0f 0x7b - invalid */
3609/* Opcode 0xf2 0x0f 0x7b - invalid */
3610
3611/* Opcode 0x0f 0x7c - invalid */
3612/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3613FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3614/* Opcode 0xf3 0x0f 0x7c - invalid */
3615/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3616FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3617
3618/* Opcode 0x0f 0x7d - invalid */
3619/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
3620FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
3621/* Opcode 0xf3 0x0f 0x7d - invalid */
3622/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
3623FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
3624
3625
3626/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3627FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3628{
3629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3630 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3631 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3632 else
3633 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3634 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3635 {
3636 /* greg, MMX */
3637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3638 IEM_MC_BEGIN(0, 1);
3639 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3640 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3641 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3642 {
3643 IEM_MC_LOCAL(uint64_t, u64Tmp);
3644 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3645 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3646 }
3647 else
3648 {
3649 IEM_MC_LOCAL(uint32_t, u32Tmp);
3650 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3651 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3652 }
3653 IEM_MC_ADVANCE_RIP();
3654 IEM_MC_END();
3655 }
3656 else
3657 {
3658 /* [mem], MMX */
3659 IEM_MC_BEGIN(0, 2);
3660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3663 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3664 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3665 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3666 {
3667 IEM_MC_LOCAL(uint64_t, u64Tmp);
3668 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3669 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3670 }
3671 else
3672 {
3673 IEM_MC_LOCAL(uint32_t, u32Tmp);
3674 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3675 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3676 }
3677 IEM_MC_ADVANCE_RIP();
3678 IEM_MC_END();
3679 }
3680 return VINF_SUCCESS;
3681}
3682
3683/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
3684FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
3685{
3686 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3687 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3688 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3689 else
3690 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3691 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3692 {
3693 /* greg, XMM */
3694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3695 IEM_MC_BEGIN(0, 1);
3696 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3697 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3698 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3699 {
3700 IEM_MC_LOCAL(uint64_t, u64Tmp);
3701 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3702 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3703 }
3704 else
3705 {
3706 IEM_MC_LOCAL(uint32_t, u32Tmp);
3707 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3708 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3709 }
3710 IEM_MC_ADVANCE_RIP();
3711 IEM_MC_END();
3712 }
3713 else
3714 {
3715 /* [mem], XMM */
3716 IEM_MC_BEGIN(0, 2);
3717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3721 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3722 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3723 {
3724 IEM_MC_LOCAL(uint64_t, u64Tmp);
3725 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3726 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3727 }
3728 else
3729 {
3730 IEM_MC_LOCAL(uint32_t, u32Tmp);
3731 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3732 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3733 }
3734 IEM_MC_ADVANCE_RIP();
3735 IEM_MC_END();
3736 }
3737 return VINF_SUCCESS;
3738}
3739
3740/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
3741FNIEMOP_STUB(iemOp_movq_Vq_Wq);
3742/* Opcode 0xf2 0x0f 0x7e - invalid */
3743
3744
3745/** Opcode 0x0f 0x7f - movq Qq, Pq */
3746FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3747{
3748 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3750 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3751 {
3752 /*
3753 * Register, register.
3754 */
3755 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3756 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3758 IEM_MC_BEGIN(0, 1);
3759 IEM_MC_LOCAL(uint64_t, u64Tmp);
3760 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3761 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3762 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3763 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3764 IEM_MC_ADVANCE_RIP();
3765 IEM_MC_END();
3766 }
3767 else
3768 {
3769 /*
3770 * Register, memory.
3771 */
3772 IEM_MC_BEGIN(0, 2);
3773 IEM_MC_LOCAL(uint64_t, u64Tmp);
3774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3775
3776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3778 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3779 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3780
3781 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3782 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3783
3784 IEM_MC_ADVANCE_RIP();
3785 IEM_MC_END();
3786 }
3787 return VINF_SUCCESS;
3788}
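
/* Note: the register,register form above uses IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE
   because its destination is an MMX register (guest FPU state is modified), whereas
   the memory form only reads MMX state and therefore makes do with ..._FOR_READ. */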
3789
3790/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
3791FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
3792{
3793 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
3794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3795 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3796 {
3797 /*
3798 * Register, register.
3799 */
3800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3801 IEM_MC_BEGIN(0, 0);
3802 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3803 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3804 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3805 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3806 IEM_MC_ADVANCE_RIP();
3807 IEM_MC_END();
3808 }
3809 else
3810 {
3811 /*
3812 * Register, memory.
3813 */
3814 IEM_MC_BEGIN(0, 2);
3815 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3817
3818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3820 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3821 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3822
3823 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3824 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3825
3826 IEM_MC_ADVANCE_RIP();
3827 IEM_MC_END();
3828 }
3829 return VINF_SUCCESS;
3830}
3831
3832/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
3833FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
3834{
3835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3836 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
3837 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3838 {
3839 /*
3840 * Register, register.
3841 */
3842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3843 IEM_MC_BEGIN(0, 0);
3844 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3845 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3846 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3847 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3848 IEM_MC_ADVANCE_RIP();
3849 IEM_MC_END();
3850 }
3851 else
3852 {
3853 /*
3854 * Register, memory.
3855 */
3856 IEM_MC_BEGIN(0, 2);
3857 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3859
3860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3863 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3864
3865 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3866 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3867
3868 IEM_MC_ADVANCE_RIP();
3869 IEM_MC_END();
3870 }
3871 return VINF_SUCCESS;
3872}
3873
3874/* Opcode 0xf2 0x0f 0x7f - invalid */
3875
3876
3877
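/*
 * The sixteen Jcc Jv handlers below (0x0f 0x80 thru 0x0f 0x8f) all follow
 * the same template and differ only in the EFLAGS condition tested:
 *      0x80 jo    OF=1            0x81 jno   OF=0
 *      0x82 jc    CF=1            0x83 jnc   CF=0
 *      0x84 je    ZF=1            0x85 jne   ZF=0
 *      0x86 jbe   CF=1 or ZF=1    0x87 jnbe  CF=0 and ZF=0
 *      0x88 js    SF=1            0x89 jns   SF=0
 *      0x8a jp    PF=1            0x8b jnp   PF=0
 *      0x8c jl    SF!=OF          0x8d jnl   SF=OF
 *      0x8e jle   ZF=1 or SF!=OF  0x8f jnle  ZF=0 and SF=OF
 */
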
3878/** Opcode 0x0f 0x80. */
3879FNIEMOP_DEF(iemOp_jo_Jv)
3880{
3881 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3882 IEMOP_HLP_MIN_386();
3883 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3884 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3885 {
3886 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3888
3889 IEM_MC_BEGIN(0, 0);
3890 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3891 IEM_MC_REL_JMP_S16(i16Imm);
3892 } IEM_MC_ELSE() {
3893 IEM_MC_ADVANCE_RIP();
3894 } IEM_MC_ENDIF();
3895 IEM_MC_END();
3896 }
3897 else
3898 {
3899 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3901
3902 IEM_MC_BEGIN(0, 0);
3903 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3904 IEM_MC_REL_JMP_S32(i32Imm);
3905 } IEM_MC_ELSE() {
3906 IEM_MC_ADVANCE_RIP();
3907 } IEM_MC_ENDIF();
3908 IEM_MC_END();
3909 }
3910 return VINF_SUCCESS;
3911}
3912
3913
3914/** Opcode 0x0f 0x81. */
3915FNIEMOP_DEF(iemOp_jno_Jv)
3916{
3917 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3918 IEMOP_HLP_MIN_386();
3919 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3920 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3921 {
3922 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3924
3925 IEM_MC_BEGIN(0, 0);
3926 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3927 IEM_MC_ADVANCE_RIP();
3928 } IEM_MC_ELSE() {
3929 IEM_MC_REL_JMP_S16(i16Imm);
3930 } IEM_MC_ENDIF();
3931 IEM_MC_END();
3932 }
3933 else
3934 {
3935 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3937
3938 IEM_MC_BEGIN(0, 0);
3939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3940 IEM_MC_ADVANCE_RIP();
3941 } IEM_MC_ELSE() {
3942 IEM_MC_REL_JMP_S32(i32Imm);
3943 } IEM_MC_ENDIF();
3944 IEM_MC_END();
3945 }
3946 return VINF_SUCCESS;
3947}
3948
3949
3950/** Opcode 0x0f 0x82. */
3951FNIEMOP_DEF(iemOp_jc_Jv)
3952{
3953 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3954 IEMOP_HLP_MIN_386();
3955 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3956 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3957 {
3958 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3960
3961 IEM_MC_BEGIN(0, 0);
3962 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3963 IEM_MC_REL_JMP_S16(i16Imm);
3964 } IEM_MC_ELSE() {
3965 IEM_MC_ADVANCE_RIP();
3966 } IEM_MC_ENDIF();
3967 IEM_MC_END();
3968 }
3969 else
3970 {
3971 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3973
3974 IEM_MC_BEGIN(0, 0);
3975 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3976 IEM_MC_REL_JMP_S32(i32Imm);
3977 } IEM_MC_ELSE() {
3978 IEM_MC_ADVANCE_RIP();
3979 } IEM_MC_ENDIF();
3980 IEM_MC_END();
3981 }
3982 return VINF_SUCCESS;
3983}
3984
3985
3986/** Opcode 0x0f 0x83. */
3987FNIEMOP_DEF(iemOp_jnc_Jv)
3988{
3989 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3990 IEMOP_HLP_MIN_386();
3991 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3992 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3993 {
3994 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3996
3997 IEM_MC_BEGIN(0, 0);
3998 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3999 IEM_MC_ADVANCE_RIP();
4000 } IEM_MC_ELSE() {
4001 IEM_MC_REL_JMP_S16(i16Imm);
4002 } IEM_MC_ENDIF();
4003 IEM_MC_END();
4004 }
4005 else
4006 {
4007 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4009
4010 IEM_MC_BEGIN(0, 0);
4011 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4012 IEM_MC_ADVANCE_RIP();
4013 } IEM_MC_ELSE() {
4014 IEM_MC_REL_JMP_S32(i32Imm);
4015 } IEM_MC_ENDIF();
4016 IEM_MC_END();
4017 }
4018 return VINF_SUCCESS;
4019}
4020
4021
4022/** Opcode 0x0f 0x84. */
4023FNIEMOP_DEF(iemOp_je_Jv)
4024{
4025 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4026 IEMOP_HLP_MIN_386();
4027 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4028 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4029 {
4030 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4032
4033 IEM_MC_BEGIN(0, 0);
4034 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4035 IEM_MC_REL_JMP_S16(i16Imm);
4036 } IEM_MC_ELSE() {
4037 IEM_MC_ADVANCE_RIP();
4038 } IEM_MC_ENDIF();
4039 IEM_MC_END();
4040 }
4041 else
4042 {
4043 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4045
4046 IEM_MC_BEGIN(0, 0);
4047 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4048 IEM_MC_REL_JMP_S32(i32Imm);
4049 } IEM_MC_ELSE() {
4050 IEM_MC_ADVANCE_RIP();
4051 } IEM_MC_ENDIF();
4052 IEM_MC_END();
4053 }
4054 return VINF_SUCCESS;
4055}
4056
4057
4058/** Opcode 0x0f 0x85. */
4059FNIEMOP_DEF(iemOp_jne_Jv)
4060{
4061 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4062 IEMOP_HLP_MIN_386();
4063 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4064 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4065 {
4066 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4068
4069 IEM_MC_BEGIN(0, 0);
4070 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4071 IEM_MC_ADVANCE_RIP();
4072 } IEM_MC_ELSE() {
4073 IEM_MC_REL_JMP_S16(i16Imm);
4074 } IEM_MC_ENDIF();
4075 IEM_MC_END();
4076 }
4077 else
4078 {
4079 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4081
4082 IEM_MC_BEGIN(0, 0);
4083 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4084 IEM_MC_ADVANCE_RIP();
4085 } IEM_MC_ELSE() {
4086 IEM_MC_REL_JMP_S32(i32Imm);
4087 } IEM_MC_ENDIF();
4088 IEM_MC_END();
4089 }
4090 return VINF_SUCCESS;
4091}
4092
4093
4094/** Opcode 0x0f 0x86. */
4095FNIEMOP_DEF(iemOp_jbe_Jv)
4096{
4097 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4098 IEMOP_HLP_MIN_386();
4099 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4100 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4101 {
4102 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4104
4105 IEM_MC_BEGIN(0, 0);
4106 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4107 IEM_MC_REL_JMP_S16(i16Imm);
4108 } IEM_MC_ELSE() {
4109 IEM_MC_ADVANCE_RIP();
4110 } IEM_MC_ENDIF();
4111 IEM_MC_END();
4112 }
4113 else
4114 {
4115 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4117
4118 IEM_MC_BEGIN(0, 0);
4119 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4120 IEM_MC_REL_JMP_S32(i32Imm);
4121 } IEM_MC_ELSE() {
4122 IEM_MC_ADVANCE_RIP();
4123 } IEM_MC_ENDIF();
4124 IEM_MC_END();
4125 }
4126 return VINF_SUCCESS;
4127}
4128
4129
4130/** Opcode 0x0f 0x87. */
4131FNIEMOP_DEF(iemOp_jnbe_Jv)
4132{
4133 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4134 IEMOP_HLP_MIN_386();
4135 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4136 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4137 {
4138 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4140
4141 IEM_MC_BEGIN(0, 0);
4142 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4143 IEM_MC_ADVANCE_RIP();
4144 } IEM_MC_ELSE() {
4145 IEM_MC_REL_JMP_S16(i16Imm);
4146 } IEM_MC_ENDIF();
4147 IEM_MC_END();
4148 }
4149 else
4150 {
4151 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4153
4154 IEM_MC_BEGIN(0, 0);
4155 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4156 IEM_MC_ADVANCE_RIP();
4157 } IEM_MC_ELSE() {
4158 IEM_MC_REL_JMP_S32(i32Imm);
4159 } IEM_MC_ENDIF();
4160 IEM_MC_END();
4161 }
4162 return VINF_SUCCESS;
4163}
4164
4165
4166/** Opcode 0x0f 0x88. */
4167FNIEMOP_DEF(iemOp_js_Jv)
4168{
4169 IEMOP_MNEMONIC(js_Jv, "js Jv");
4170 IEMOP_HLP_MIN_386();
4171 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4172 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4173 {
4174 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4176
4177 IEM_MC_BEGIN(0, 0);
4178 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4179 IEM_MC_REL_JMP_S16(i16Imm);
4180 } IEM_MC_ELSE() {
4181 IEM_MC_ADVANCE_RIP();
4182 } IEM_MC_ENDIF();
4183 IEM_MC_END();
4184 }
4185 else
4186 {
4187 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4189
4190 IEM_MC_BEGIN(0, 0);
4191 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4192 IEM_MC_REL_JMP_S32(i32Imm);
4193 } IEM_MC_ELSE() {
4194 IEM_MC_ADVANCE_RIP();
4195 } IEM_MC_ENDIF();
4196 IEM_MC_END();
4197 }
4198 return VINF_SUCCESS;
4199}
4200
4201
4202/** Opcode 0x0f 0x89. */
4203FNIEMOP_DEF(iemOp_jns_Jv)
4204{
4205 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4206 IEMOP_HLP_MIN_386();
4207 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4208 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4209 {
4210 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4212
4213 IEM_MC_BEGIN(0, 0);
4214 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4215 IEM_MC_ADVANCE_RIP();
4216 } IEM_MC_ELSE() {
4217 IEM_MC_REL_JMP_S16(i16Imm);
4218 } IEM_MC_ENDIF();
4219 IEM_MC_END();
4220 }
4221 else
4222 {
4223 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4225
4226 IEM_MC_BEGIN(0, 0);
4227 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4228 IEM_MC_ADVANCE_RIP();
4229 } IEM_MC_ELSE() {
4230 IEM_MC_REL_JMP_S32(i32Imm);
4231 } IEM_MC_ENDIF();
4232 IEM_MC_END();
4233 }
4234 return VINF_SUCCESS;
4235}
4236
4237
4238/** Opcode 0x0f 0x8a. */
4239FNIEMOP_DEF(iemOp_jp_Jv)
4240{
4241 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4242 IEMOP_HLP_MIN_386();
4243 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4244 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4245 {
4246 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4248
4249 IEM_MC_BEGIN(0, 0);
4250 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4251 IEM_MC_REL_JMP_S16(i16Imm);
4252 } IEM_MC_ELSE() {
4253 IEM_MC_ADVANCE_RIP();
4254 } IEM_MC_ENDIF();
4255 IEM_MC_END();
4256 }
4257 else
4258 {
4259 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4261
4262 IEM_MC_BEGIN(0, 0);
4263 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4264 IEM_MC_REL_JMP_S32(i32Imm);
4265 } IEM_MC_ELSE() {
4266 IEM_MC_ADVANCE_RIP();
4267 } IEM_MC_ENDIF();
4268 IEM_MC_END();
4269 }
4270 return VINF_SUCCESS;
4271}
4272
4273
4274/** Opcode 0x0f 0x8b. */
4275FNIEMOP_DEF(iemOp_jnp_Jv)
4276{
4277 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4278 IEMOP_HLP_MIN_386();
4279 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4280 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4281 {
4282 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4284
4285 IEM_MC_BEGIN(0, 0);
4286 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4287 IEM_MC_ADVANCE_RIP();
4288 } IEM_MC_ELSE() {
4289 IEM_MC_REL_JMP_S16(i16Imm);
4290 } IEM_MC_ENDIF();
4291 IEM_MC_END();
4292 }
4293 else
4294 {
4295 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4297
4298 IEM_MC_BEGIN(0, 0);
4299 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4300 IEM_MC_ADVANCE_RIP();
4301 } IEM_MC_ELSE() {
4302 IEM_MC_REL_JMP_S32(i32Imm);
4303 } IEM_MC_ENDIF();
4304 IEM_MC_END();
4305 }
4306 return VINF_SUCCESS;
4307}
4308
4309
4310/** Opcode 0x0f 0x8c. */
4311FNIEMOP_DEF(iemOp_jl_Jv)
4312{
4313 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4314 IEMOP_HLP_MIN_386();
4315 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4316 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4317 {
4318 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4320
4321 IEM_MC_BEGIN(0, 0);
4322 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4323 IEM_MC_REL_JMP_S16(i16Imm);
4324 } IEM_MC_ELSE() {
4325 IEM_MC_ADVANCE_RIP();
4326 } IEM_MC_ENDIF();
4327 IEM_MC_END();
4328 }
4329 else
4330 {
4331 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4333
4334 IEM_MC_BEGIN(0, 0);
4335 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4336 IEM_MC_REL_JMP_S32(i32Imm);
4337 } IEM_MC_ELSE() {
4338 IEM_MC_ADVANCE_RIP();
4339 } IEM_MC_ENDIF();
4340 IEM_MC_END();
4341 }
4342 return VINF_SUCCESS;
4343}
4344
4345
4346/** Opcode 0x0f 0x8d. */
4347FNIEMOP_DEF(iemOp_jnl_Jv)
4348{
4349 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4350 IEMOP_HLP_MIN_386();
4351 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4352 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4353 {
4354 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4356
4357 IEM_MC_BEGIN(0, 0);
4358 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4359 IEM_MC_ADVANCE_RIP();
4360 } IEM_MC_ELSE() {
4361 IEM_MC_REL_JMP_S16(i16Imm);
4362 } IEM_MC_ENDIF();
4363 IEM_MC_END();
4364 }
4365 else
4366 {
4367 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4369
4370 IEM_MC_BEGIN(0, 0);
4371 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4372 IEM_MC_ADVANCE_RIP();
4373 } IEM_MC_ELSE() {
4374 IEM_MC_REL_JMP_S32(i32Imm);
4375 } IEM_MC_ENDIF();
4376 IEM_MC_END();
4377 }
4378 return VINF_SUCCESS;
4379}
4380
4381
4382/** Opcode 0x0f 0x8e. */
4383FNIEMOP_DEF(iemOp_jle_Jv)
4384{
4385 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4386 IEMOP_HLP_MIN_386();
4387 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4388 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4389 {
4390 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4392
4393 IEM_MC_BEGIN(0, 0);
4394 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4395 IEM_MC_REL_JMP_S16(i16Imm);
4396 } IEM_MC_ELSE() {
4397 IEM_MC_ADVANCE_RIP();
4398 } IEM_MC_ENDIF();
4399 IEM_MC_END();
4400 }
4401 else
4402 {
4403 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4405
4406 IEM_MC_BEGIN(0, 0);
4407 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4408 IEM_MC_REL_JMP_S32(i32Imm);
4409 } IEM_MC_ELSE() {
4410 IEM_MC_ADVANCE_RIP();
4411 } IEM_MC_ENDIF();
4412 IEM_MC_END();
4413 }
4414 return VINF_SUCCESS;
4415}
4416
4417
4418/** Opcode 0x0f 0x8f. */
4419FNIEMOP_DEF(iemOp_jnle_Jv)
4420{
4421 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4422 IEMOP_HLP_MIN_386();
4423 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4424 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4425 {
4426 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4428
4429 IEM_MC_BEGIN(0, 0);
4430 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4431 IEM_MC_ADVANCE_RIP();
4432 } IEM_MC_ELSE() {
4433 IEM_MC_REL_JMP_S16(i16Imm);
4434 } IEM_MC_ENDIF();
4435 IEM_MC_END();
4436 }
4437 else
4438 {
4439 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4441
4442 IEM_MC_BEGIN(0, 0);
4443 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4444 IEM_MC_ADVANCE_RIP();
4445 } IEM_MC_ELSE() {
4446 IEM_MC_REL_JMP_S32(i32Imm);
4447 } IEM_MC_ENDIF();
4448 IEM_MC_END();
4449 }
4450 return VINF_SUCCESS;
4451}
4452
4453
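/*
 * The setcc Eb handlers below (0x0f 0x90 thru 0x0f 0x9f) reuse the Jcc
 * conditions tabulated above, but instead of branching they store a byte
 * of 1 (condition met) or 0 (condition not met) to the register or memory
 * operand.
 */
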
4454/** Opcode 0x0f 0x90. */
4455FNIEMOP_DEF(iemOp_seto_Eb)
4456{
4457 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4458 IEMOP_HLP_MIN_386();
4459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4460
4461 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4462 * any way. AMD says it's "unused", whatever that means. We're
4463 * ignoring for now. */
4464 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4465 {
4466 /* register target */
4467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4468 IEM_MC_BEGIN(0, 0);
4469 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4470 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4471 } IEM_MC_ELSE() {
4472 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4473 } IEM_MC_ENDIF();
4474 IEM_MC_ADVANCE_RIP();
4475 IEM_MC_END();
4476 }
4477 else
4478 {
4479 /* memory target */
4480 IEM_MC_BEGIN(0, 1);
4481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4484 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4485 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4486 } IEM_MC_ELSE() {
4487 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4488 } IEM_MC_ENDIF();
4489 IEM_MC_ADVANCE_RIP();
4490 IEM_MC_END();
4491 }
4492 return VINF_SUCCESS;
4493}
4494
4495
4496/** Opcode 0x0f 0x91. */
4497FNIEMOP_DEF(iemOp_setno_Eb)
4498{
4499 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4500 IEMOP_HLP_MIN_386();
4501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4502
4503 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4504 * any way. AMD says it's "unused", whatever that means. We're
4505 * ignoring for now. */
4506 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4507 {
4508 /* register target */
4509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4510 IEM_MC_BEGIN(0, 0);
4511 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4512 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4513 } IEM_MC_ELSE() {
4514 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4515 } IEM_MC_ENDIF();
4516 IEM_MC_ADVANCE_RIP();
4517 IEM_MC_END();
4518 }
4519 else
4520 {
4521 /* memory target */
4522 IEM_MC_BEGIN(0, 1);
4523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4527 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4528 } IEM_MC_ELSE() {
4529 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4530 } IEM_MC_ENDIF();
4531 IEM_MC_ADVANCE_RIP();
4532 IEM_MC_END();
4533 }
4534 return VINF_SUCCESS;
4535}
4536
4537
4538/** Opcode 0x0f 0x92. */
4539FNIEMOP_DEF(iemOp_setc_Eb)
4540{
4541 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4542 IEMOP_HLP_MIN_386();
4543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4544
4545 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4546 * any way. AMD says it's "unused", whatever that means. We're
4547 * ignoring for now. */
4548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4549 {
4550 /* register target */
4551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4552 IEM_MC_BEGIN(0, 0);
4553 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4554 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4555 } IEM_MC_ELSE() {
4556 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4557 } IEM_MC_ENDIF();
4558 IEM_MC_ADVANCE_RIP();
4559 IEM_MC_END();
4560 }
4561 else
4562 {
4563 /* memory target */
4564 IEM_MC_BEGIN(0, 1);
4565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4568 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4569 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4570 } IEM_MC_ELSE() {
4571 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4572 } IEM_MC_ENDIF();
4573 IEM_MC_ADVANCE_RIP();
4574 IEM_MC_END();
4575 }
4576 return VINF_SUCCESS;
4577}
4578
4579
4580/** Opcode 0x0f 0x93. */
4581FNIEMOP_DEF(iemOp_setnc_Eb)
4582{
4583 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4584 IEMOP_HLP_MIN_386();
4585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4586
4587 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4588 * any way. AMD says it's "unused", whatever that means. We're
4589 * ignoring for now. */
4590 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4591 {
4592 /* register target */
4593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4594 IEM_MC_BEGIN(0, 0);
4595 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4596 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4597 } IEM_MC_ELSE() {
4598 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4599 } IEM_MC_ENDIF();
4600 IEM_MC_ADVANCE_RIP();
4601 IEM_MC_END();
4602 }
4603 else
4604 {
4605 /* memory target */
4606 IEM_MC_BEGIN(0, 1);
4607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4610 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4611 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4612 } IEM_MC_ELSE() {
4613 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4614 } IEM_MC_ENDIF();
4615 IEM_MC_ADVANCE_RIP();
4616 IEM_MC_END();
4617 }
4618 return VINF_SUCCESS;
4619}
4620
4621
4622/** Opcode 0x0f 0x94. */
4623FNIEMOP_DEF(iemOp_sete_Eb)
4624{
4625 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4626 IEMOP_HLP_MIN_386();
4627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4628
4629 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4630 * any way. AMD says it's "unused", whatever that means. We're
4631 * ignoring for now. */
4632 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4633 {
4634 /* register target */
4635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4636 IEM_MC_BEGIN(0, 0);
4637 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4638 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4639 } IEM_MC_ELSE() {
4640 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4641 } IEM_MC_ENDIF();
4642 IEM_MC_ADVANCE_RIP();
4643 IEM_MC_END();
4644 }
4645 else
4646 {
4647 /* memory target */
4648 IEM_MC_BEGIN(0, 1);
4649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4652 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4653 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4654 } IEM_MC_ELSE() {
4655 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4656 } IEM_MC_ENDIF();
4657 IEM_MC_ADVANCE_RIP();
4658 IEM_MC_END();
4659 }
4660 return VINF_SUCCESS;
4661}
4662
4663
4664/** Opcode 0x0f 0x95. */
4665FNIEMOP_DEF(iemOp_setne_Eb)
4666{
4667 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4668 IEMOP_HLP_MIN_386();
4669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4670
4671 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4672 * any way. AMD says it's "unused", whatever that means. We're
4673 * ignoring for now. */
4674 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4675 {
4676 /* register target */
4677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4678 IEM_MC_BEGIN(0, 0);
4679 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4680 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4681 } IEM_MC_ELSE() {
4682 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4683 } IEM_MC_ENDIF();
4684 IEM_MC_ADVANCE_RIP();
4685 IEM_MC_END();
4686 }
4687 else
4688 {
4689 /* memory target */
4690 IEM_MC_BEGIN(0, 1);
4691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4694 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4695 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4696 } IEM_MC_ELSE() {
4697 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4698 } IEM_MC_ENDIF();
4699 IEM_MC_ADVANCE_RIP();
4700 IEM_MC_END();
4701 }
4702 return VINF_SUCCESS;
4703}
4704
4705
4706/** Opcode 0x0f 0x96. */
4707FNIEMOP_DEF(iemOp_setbe_Eb)
4708{
4709 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4710 IEMOP_HLP_MIN_386();
4711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4712
4713 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4714 * any way. AMD says it's "unused", whatever that means. We're
4715 * ignoring for now. */
4716 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4717 {
4718 /* register target */
4719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4720 IEM_MC_BEGIN(0, 0);
4721 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4722 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4723 } IEM_MC_ELSE() {
4724 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4725 } IEM_MC_ENDIF();
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 }
4729 else
4730 {
4731 /* memory target */
4732 IEM_MC_BEGIN(0, 1);
4733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4736 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4737 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4738 } IEM_MC_ELSE() {
4739 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4740 } IEM_MC_ENDIF();
4741 IEM_MC_ADVANCE_RIP();
4742 IEM_MC_END();
4743 }
4744 return VINF_SUCCESS;
4745}
4746
4747
4748/** Opcode 0x0f 0x97. */
4749FNIEMOP_DEF(iemOp_setnbe_Eb)
4750{
4751 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4752 IEMOP_HLP_MIN_386();
4753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4754
4755 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4756 * any way. AMD says it's "unused", whatever that means. We're
4757 * ignoring for now. */
4758 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4759 {
4760 /* register target */
4761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4762 IEM_MC_BEGIN(0, 0);
4763 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4764 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4765 } IEM_MC_ELSE() {
4766 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4767 } IEM_MC_ENDIF();
4768 IEM_MC_ADVANCE_RIP();
4769 IEM_MC_END();
4770 }
4771 else
4772 {
4773 /* memory target */
4774 IEM_MC_BEGIN(0, 1);
4775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4778 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4779 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4780 } IEM_MC_ELSE() {
4781 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4782 } IEM_MC_ENDIF();
4783 IEM_MC_ADVANCE_RIP();
4784 IEM_MC_END();
4785 }
4786 return VINF_SUCCESS;
4787}
4788
4789
4790/** Opcode 0x0f 0x98. */
4791FNIEMOP_DEF(iemOp_sets_Eb)
4792{
4793 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4794 IEMOP_HLP_MIN_386();
4795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4796
4797 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4798 * any way. AMD says it's "unused", whatever that means. We're
4799 * ignoring for now. */
4800 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4801 {
4802 /* register target */
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804 IEM_MC_BEGIN(0, 0);
4805 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4806 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4807 } IEM_MC_ELSE() {
4808 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4809 } IEM_MC_ENDIF();
4810 IEM_MC_ADVANCE_RIP();
4811 IEM_MC_END();
4812 }
4813 else
4814 {
4815 /* memory target */
4816 IEM_MC_BEGIN(0, 1);
4817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4820 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4821 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4822 } IEM_MC_ELSE() {
4823 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4824 } IEM_MC_ENDIF();
4825 IEM_MC_ADVANCE_RIP();
4826 IEM_MC_END();
4827 }
4828 return VINF_SUCCESS;
4829}
4830
4831
4832/** Opcode 0x0f 0x99. */
4833FNIEMOP_DEF(iemOp_setns_Eb)
4834{
4835 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4836 IEMOP_HLP_MIN_386();
4837 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4838
4839 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4840 * any way. AMD says it's "unused", whatever that means. We're
4841 * ignoring for now. */
4842 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4843 {
4844 /* register target */
4845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4846 IEM_MC_BEGIN(0, 0);
4847 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4848 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4849 } IEM_MC_ELSE() {
4850 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4851 } IEM_MC_ENDIF();
4852 IEM_MC_ADVANCE_RIP();
4853 IEM_MC_END();
4854 }
4855 else
4856 {
4857 /* memory target */
4858 IEM_MC_BEGIN(0, 1);
4859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4862 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4863 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4864 } IEM_MC_ELSE() {
4865 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4866 } IEM_MC_ENDIF();
4867 IEM_MC_ADVANCE_RIP();
4868 IEM_MC_END();
4869 }
4870 return VINF_SUCCESS;
4871}
4872
4873
4874/** Opcode 0x0f 0x9a. */
4875FNIEMOP_DEF(iemOp_setp_Eb)
4876{
4877 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4878 IEMOP_HLP_MIN_386();
4879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4880
4881 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4882 * any way. AMD says it's "unused", whatever that means. We're
4883 * ignoring for now. */
4884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4885 {
4886 /* register target */
4887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4888 IEM_MC_BEGIN(0, 0);
4889 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4890 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4891 } IEM_MC_ELSE() {
4892 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4893 } IEM_MC_ENDIF();
4894 IEM_MC_ADVANCE_RIP();
4895 IEM_MC_END();
4896 }
4897 else
4898 {
4899 /* memory target */
4900 IEM_MC_BEGIN(0, 1);
4901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4905 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4906 } IEM_MC_ELSE() {
4907 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4908 } IEM_MC_ENDIF();
4909 IEM_MC_ADVANCE_RIP();
4910 IEM_MC_END();
4911 }
4912 return VINF_SUCCESS;
4913}
4914
4915
4916/** Opcode 0x0f 0x9b. */
4917FNIEMOP_DEF(iemOp_setnp_Eb)
4918{
4919 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4920 IEMOP_HLP_MIN_386();
4921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4922
4923 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4924 * any way. AMD says it's "unused", whatever that means. We're
4925 * ignoring for now. */
4926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4927 {
4928 /* register target */
4929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4930 IEM_MC_BEGIN(0, 0);
4931 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4932 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4933 } IEM_MC_ELSE() {
4934 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4935 } IEM_MC_ENDIF();
4936 IEM_MC_ADVANCE_RIP();
4937 IEM_MC_END();
4938 }
4939 else
4940 {
4941 /* memory target */
4942 IEM_MC_BEGIN(0, 1);
4943 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4946 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4947 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4948 } IEM_MC_ELSE() {
4949 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4950 } IEM_MC_ENDIF();
4951 IEM_MC_ADVANCE_RIP();
4952 IEM_MC_END();
4953 }
4954 return VINF_SUCCESS;
4955}
4956
4957
4958/** Opcode 0x0f 0x9c. */
4959FNIEMOP_DEF(iemOp_setl_Eb)
4960{
4961 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4962 IEMOP_HLP_MIN_386();
4963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4964
4965 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4966 * any way. AMD says it's "unused", whatever that means. We're
4967 * ignoring for now. */
4968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4969 {
4970 /* register target */
4971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4972 IEM_MC_BEGIN(0, 0);
4973 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4974 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4975 } IEM_MC_ELSE() {
4976 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4977 } IEM_MC_ENDIF();
4978 IEM_MC_ADVANCE_RIP();
4979 IEM_MC_END();
4980 }
4981 else
4982 {
4983 /* memory target */
4984 IEM_MC_BEGIN(0, 1);
4985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4988 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4989 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4990 } IEM_MC_ELSE() {
4991 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4992 } IEM_MC_ENDIF();
4993 IEM_MC_ADVANCE_RIP();
4994 IEM_MC_END();
4995 }
4996 return VINF_SUCCESS;
4997}
4998
4999
5000/** Opcode 0x0f 0x9d. */
5001FNIEMOP_DEF(iemOp_setnl_Eb)
5002{
5003 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5004 IEMOP_HLP_MIN_386();
5005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5006
5007 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5008 * any way. AMD says it's "unused", whatever that means. We're
5009 * ignoring for now. */
5010 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5011 {
5012 /* register target */
5013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5014 IEM_MC_BEGIN(0, 0);
5015 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5016 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5017 } IEM_MC_ELSE() {
5018 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5019 } IEM_MC_ENDIF();
5020 IEM_MC_ADVANCE_RIP();
5021 IEM_MC_END();
5022 }
5023 else
5024 {
5025 /* memory target */
5026 IEM_MC_BEGIN(0, 1);
5027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5030 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5031 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5032 } IEM_MC_ELSE() {
5033 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5034 } IEM_MC_ENDIF();
5035 IEM_MC_ADVANCE_RIP();
5036 IEM_MC_END();
5037 }
5038 return VINF_SUCCESS;
5039}
5040
5041
5042/** Opcode 0x0f 0x9e. */
5043FNIEMOP_DEF(iemOp_setle_Eb)
5044{
5045 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5046 IEMOP_HLP_MIN_386();
5047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5048
5049 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5050 * any way. AMD says it's "unused", whatever that means. We're
5051 * ignoring for now. */
5052 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5053 {
5054 /* register target */
5055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5056 IEM_MC_BEGIN(0, 0);
5057 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5058 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5059 } IEM_MC_ELSE() {
5060 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5061 } IEM_MC_ENDIF();
5062 IEM_MC_ADVANCE_RIP();
5063 IEM_MC_END();
5064 }
5065 else
5066 {
5067 /* memory target */
5068 IEM_MC_BEGIN(0, 1);
5069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5072 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5073 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5074 } IEM_MC_ELSE() {
5075 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5076 } IEM_MC_ENDIF();
5077 IEM_MC_ADVANCE_RIP();
5078 IEM_MC_END();
5079 }
5080 return VINF_SUCCESS;
5081}
5082
5083
5084/** Opcode 0x0f 0x9f. */
5085FNIEMOP_DEF(iemOp_setnle_Eb)
5086{
5087 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5088 IEMOP_HLP_MIN_386();
5089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5090
5091 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5092 * any way. AMD says it's "unused", whatever that means. We're
5093 * ignoring for now. */
5094 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5095 {
5096 /* register target */
5097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5098 IEM_MC_BEGIN(0, 0);
5099 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5100 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5101 } IEM_MC_ELSE() {
5102 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5103 } IEM_MC_ENDIF();
5104 IEM_MC_ADVANCE_RIP();
5105 IEM_MC_END();
5106 }
5107 else
5108 {
5109 /* memory target */
5110 IEM_MC_BEGIN(0, 1);
5111 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5114 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5115 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5116 } IEM_MC_ELSE() {
5117 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5118 } IEM_MC_ENDIF();
5119 IEM_MC_ADVANCE_RIP();
5120 IEM_MC_END();
5121 }
5122 return VINF_SUCCESS;
5123}
5124
5125
5126/**
5127 * Common 'push segment-register' helper.
5128 */
5129FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5130{
5131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5132 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS/GS pushes reach this helper in 64-bit mode. */
5133 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5134
5135 switch (pVCpu->iem.s.enmEffOpSize)
5136 {
5137 case IEMMODE_16BIT:
5138 IEM_MC_BEGIN(0, 1);
5139 IEM_MC_LOCAL(uint16_t, u16Value);
5140 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5141 IEM_MC_PUSH_U16(u16Value);
5142 IEM_MC_ADVANCE_RIP();
5143 IEM_MC_END();
5144 break;
5145
5146 case IEMMODE_32BIT:
5147 IEM_MC_BEGIN(0, 1);
5148 IEM_MC_LOCAL(uint32_t, u32Value);
5149 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5150 IEM_MC_PUSH_U32_SREG(u32Value);
5151 IEM_MC_ADVANCE_RIP();
5152 IEM_MC_END();
5153 break;
5154
5155 case IEMMODE_64BIT:
5156 IEM_MC_BEGIN(0, 1);
5157 IEM_MC_LOCAL(uint64_t, u64Value);
5158 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5159 IEM_MC_PUSH_U64(u64Value);
5160 IEM_MC_ADVANCE_RIP();
5161 IEM_MC_END();
5162 break;
5163 }
5164
5165 return VINF_SUCCESS;
5166}
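
/*
 * Illustrative aside (not compiled in): the 32-bit case above uses
 * IEM_MC_PUSH_U32_SREG rather than plain IEM_MC_PUSH_U32 because real CPUs
 * doing a 32-bit 'push sreg' may perform only a 16-bit write, leaving the
 * upper half of the stack slot unmodified.  A hypothetical plain-C model of
 * that store (names invented for the sketch):
 */
#if 0 /* example only */
static void iemSketchPushSReg32(uint8_t *pbStackSlot /* 4 byte slot */, uint16_t uSel)
{
    /* Only the low word of the slot is written; bytes 2..3 keep their old value. */
    pbStackSlot[0] = (uint8_t)uSel;
    pbStackSlot[1] = (uint8_t)(uSel >> 8);
}
#endif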
5167
5168
5169/** Opcode 0x0f 0xa0. */
5170FNIEMOP_DEF(iemOp_push_fs)
5171{
5172 IEMOP_MNEMONIC(push_fs, "push fs");
5173 IEMOP_HLP_MIN_386();
5174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5175 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5176}
5177
5178
5179/** Opcode 0x0f 0xa1. */
5180FNIEMOP_DEF(iemOp_pop_fs)
5181{
5182 IEMOP_MNEMONIC(pop_fs, "pop fs");
5183 IEMOP_HLP_MIN_386();
5184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5185 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5186}
5187
5188
5189/** Opcode 0x0f 0xa2. */
5190FNIEMOP_DEF(iemOp_cpuid)
5191{
5192 IEMOP_MNEMONIC(cpuid, "cpuid");
5193 IEMOP_HLP_MIN_486(); /* Not all 486 models implement CPUID. */
5194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5195 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5196}
5197
5198
5199/**
5200 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5201 * iemOp_bts_Ev_Gv.
5202 */
5203FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5204{
5205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5206 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5207
5208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5209 {
5210 /* register destination. */
5211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5212 switch (pVCpu->iem.s.enmEffOpSize)
5213 {
5214 case IEMMODE_16BIT:
5215 IEM_MC_BEGIN(3, 0);
5216 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5217 IEM_MC_ARG(uint16_t, u16Src, 1);
5218 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5219
5220 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5221 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5222 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5223 IEM_MC_REF_EFLAGS(pEFlags);
5224 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5225
5226 IEM_MC_ADVANCE_RIP();
5227 IEM_MC_END();
5228 return VINF_SUCCESS;
5229
5230 case IEMMODE_32BIT:
5231 IEM_MC_BEGIN(3, 0);
5232 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5233 IEM_MC_ARG(uint32_t, u32Src, 1);
5234 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5235
5236 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5237 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5238 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5239 IEM_MC_REF_EFLAGS(pEFlags);
5240 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5241
5242 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5243 IEM_MC_ADVANCE_RIP();
5244 IEM_MC_END();
5245 return VINF_SUCCESS;
5246
5247 case IEMMODE_64BIT:
5248 IEM_MC_BEGIN(3, 0);
5249 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5250 IEM_MC_ARG(uint64_t, u64Src, 1);
5251 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5252
5253 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5254 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5255 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5256 IEM_MC_REF_EFLAGS(pEFlags);
5257 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5258
5259 IEM_MC_ADVANCE_RIP();
5260 IEM_MC_END();
5261 return VINF_SUCCESS;
5262
5263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5264 }
5265 }
5266 else
5267 {
5268 /* memory destination. */
5269
5270 uint32_t fAccess;
5271 if (pImpl->pfnLockedU16)
5272 fAccess = IEM_ACCESS_DATA_RW;
5273 else /* BT */
5274 fAccess = IEM_ACCESS_DATA_R;
5275
5276 /** @todo test negative bit offsets! */
5277 switch (pVCpu->iem.s.enmEffOpSize)
5278 {
5279 case IEMMODE_16BIT:
5280 IEM_MC_BEGIN(3, 2);
5281 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5282 IEM_MC_ARG(uint16_t, u16Src, 1);
5283 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5285 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5286
5287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5288 if (pImpl->pfnLockedU16)
5289 IEMOP_HLP_DONE_DECODING();
5290 else
5291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5292 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5293 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5294 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5295 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5296 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5297 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5298 IEM_MC_FETCH_EFLAGS(EFlags);
5299
5300 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5301 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5302 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5303 else
5304 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5305 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5306
5307 IEM_MC_COMMIT_EFLAGS(EFlags);
5308 IEM_MC_ADVANCE_RIP();
5309 IEM_MC_END();
5310 return VINF_SUCCESS;
5311
5312 case IEMMODE_32BIT:
5313 IEM_MC_BEGIN(3, 2);
5314 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5315 IEM_MC_ARG(uint32_t, u32Src, 1);
5316 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5318 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5319
5320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5321 if (pImpl->pfnLockedU16)
5322 IEMOP_HLP_DONE_DECODING();
5323 else
5324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5325 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5326 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5327 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5328 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5329 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5330 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5331 IEM_MC_FETCH_EFLAGS(EFlags);
5332
5333 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5334 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5335 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5336 else
5337 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5338 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5339
5340 IEM_MC_COMMIT_EFLAGS(EFlags);
5341 IEM_MC_ADVANCE_RIP();
5342 IEM_MC_END();
5343 return VINF_SUCCESS;
5344
5345 case IEMMODE_64BIT:
5346 IEM_MC_BEGIN(3, 2);
5347 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5348 IEM_MC_ARG(uint64_t, u64Src, 1);
5349 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5351 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5352
5353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5354 if (pImpl->pfnLockedU16)
5355 IEMOP_HLP_DONE_DECODING();
5356 else
5357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5358 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5359 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5360 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5361 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5362 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5363 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5364 IEM_MC_FETCH_EFLAGS(EFlags);
5365
5366 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5367 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5368 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5369 else
5370 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5371 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5372
5373 IEM_MC_COMMIT_EFLAGS(EFlags);
5374 IEM_MC_ADVANCE_RIP();
5375 IEM_MC_END();
5376 return VINF_SUCCESS;
5377
5378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5379 }
5380 }
5381}
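
/*
 * Illustrative aside (not compiled in): the effective address adjustment in
 * the memory path above.  For 'bt/bts/btr/btc mem, reg' the bit offset is a
 * signed value that may reach outside the addressed operand, so the address
 * is advanced by (offset >> log2(operand bits)) operand-sized units and the
 * remaining low bits select the bit.  Hypothetical 16-bit sketch (flat
 * addressing for simplicity):
 */
#if 0 /* example only */
static void iemSketchBtMem16(uintptr_t uAddr, int16_t i16BitNo,
                             uintptr_t *puAddrFinal, uint16_t *pfBitMask)
{
    intptr_t const offWords = (intptr_t)(i16BitNo >> 4);    /* signed word index (IEM_MC_SAR_LOCAL_S16) */
    *puAddrFinal = uAddr + offWords * 2;                    /* words -> bytes    (IEM_MC_SHL_LOCAL_S16) */
    *pfBitMask   = (uint16_t)(1u << (i16BitNo & 0x0f));     /* bit within word   (IEM_MC_AND_ARG_U16)   */
}
#endif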
5382
5383
5384/** Opcode 0x0f 0xa3. */
5385FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5386{
5387 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5388 IEMOP_HLP_MIN_386();
5389 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5390}
5391
5392
5393/**
5394 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5395 */
5396FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5397{
5398 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5399 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5400
5401 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5402 {
5403 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405
5406 switch (pVCpu->iem.s.enmEffOpSize)
5407 {
5408 case IEMMODE_16BIT:
5409 IEM_MC_BEGIN(4, 0);
5410 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5411 IEM_MC_ARG(uint16_t, u16Src, 1);
5412 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5413 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5414
5415 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5416 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5417 IEM_MC_REF_EFLAGS(pEFlags);
5418 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5419
5420 IEM_MC_ADVANCE_RIP();
5421 IEM_MC_END();
5422 return VINF_SUCCESS;
5423
5424 case IEMMODE_32BIT:
5425 IEM_MC_BEGIN(4, 0);
5426 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5427 IEM_MC_ARG(uint32_t, u32Src, 1);
5428 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5429 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5430
5431 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5432 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5433 IEM_MC_REF_EFLAGS(pEFlags);
5434 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5435
5436 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5437 IEM_MC_ADVANCE_RIP();
5438 IEM_MC_END();
5439 return VINF_SUCCESS;
5440
5441 case IEMMODE_64BIT:
5442 IEM_MC_BEGIN(4, 0);
5443 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5444 IEM_MC_ARG(uint64_t, u64Src, 1);
5445 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5446 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5447
5448 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5449 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5450 IEM_MC_REF_EFLAGS(pEFlags);
5451 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5452
5453 IEM_MC_ADVANCE_RIP();
5454 IEM_MC_END();
5455 return VINF_SUCCESS;
5456
5457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5458 }
5459 }
5460 else
5461 {
5462 switch (pVCpu->iem.s.enmEffOpSize)
5463 {
5464 case IEMMODE_16BIT:
5465 IEM_MC_BEGIN(4, 2);
5466 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5467 IEM_MC_ARG(uint16_t, u16Src, 1);
5468 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5469 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5471
5472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5473 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5474 IEM_MC_ASSIGN(cShiftArg, cShift);
5475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5476 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5477 IEM_MC_FETCH_EFLAGS(EFlags);
5478 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5479 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5480
5481 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5482 IEM_MC_COMMIT_EFLAGS(EFlags);
5483 IEM_MC_ADVANCE_RIP();
5484 IEM_MC_END();
5485 return VINF_SUCCESS;
5486
5487 case IEMMODE_32BIT:
5488 IEM_MC_BEGIN(4, 2);
5489 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5490 IEM_MC_ARG(uint32_t, u32Src, 1);
5491 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5492 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5494
5495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5496 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5497 IEM_MC_ASSIGN(cShiftArg, cShift);
5498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5499 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5500 IEM_MC_FETCH_EFLAGS(EFlags);
5501 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5502 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5503
5504 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5505 IEM_MC_COMMIT_EFLAGS(EFlags);
5506 IEM_MC_ADVANCE_RIP();
5507 IEM_MC_END();
5508 return VINF_SUCCESS;
5509
5510 case IEMMODE_64BIT:
5511 IEM_MC_BEGIN(4, 2);
5512 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5513 IEM_MC_ARG(uint64_t, u64Src, 1);
5514 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5515 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5517
5518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5519 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5520 IEM_MC_ASSIGN(cShiftArg, cShift);
5521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5522 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5523 IEM_MC_FETCH_EFLAGS(EFlags);
5524 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5525 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5526
5527 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5528 IEM_MC_COMMIT_EFLAGS(EFlags);
5529 IEM_MC_ADVANCE_RIP();
5530 IEM_MC_END();
5531 return VINF_SUCCESS;
5532
5533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5534 }
5535 }
5536}
5537
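/*
 * Illustrative sketch (not part of the emulator) of the 16-bit SHLD result
 * the pfnNormalU16 worker produces: the destination is shifted left and the
 * vacated bits are filled from the source.  The function name is made up;
 * counts above 16 are left alone here because they are undefined for 16-bit
 * operands on real CPUs.
 */
#include <stdint.h>

static uint16_t iemExampleShld16(uint16_t uDst, uint16_t uSrc, uint8_t cShift)
{
    cShift &= 31;                           /* the CPU masks the count to 5 bits */
    if (cShift == 0 || cShift > 16)
        return uDst;                        /* count 0: no change; 17..31: undefined */
    uint32_t const uTmp = ((uint32_t)uDst << 16) | uSrc; /* dst:src as one 32-bit unit */
    return (uint16_t)(uTmp >> (16 - cShift));
}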
5538
5539/**
5540 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5541 */
5542FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5543{
5544 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5545 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5546
5547 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5548 {
5549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5550
5551 switch (pVCpu->iem.s.enmEffOpSize)
5552 {
5553 case IEMMODE_16BIT:
5554 IEM_MC_BEGIN(4, 0);
5555 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5556 IEM_MC_ARG(uint16_t, u16Src, 1);
5557 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5558 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5559
5560 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5561 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5562 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5563 IEM_MC_REF_EFLAGS(pEFlags);
5564 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5565
5566 IEM_MC_ADVANCE_RIP();
5567 IEM_MC_END();
5568 return VINF_SUCCESS;
5569
5570 case IEMMODE_32BIT:
5571 IEM_MC_BEGIN(4, 0);
5572 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5573 IEM_MC_ARG(uint32_t, u32Src, 1);
5574 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5575 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5576
5577 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5578 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5579 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5580 IEM_MC_REF_EFLAGS(pEFlags);
5581 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5582
5583 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5584 IEM_MC_ADVANCE_RIP();
5585 IEM_MC_END();
5586 return VINF_SUCCESS;
5587
5588 case IEMMODE_64BIT:
5589 IEM_MC_BEGIN(4, 0);
5590 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5591 IEM_MC_ARG(uint64_t, u64Src, 1);
5592 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5593 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5594
5595 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5596 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5597 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5598 IEM_MC_REF_EFLAGS(pEFlags);
5599 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5600
5601 IEM_MC_ADVANCE_RIP();
5602 IEM_MC_END();
5603 return VINF_SUCCESS;
5604
5605 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5606 }
5607 }
5608 else
5609 {
5610 switch (pVCpu->iem.s.enmEffOpSize)
5611 {
5612 case IEMMODE_16BIT:
5613 IEM_MC_BEGIN(4, 2);
5614 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5615 IEM_MC_ARG(uint16_t, u16Src, 1);
5616 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5617 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5619
5620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5622 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5623 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5624 IEM_MC_FETCH_EFLAGS(EFlags);
5625 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5626 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5627
5628 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5629 IEM_MC_COMMIT_EFLAGS(EFlags);
5630 IEM_MC_ADVANCE_RIP();
5631 IEM_MC_END();
5632 return VINF_SUCCESS;
5633
5634 case IEMMODE_32BIT:
5635 IEM_MC_BEGIN(4, 2);
5636 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5637 IEM_MC_ARG(uint32_t, u32Src, 1);
5638 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5639 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5641
5642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5644 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5645 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5646 IEM_MC_FETCH_EFLAGS(EFlags);
5647 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5648 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5649
5650 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5651 IEM_MC_COMMIT_EFLAGS(EFlags);
5652 IEM_MC_ADVANCE_RIP();
5653 IEM_MC_END();
5654 return VINF_SUCCESS;
5655
5656 case IEMMODE_64BIT:
5657 IEM_MC_BEGIN(4, 2);
5658 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5659 IEM_MC_ARG(uint64_t, u64Src, 1);
5660 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5661 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5663
5664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5666 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5667 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5668 IEM_MC_FETCH_EFLAGS(EFlags);
5669 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5670 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5671
5672 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5673 IEM_MC_COMMIT_EFLAGS(EFlags);
5674 IEM_MC_ADVANCE_RIP();
5675 IEM_MC_END();
5676 return VINF_SUCCESS;
5677
5678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5679 }
5680 }
5681}
5682
5683
5684
5685/** Opcode 0x0f 0xa4. */
5686FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5687{
5688 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5689 IEMOP_HLP_MIN_386();
5690 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5691}
5692
5693
5694/** Opcode 0x0f 0xa5. */
5695FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5696{
5697 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5698 IEMOP_HLP_MIN_386();
5699 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5700}
5701
5702
5703/** Opcode 0x0f 0xa8. */
5704FNIEMOP_DEF(iemOp_push_gs)
5705{
5706 IEMOP_MNEMONIC(push_gs, "push gs");
5707 IEMOP_HLP_MIN_386();
5708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5709 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5710}
5711
5712
5713/** Opcode 0x0f 0xa9. */
5714FNIEMOP_DEF(iemOp_pop_gs)
5715{
5716 IEMOP_MNEMONIC(pop_gs, "pop gs");
5717 IEMOP_HLP_MIN_386();
5718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5719 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5720}
5721
5722
5723/** Opcode 0x0f 0xaa. */
5724FNIEMOP_STUB(iemOp_rsm);
5725//IEMOP_HLP_MIN_386();
5726
5727
5728/** Opcode 0x0f 0xab. */
5729FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5730{
5731 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5732 IEMOP_HLP_MIN_386();
5733 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5734}
5735
5736
5737/** Opcode 0x0f 0xac. */
5738FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5739{
5740 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5741 IEMOP_HLP_MIN_386();
5742 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5743}
5744
5745
5746/** Opcode 0x0f 0xad. */
5747FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5748{
5749 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5750 IEMOP_HLP_MIN_386();
5751 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5752}
5753
5754
5755/** Opcode 0x0f 0xae mem/0. */
5756FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5757{
5758 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5759 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5760 return IEMOP_RAISE_INVALID_OPCODE();
5761
5762 IEM_MC_BEGIN(3, 1);
5763 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5764 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5765 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5768 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5769 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5770 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5771 IEM_MC_END();
5772 return VINF_SUCCESS;
5773}
5774
5775
5776/** Opcode 0x0f 0xae mem/1. */
5777FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5778{
5779 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5780 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5781 return IEMOP_RAISE_INVALID_OPCODE();
5782
5783 IEM_MC_BEGIN(3, 1);
5784 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5785 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5786 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5789 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5790 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5791 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5792 IEM_MC_END();
5793 return VINF_SUCCESS;
5794}
5795
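/*
 * For orientation, the first fields of the 512-byte FXSAVE image that the
 * two workers above save and restore (offsets per the Intel SDM); a partial
 * sketch, and the struct name is made up.
 */
#include <stdint.h>

typedef struct IEMEXAMPLEFXSTATEHDR
{
    uint16_t FCW;        /* +0x00: x87 control word */
    uint16_t FSW;        /* +0x02: x87 status word */
    uint8_t  FTW;        /* +0x04: abridged x87 tag word */
    uint8_t  bReserved;  /* +0x05 */
    uint16_t FOP;        /* +0x06: last x87 opcode */
    uint32_t FPUIP;      /* +0x08: last x87 instruction pointer (32-bit form) */
    uint16_t CS;         /* +0x0c */
    uint16_t Rsrvd1;     /* +0x0e */
    uint32_t FPUDP;      /* +0x10: last x87 data pointer (32-bit form) */
    uint16_t DS;         /* +0x14 */
    uint16_t Rsrvd2;     /* +0x16 */
    uint32_t MXCSR;      /* +0x18: SSE control/status */
    uint32_t MXCSR_MASK; /* +0x1c: writable MXCSR bits */
} IEMEXAMPLEFXSTATEHDR;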
5796
5797/**
5798 * @opmaps grp15
5799 * @opcode !11/2
5800 * @oppfx none
5801 * @opcpuid sse
5802 * @opgroup og_sse_mxcsrsm
5803 * @opxcpttype 5
5804 * @optest op1=0 -> mxcsr=0
5805 * @optest op1=0x2083 -> mxcsr=0x2083
5806 * @optest op1=0xfffffffe -> value.xcpt=0xd
5807 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5808 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5809 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5810 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5811 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5812 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5813 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5814 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5815 */
5816FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
5817{
5818 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5819 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5820 return IEMOP_RAISE_INVALID_OPCODE();
5821
5822 IEM_MC_BEGIN(2, 0);
5823 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5824 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5827    IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5828 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5829 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5830 IEM_MC_END();
5831 return VINF_SUCCESS;
5832}
5833
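/*
 * Sketch of the reserved-bit check the @optest lines above exercise: loading
 * an MXCSR value with bits outside MXCSR_MASK set raises #GP(0).  The 0xffff
 * mask is an assumption here; the real mask comes from the FXSAVE image.
 */
#include <stdbool.h>
#include <stdint.h>

static bool iemExampleMxcsrValueIsValid(uint32_t uNewMxcsr)
{
    uint32_t const fMxcsrMask = UINT32_C(0xffff);  /* assumed MXCSR_MASK */
    return !(uNewMxcsr & ~fMxcsrMask);             /* 0x2083 passes, 0xfffffffe #GPs */
}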
5834
5835/**
5836 * @opmaps grp15
5837 * @opcode !11/3
5838 * @oppfx none
5839 * @opcpuid sse
5840 * @opgroup og_sse_mxcsrsm
5841 * @opxcpttype 5
5842 * @optest mxcsr=0 -> op1=0
5843 * @optest mxcsr=0x2083 -> op1=0x2083
5844 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5845 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5846 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5847 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
5848 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5849 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5850 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5851 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5852 */
5853FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
5854{
5855 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5856 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5857 return IEMOP_RAISE_INVALID_OPCODE();
5858
5859 IEM_MC_BEGIN(2, 0);
5860 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5861 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5864 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5865 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5866 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
5867 IEM_MC_END();
5868 return VINF_SUCCESS;
5869}
5870
5871
5872/**
5873 * @opmaps grp15
5874 * @opcode !11/4
5875 * @oppfx none
5876 * @opcpuid xsave
5877 * @opgroup og_system
5878 * @opxcpttype none
5879 */
5880FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
5881{
5882 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
5883 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5884 return IEMOP_RAISE_INVALID_OPCODE();
5885
5886 IEM_MC_BEGIN(3, 0);
5887 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5888 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5889 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5890 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5892 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5893 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5894 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
5895 IEM_MC_END();
5896 return VINF_SUCCESS;
5897}
5898
5899
5900/**
5901 * @opmaps grp15
5902 * @opcode !11/5
5903 * @oppfx none
5904 * @opcpuid xsave
5905 * @opgroup og_system
5906 * @opxcpttype none
5907 */
5908FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
5909{
5910 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
5911 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5912 return IEMOP_RAISE_INVALID_OPCODE();
5913
5914 IEM_MC_BEGIN(3, 0);
5915 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5916 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5917 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5920    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5921 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5922 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5923 IEM_MC_END();
5924 return VINF_SUCCESS;
5925}
5926
5927/** Opcode 0x0f 0xae mem/6. */
5928FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5929
5930/**
5931 * @opmaps grp15
5932 * @opcode !11/7
5933 * @oppfx none
5934 * @opcpuid clfsh
5935 * @opgroup og_cachectl
5936 * @optest op1=1 ->
5937 */
5938FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
5939{
5940 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5941 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
5942 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
5943
5944 IEM_MC_BEGIN(2, 0);
5945 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5946 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5949 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5950 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
5951 IEM_MC_END();
5952 return VINF_SUCCESS;
5953}
5954
5955/**
5956 * @opmaps grp15
5957 * @opcode !11/7
5958 * @oppfx 0x66
5959 * @opcpuid clflushopt
5960 * @opgroup og_cachectl
5961 * @optest op1=1 ->
5962 */
5963FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
5964{
5965 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5966 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
5967 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
5968
5969 IEM_MC_BEGIN(2, 0);
5970 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5971 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5974 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5975 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
5976 IEM_MC_END();
5977 return VINF_SUCCESS;
5978}
5979
5980
5981/** Opcode 0x0f 0xae 11b/5. */
5982FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5983{
5984 RT_NOREF_PV(bRm);
5985 IEMOP_MNEMONIC(lfence, "lfence");
5986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5987 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5988 return IEMOP_RAISE_INVALID_OPCODE();
5989
5990 IEM_MC_BEGIN(0, 0);
5991 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5992 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5993 else
5994 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5995 IEM_MC_ADVANCE_RIP();
5996 IEM_MC_END();
5997 return VINF_SUCCESS;
5998}
5999
6000
6001/** Opcode 0x0f 0xae 11b/6. */
6002FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6003{
6004 RT_NOREF_PV(bRm);
6005 IEMOP_MNEMONIC(mfence, "mfence");
6006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6007 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6008 return IEMOP_RAISE_INVALID_OPCODE();
6009
6010 IEM_MC_BEGIN(0, 0);
6011 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6012 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6013 else
6014 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6015 IEM_MC_ADVANCE_RIP();
6016 IEM_MC_END();
6017 return VINF_SUCCESS;
6018}
6019
6020
6021/** Opcode 0x0f 0xae 11b/7. */
6022FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6023{
6024 RT_NOREF_PV(bRm);
6025 IEMOP_MNEMONIC(sfence, "sfence");
6026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6027 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6028 return IEMOP_RAISE_INVALID_OPCODE();
6029
6030 IEM_MC_BEGIN(0, 0);
6031 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6032 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6033 else
6034 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6035 IEM_MC_ADVANCE_RIP();
6036 IEM_MC_END();
6037 return VINF_SUCCESS;
6038}
6039
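/*
 * A minimal sketch of what an alternative memory fence for pre-SSE2 hosts
 * can look like: any locked read-modify-write serializes memory accesses on
 * x86, so it can stand in for lfence/mfence/sfence.  This is a C11 atomics
 * version with a made-up name; the real iemAImpl_alt_mem_fence helper is
 * written in assembly.
 */
#include <stdatomic.h>

static void iemExampleAltMemFence(void)
{
    static atomic_int s_iDummy;
    atomic_fetch_add(&s_iDummy, 0); /* comparable to 'lock add [mem], 0' */
}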
6040
6041/** Opcode 0xf3 0x0f 0xae 11b/0. */
6042FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6043
6044/** Opcode 0xf3 0x0f 0xae 11b/1. */
6045FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6046
6047/** Opcode 0xf3 0x0f 0xae 11b/2. */
6048FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6049
6050/** Opcode 0xf3 0x0f 0xae 11b/3. */
6051FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6052
6053
6054/**
6055 * Group 15 jump table for register variant.
6056 */
6057IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6058{ /* pfx: none, 066h, 0f3h, 0f2h */
6059 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6060 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6061 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6062 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6063 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6064 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6065 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6066 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6067};
6068AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6069
6070
6071/**
6072 * Group 15 jump table for memory variant.
6073 */
6074IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6075{ /* pfx: none, 066h, 0f3h, 0f2h */
6076 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6077 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6078 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6079 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6080 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6081 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6082 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6083 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6084};
6085AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6086
6087
6088/** Opcode 0x0f 0xae. */
6089FNIEMOP_DEF(iemOp_Grp15)
6090{
6091    IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
6092 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6093 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6094 /* register, register */
6095 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6096 + pVCpu->iem.s.idxPrefix], bRm);
6097 /* memory, register */
6098 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6099 + pVCpu->iem.s.idxPrefix], bRm);
6100}
6101
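/*
 * Hypothetical sketch of the dispatch index computed above: each /r value
 * owns four consecutive table slots, one per mandatory prefix.
 */
#include <stdint.h>

static unsigned iemExampleGrp15TableIndex(uint8_t bRm, unsigned idxPrefix)
{
    unsigned const iReg = (bRm >> 3) & 7; /* X86_MODRM_REG_SHIFT / X86_MODRM_REG_SMASK */
    return iReg * 4 + idxPrefix;          /* idxPrefix: 0=none, 1=66h, 2=F3h, 3=F2h */
}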
6102
6103/** Opcode 0x0f 0xaf. */
6104FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6105{
6106 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6107 IEMOP_HLP_MIN_386();
6108 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6109 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6110}
6111
6112
6113/** Opcode 0x0f 0xb0. */
6114FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6115{
6116 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6117 IEMOP_HLP_MIN_486();
6118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6119
6120 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6121 {
6122 IEMOP_HLP_DONE_DECODING();
6123 IEM_MC_BEGIN(4, 0);
6124 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6125 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6126 IEM_MC_ARG(uint8_t, u8Src, 2);
6127 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6128
6129 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6130 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6131 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6132 IEM_MC_REF_EFLAGS(pEFlags);
6133 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6134 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6135 else
6136 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6137
6138 IEM_MC_ADVANCE_RIP();
6139 IEM_MC_END();
6140 }
6141 else
6142 {
6143 IEM_MC_BEGIN(4, 3);
6144 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6145 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6146 IEM_MC_ARG(uint8_t, u8Src, 2);
6147 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6149 IEM_MC_LOCAL(uint8_t, u8Al);
6150
6151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6152 IEMOP_HLP_DONE_DECODING();
6153 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6154 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6155 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6156 IEM_MC_FETCH_EFLAGS(EFlags);
6157 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6158 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6159 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6160 else
6161 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6162
6163 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6164 IEM_MC_COMMIT_EFLAGS(EFlags);
6165 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6166 IEM_MC_ADVANCE_RIP();
6167 IEM_MC_END();
6168 }
6169 return VINF_SUCCESS;
6170}
6171
6172/** Opcode 0x0f 0xb1. */
6173FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6174{
6175 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6176 IEMOP_HLP_MIN_486();
6177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6178
6179 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6180 {
6181 IEMOP_HLP_DONE_DECODING();
6182 switch (pVCpu->iem.s.enmEffOpSize)
6183 {
6184 case IEMMODE_16BIT:
6185 IEM_MC_BEGIN(4, 0);
6186 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6187 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6188 IEM_MC_ARG(uint16_t, u16Src, 2);
6189 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6190
6191 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6192 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6193 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6194 IEM_MC_REF_EFLAGS(pEFlags);
6195 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6196 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6197 else
6198 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6199
6200 IEM_MC_ADVANCE_RIP();
6201 IEM_MC_END();
6202 return VINF_SUCCESS;
6203
6204 case IEMMODE_32BIT:
6205 IEM_MC_BEGIN(4, 0);
6206 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6207 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6208 IEM_MC_ARG(uint32_t, u32Src, 2);
6209 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6210
6211 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6212 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6213 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6214 IEM_MC_REF_EFLAGS(pEFlags);
6215 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6216 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6217 else
6218 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6219
6220 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6221 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6222 IEM_MC_ADVANCE_RIP();
6223 IEM_MC_END();
6224 return VINF_SUCCESS;
6225
6226 case IEMMODE_64BIT:
6227 IEM_MC_BEGIN(4, 0);
6228 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6229 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6230#ifdef RT_ARCH_X86
6231 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6232#else
6233 IEM_MC_ARG(uint64_t, u64Src, 2);
6234#endif
6235 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6236
6237 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6238 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6239 IEM_MC_REF_EFLAGS(pEFlags);
6240#ifdef RT_ARCH_X86
6241 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6242 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6243 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6244 else
6245 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6246#else
6247 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6248 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6249 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6250 else
6251 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6252#endif
6253
6254 IEM_MC_ADVANCE_RIP();
6255 IEM_MC_END();
6256 return VINF_SUCCESS;
6257
6258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6259 }
6260 }
6261 else
6262 {
6263 switch (pVCpu->iem.s.enmEffOpSize)
6264 {
6265 case IEMMODE_16BIT:
6266 IEM_MC_BEGIN(4, 3);
6267 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6268 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6269 IEM_MC_ARG(uint16_t, u16Src, 2);
6270 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6272 IEM_MC_LOCAL(uint16_t, u16Ax);
6273
6274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6275 IEMOP_HLP_DONE_DECODING();
6276 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6277 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6278 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6279 IEM_MC_FETCH_EFLAGS(EFlags);
6280 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6281 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6282 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6283 else
6284 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6285
6286 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6287 IEM_MC_COMMIT_EFLAGS(EFlags);
6288 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6289 IEM_MC_ADVANCE_RIP();
6290 IEM_MC_END();
6291 return VINF_SUCCESS;
6292
6293 case IEMMODE_32BIT:
6294 IEM_MC_BEGIN(4, 3);
6295 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6296 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6297 IEM_MC_ARG(uint32_t, u32Src, 2);
6298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6300 IEM_MC_LOCAL(uint32_t, u32Eax);
6301
6302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6303 IEMOP_HLP_DONE_DECODING();
6304 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6305 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6306 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6307 IEM_MC_FETCH_EFLAGS(EFlags);
6308 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6309 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6310 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6311 else
6312 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6313
6314 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6315 IEM_MC_COMMIT_EFLAGS(EFlags);
6316 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6317 IEM_MC_ADVANCE_RIP();
6318 IEM_MC_END();
6319 return VINF_SUCCESS;
6320
6321 case IEMMODE_64BIT:
6322 IEM_MC_BEGIN(4, 3);
6323 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6324 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6325#ifdef RT_ARCH_X86
6326 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6327#else
6328 IEM_MC_ARG(uint64_t, u64Src, 2);
6329#endif
6330 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6332 IEM_MC_LOCAL(uint64_t, u64Rax);
6333
6334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6335 IEMOP_HLP_DONE_DECODING();
6336 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6337 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6338 IEM_MC_FETCH_EFLAGS(EFlags);
6339 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6340#ifdef RT_ARCH_X86
6341 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6342 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6343 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6344 else
6345 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6346#else
6347 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6348 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6349 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6350 else
6351 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6352#endif
6353
6354 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6355 IEM_MC_COMMIT_EFLAGS(EFlags);
6356 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6357 IEM_MC_ADVANCE_RIP();
6358 IEM_MC_END();
6359 return VINF_SUCCESS;
6360
6361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6362 }
6363 }
6364}
6365
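/*
 * Plain-C sketch (hypothetical name) of the compare-and-exchange semantics
 * implemented by the iemAImpl_cmpxchg_u* workers used above: on a match the
 * destination takes the source and ZF is set; on a mismatch the accumulator
 * is loaded from the destination and ZF is cleared.
 */
#include <stdbool.h>
#include <stdint.h>

static bool iemExampleCmpXchgU32(uint32_t *puDst, uint32_t *puEax, uint32_t uSrc)
{
    if (*puDst == *puEax)
    {
        *puDst = uSrc;      /* equal: write the source operand */
        return true;        /* ZF=1 */
    }
    *puEax = *puDst;        /* not equal: accumulator gets the old value */
    return false;           /* ZF=0 */
}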
6366
6367FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6368{
6369 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6370 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6371
6372 switch (pVCpu->iem.s.enmEffOpSize)
6373 {
6374 case IEMMODE_16BIT:
6375 IEM_MC_BEGIN(5, 1);
6376 IEM_MC_ARG(uint16_t, uSel, 0);
6377 IEM_MC_ARG(uint16_t, offSeg, 1);
6378 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6379 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6380 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6381 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6385 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6386 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6387 IEM_MC_END();
6388 return VINF_SUCCESS;
6389
6390 case IEMMODE_32BIT:
6391 IEM_MC_BEGIN(5, 1);
6392 IEM_MC_ARG(uint16_t, uSel, 0);
6393 IEM_MC_ARG(uint32_t, offSeg, 1);
6394 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6395 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6396 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6397 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6400 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6401 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6402 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6403 IEM_MC_END();
6404 return VINF_SUCCESS;
6405
6406 case IEMMODE_64BIT:
6407 IEM_MC_BEGIN(5, 1);
6408 IEM_MC_ARG(uint16_t, uSel, 0);
6409 IEM_MC_ARG(uint64_t, offSeg, 1);
6410 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6411 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6412 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6413 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6416 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6417 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6418 else
6419 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6420 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6421 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6422 IEM_MC_END();
6423 return VINF_SUCCESS;
6424
6425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6426 }
6427}
6428
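/*
 * Memory layout read by the common worker above for the 32-bit operand size
 * (lss/lfs/lgs Gv,Mp): the offset comes first and the selector follows it,
 * which is why uSel is fetched at displacement 4.  The struct name is made up.
 */
#include <stdint.h>
#pragma pack(1)
typedef struct IEMEXAMPLEFARPTR32
{
    uint32_t off; /* -> offSeg, displacement 0 */
    uint16_t sel; /* -> uSel,   displacement 4 */
} IEMEXAMPLEFARPTR32;
#pragma pack()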
6429
6430/** Opcode 0x0f 0xb2. */
6431FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6432{
6433 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6434 IEMOP_HLP_MIN_386();
6435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6437 return IEMOP_RAISE_INVALID_OPCODE();
6438 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6439}
6440
6441
6442/** Opcode 0x0f 0xb3. */
6443FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6444{
6445 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6446 IEMOP_HLP_MIN_386();
6447 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6448}
6449
6450
6451/** Opcode 0x0f 0xb4. */
6452FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6453{
6454 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6455 IEMOP_HLP_MIN_386();
6456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6457 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6458 return IEMOP_RAISE_INVALID_OPCODE();
6459 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6460}
6461
6462
6463/** Opcode 0x0f 0xb5. */
6464FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6465{
6466 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6467 IEMOP_HLP_MIN_386();
6468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6469 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6470 return IEMOP_RAISE_INVALID_OPCODE();
6471 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6472}
6473
6474
6475/** Opcode 0x0f 0xb6. */
6476FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6477{
6478 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6479 IEMOP_HLP_MIN_386();
6480
6481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6482
6483 /*
6484 * If rm is denoting a register, no more instruction bytes.
6485 */
6486 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6487 {
6488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6489 switch (pVCpu->iem.s.enmEffOpSize)
6490 {
6491 case IEMMODE_16BIT:
6492 IEM_MC_BEGIN(0, 1);
6493 IEM_MC_LOCAL(uint16_t, u16Value);
6494 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6495 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6496 IEM_MC_ADVANCE_RIP();
6497 IEM_MC_END();
6498 return VINF_SUCCESS;
6499
6500 case IEMMODE_32BIT:
6501 IEM_MC_BEGIN(0, 1);
6502 IEM_MC_LOCAL(uint32_t, u32Value);
6503 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6504 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6505 IEM_MC_ADVANCE_RIP();
6506 IEM_MC_END();
6507 return VINF_SUCCESS;
6508
6509 case IEMMODE_64BIT:
6510 IEM_MC_BEGIN(0, 1);
6511 IEM_MC_LOCAL(uint64_t, u64Value);
6512 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6513 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6514 IEM_MC_ADVANCE_RIP();
6515 IEM_MC_END();
6516 return VINF_SUCCESS;
6517
6518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6519 }
6520 }
6521 else
6522 {
6523 /*
6524 * We're loading a register from memory.
6525 */
6526 switch (pVCpu->iem.s.enmEffOpSize)
6527 {
6528 case IEMMODE_16BIT:
6529 IEM_MC_BEGIN(0, 2);
6530 IEM_MC_LOCAL(uint16_t, u16Value);
6531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6534 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6535 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6536 IEM_MC_ADVANCE_RIP();
6537 IEM_MC_END();
6538 return VINF_SUCCESS;
6539
6540 case IEMMODE_32BIT:
6541 IEM_MC_BEGIN(0, 2);
6542 IEM_MC_LOCAL(uint32_t, u32Value);
6543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6546 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6547 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6548 IEM_MC_ADVANCE_RIP();
6549 IEM_MC_END();
6550 return VINF_SUCCESS;
6551
6552 case IEMMODE_64BIT:
6553 IEM_MC_BEGIN(0, 2);
6554 IEM_MC_LOCAL(uint64_t, u64Value);
6555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6558 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6559 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6560 IEM_MC_ADVANCE_RIP();
6561 IEM_MC_END();
6562 return VINF_SUCCESS;
6563
6564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6565 }
6566 }
6567}
6568
6569
6570/** Opcode 0x0f 0xb7. */
6571FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6572{
6573 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6574 IEMOP_HLP_MIN_386();
6575
6576 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6577
6578 /** @todo Not entirely sure how the operand size prefix is handled here,
6579 * assuming that it will be ignored. Would be nice to have a few
6580 *        tests for this. */
6581 /*
6582 * If rm is denoting a register, no more instruction bytes.
6583 */
6584 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6585 {
6586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6587 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6588 {
6589 IEM_MC_BEGIN(0, 1);
6590 IEM_MC_LOCAL(uint32_t, u32Value);
6591 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6592 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6593 IEM_MC_ADVANCE_RIP();
6594 IEM_MC_END();
6595 }
6596 else
6597 {
6598 IEM_MC_BEGIN(0, 1);
6599 IEM_MC_LOCAL(uint64_t, u64Value);
6600 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6601 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6602 IEM_MC_ADVANCE_RIP();
6603 IEM_MC_END();
6604 }
6605 }
6606 else
6607 {
6608 /*
6609 * We're loading a register from memory.
6610 */
6611 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6612 {
6613 IEM_MC_BEGIN(0, 2);
6614 IEM_MC_LOCAL(uint32_t, u32Value);
6615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6618 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6619 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6620 IEM_MC_ADVANCE_RIP();
6621 IEM_MC_END();
6622 }
6623 else
6624 {
6625 IEM_MC_BEGIN(0, 2);
6626 IEM_MC_LOCAL(uint64_t, u64Value);
6627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6630 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6631 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6632 IEM_MC_ADVANCE_RIP();
6633 IEM_MC_END();
6634 }
6635 }
6636 return VINF_SUCCESS;
6637}
6638
6639
6640/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6641FNIEMOP_UD_STUB(iemOp_jmpe);
6642/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6643FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6644
6645
6646/**
6647 * @opcode 0xb9
6648 * @opinvalid intel-modrm
6649 * @optest ->
6650 */
6651FNIEMOP_DEF(iemOp_Grp10)
6652{
6653 /*
6654     * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
6655     * ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6656 */
6657 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6658 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6659 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6660}
6661
6662
6663/** Opcode 0x0f 0xba. */
6664FNIEMOP_DEF(iemOp_Grp8)
6665{
6666 IEMOP_HLP_MIN_386();
6667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6668 PCIEMOPBINSIZES pImpl;
6669 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6670 {
6671 case 0: case 1: case 2: case 3:
6672 /* Both AMD and Intel want full modr/m decoding and imm8. */
6673 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6674 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6675 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6676 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6677 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6679 }
6680 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6681
6682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6683 {
6684 /* register destination. */
6685 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6687
6688 switch (pVCpu->iem.s.enmEffOpSize)
6689 {
6690 case IEMMODE_16BIT:
6691 IEM_MC_BEGIN(3, 0);
6692 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6693 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6694 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6695
6696 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6697 IEM_MC_REF_EFLAGS(pEFlags);
6698 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6699
6700 IEM_MC_ADVANCE_RIP();
6701 IEM_MC_END();
6702 return VINF_SUCCESS;
6703
6704 case IEMMODE_32BIT:
6705 IEM_MC_BEGIN(3, 0);
6706 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6707 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6708 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6709
6710 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6711 IEM_MC_REF_EFLAGS(pEFlags);
6712 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6713
6714 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6715 IEM_MC_ADVANCE_RIP();
6716 IEM_MC_END();
6717 return VINF_SUCCESS;
6718
6719 case IEMMODE_64BIT:
6720 IEM_MC_BEGIN(3, 0);
6721 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6722 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6723 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6724
6725 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6726 IEM_MC_REF_EFLAGS(pEFlags);
6727 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6728
6729 IEM_MC_ADVANCE_RIP();
6730 IEM_MC_END();
6731 return VINF_SUCCESS;
6732
6733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6734 }
6735 }
6736 else
6737 {
6738 /* memory destination. */
6739
6740 uint32_t fAccess;
6741 if (pImpl->pfnLockedU16)
6742 fAccess = IEM_ACCESS_DATA_RW;
6743 else /* BT */
6744 fAccess = IEM_ACCESS_DATA_R;
6745
6746 /** @todo test negative bit offsets! */
6747 switch (pVCpu->iem.s.enmEffOpSize)
6748 {
6749 case IEMMODE_16BIT:
6750 IEM_MC_BEGIN(3, 1);
6751 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6752 IEM_MC_ARG(uint16_t, u16Src, 1);
6753 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6755
6756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6757 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6758 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6759 if (pImpl->pfnLockedU16)
6760 IEMOP_HLP_DONE_DECODING();
6761 else
6762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6763 IEM_MC_FETCH_EFLAGS(EFlags);
6764 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6765 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6766 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6767 else
6768 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6769 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6770
6771 IEM_MC_COMMIT_EFLAGS(EFlags);
6772 IEM_MC_ADVANCE_RIP();
6773 IEM_MC_END();
6774 return VINF_SUCCESS;
6775
6776 case IEMMODE_32BIT:
6777 IEM_MC_BEGIN(3, 1);
6778 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6779 IEM_MC_ARG(uint32_t, u32Src, 1);
6780 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6782
6783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6784 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6785 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6786 if (pImpl->pfnLockedU16)
6787 IEMOP_HLP_DONE_DECODING();
6788 else
6789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6790 IEM_MC_FETCH_EFLAGS(EFlags);
6791 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6792 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6793 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6794 else
6795 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6796 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6797
6798 IEM_MC_COMMIT_EFLAGS(EFlags);
6799 IEM_MC_ADVANCE_RIP();
6800 IEM_MC_END();
6801 return VINF_SUCCESS;
6802
6803 case IEMMODE_64BIT:
6804 IEM_MC_BEGIN(3, 1);
6805 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6806 IEM_MC_ARG(uint64_t, u64Src, 1);
6807 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6809
6810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6811 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6812 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6813 if (pImpl->pfnLockedU16)
6814 IEMOP_HLP_DONE_DECODING();
6815 else
6816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6817 IEM_MC_FETCH_EFLAGS(EFlags);
6818 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6819 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6820 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6821 else
6822 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6823 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6824
6825 IEM_MC_COMMIT_EFLAGS(EFlags);
6826 IEM_MC_ADVANCE_RIP();
6827 IEM_MC_END();
6828 return VINF_SUCCESS;
6829
6830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6831 }
6832 }
6833}
6834
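/*
 * Unlike the register-source bit instructions earlier, the Ev,Ib forms above
 * simply mask the immediate bit offset to the operand width, so no effective
 * address adjustment is needed.  Hypothetical helper:
 */
#include <stdint.h>

static uint8_t iemExampleBtImmBitNo(uint8_t u8Bit, unsigned cOpBits)
{
    return (uint8_t)(u8Bit & (cOpBits - 1)); /* & 0x0f, 0x1f or 0x3f for 16/32/64-bit */
}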
6835
6836/** Opcode 0x0f 0xbb. */
6837FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6838{
6839 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6840 IEMOP_HLP_MIN_386();
6841 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6842}
6843
6844
6845/** Opcode 0x0f 0xbc. */
6846FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6847{
6848 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6849 IEMOP_HLP_MIN_386();
6850 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6851 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6852}
6853
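/*
 * Reference sketch (made-up name) of the BSF semantics behind g_iemAImpl_bsf:
 * scan for the lowest set bit; a zero source sets ZF and leaves the
 * destination alone (Intel documents the destination as undefined in that
 * case, while real CPUs tend to keep the old value).
 */
#include <stdbool.h>
#include <stdint.h>

static bool iemExampleBsfU32(uint32_t uSrc, uint32_t *puDst)
{
    if (!uSrc)
        return false;              /* ZF=1, *puDst untouched */
    uint32_t iBit = 0;
    while (!(uSrc & 1))
    {
        uSrc >>= 1;
        iBit++;
    }
    *puDst = iBit;
    return true;                   /* ZF=0 */
}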
6854
6855/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6856FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6857
6858
6859/** Opcode 0x0f 0xbd. */
6860FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6861{
6862 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6863 IEMOP_HLP_MIN_386();
6864 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6865 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6866}
6867
6868
6869/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6870FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6871
6872
6873/** Opcode 0x0f 0xbe. */
6874FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6875{
6876 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6877 IEMOP_HLP_MIN_386();
6878
6879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6880
6881 /*
6882 * If rm is denoting a register, no more instruction bytes.
6883 */
6884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6885 {
6886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6887 switch (pVCpu->iem.s.enmEffOpSize)
6888 {
6889 case IEMMODE_16BIT:
6890 IEM_MC_BEGIN(0, 1);
6891 IEM_MC_LOCAL(uint16_t, u16Value);
6892 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6893 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6894 IEM_MC_ADVANCE_RIP();
6895 IEM_MC_END();
6896 return VINF_SUCCESS;
6897
6898 case IEMMODE_32BIT:
6899 IEM_MC_BEGIN(0, 1);
6900 IEM_MC_LOCAL(uint32_t, u32Value);
6901 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6902 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6903 IEM_MC_ADVANCE_RIP();
6904 IEM_MC_END();
6905 return VINF_SUCCESS;
6906
6907 case IEMMODE_64BIT:
6908 IEM_MC_BEGIN(0, 1);
6909 IEM_MC_LOCAL(uint64_t, u64Value);
6910 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6911 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6912 IEM_MC_ADVANCE_RIP();
6913 IEM_MC_END();
6914 return VINF_SUCCESS;
6915
6916 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6917 }
6918 }
6919 else
6920 {
6921 /*
6922 * We're loading a register from memory.
6923 */
6924 switch (pVCpu->iem.s.enmEffOpSize)
6925 {
6926 case IEMMODE_16BIT:
6927 IEM_MC_BEGIN(0, 2);
6928 IEM_MC_LOCAL(uint16_t, u16Value);
6929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6932 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6933 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6934 IEM_MC_ADVANCE_RIP();
6935 IEM_MC_END();
6936 return VINF_SUCCESS;
6937
6938 case IEMMODE_32BIT:
6939 IEM_MC_BEGIN(0, 2);
6940 IEM_MC_LOCAL(uint32_t, u32Value);
6941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6944 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6945 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6946 IEM_MC_ADVANCE_RIP();
6947 IEM_MC_END();
6948 return VINF_SUCCESS;
6949
6950 case IEMMODE_64BIT:
6951 IEM_MC_BEGIN(0, 2);
6952 IEM_MC_LOCAL(uint64_t, u64Value);
6953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6956 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6957 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6958 IEM_MC_ADVANCE_RIP();
6959 IEM_MC_END();
6960 return VINF_SUCCESS;
6961
6962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6963 }
6964 }
6965}
6966
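/*
 * The sign extension performed by IEM_MC_FETCH_GREG_U8_SX_U32 above, written
 * out as a one-line C cast chain (hypothetical helper):
 */
#include <stdint.h>

static uint32_t iemExampleMovsxU8ToU32(uint8_t u8)
{
    return (uint32_t)(int32_t)(int8_t)u8; /* e.g. 0x80 -> 0xffffff80 */
}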
6967
6968/** Opcode 0x0f 0xbf. */
6969FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6970{
6971 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6972 IEMOP_HLP_MIN_386();
6973
6974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6975
6976 /** @todo Not entirely sure how the operand size prefix is handled here,
6977 * assuming that it will be ignored. Would be nice to have a few
6978 * tests for this. */
6979 /*
6980 * If rm is denoting a register, no more instruction bytes.
6981 */
6982 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6983 {
6984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6985 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6986 {
6987 IEM_MC_BEGIN(0, 1);
6988 IEM_MC_LOCAL(uint32_t, u32Value);
6989 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6990 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6991 IEM_MC_ADVANCE_RIP();
6992 IEM_MC_END();
6993 }
6994 else
6995 {
6996 IEM_MC_BEGIN(0, 1);
6997 IEM_MC_LOCAL(uint64_t, u64Value);
6998 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6999 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7000 IEM_MC_ADVANCE_RIP();
7001 IEM_MC_END();
7002 }
7003 }
7004 else
7005 {
7006 /*
7007 * We're loading a register from memory.
7008 */
7009 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7010 {
7011 IEM_MC_BEGIN(0, 2);
7012 IEM_MC_LOCAL(uint32_t, u32Value);
7013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7016 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7017 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7018 IEM_MC_ADVANCE_RIP();
7019 IEM_MC_END();
7020 }
7021 else
7022 {
7023 IEM_MC_BEGIN(0, 2);
7024 IEM_MC_LOCAL(uint64_t, u64Value);
7025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7028 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7029 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7030 IEM_MC_ADVANCE_RIP();
7031 IEM_MC_END();
7032 }
7033 }
7034 return VINF_SUCCESS;
7035}
7036
7037
7038/** Opcode 0x0f 0xc0. */
7039FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7040{
7041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7042 IEMOP_HLP_MIN_486();
7043 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7044
7045 /*
7046 * If rm is denoting a register, no more instruction bytes.
7047 */
7048 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7049 {
7050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7051
7052 IEM_MC_BEGIN(3, 0);
7053 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7054 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7055 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7056
7057 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7058 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7059 IEM_MC_REF_EFLAGS(pEFlags);
7060 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7061
7062 IEM_MC_ADVANCE_RIP();
7063 IEM_MC_END();
7064 }
7065 else
7066 {
7067 /*
7068 * We're accessing memory.
7069 */
7070 IEM_MC_BEGIN(3, 3);
7071 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7072 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7073 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7074 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7076
7077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7078 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7079 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7080 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7081 IEM_MC_FETCH_EFLAGS(EFlags);
7082 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7083 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7084 else
7085 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7086
7087 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7088 IEM_MC_COMMIT_EFLAGS(EFlags);
7089 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7090 IEM_MC_ADVANCE_RIP();
7091 IEM_MC_END();
7093 }
7094 return VINF_SUCCESS;
7095}
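/* A sketch of the xadd semantics implemented by the iemAImpl worker above
   (illustrative only; the helper name is made up and EFLAGS updating is
   left out): the destination receives the sum while the source register
   receives the old destination value.

       static void iemSketchXaddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
       {
           uint8_t const u8OldDst = *pu8Dst;
           *pu8Dst = (uint8_t)(u8OldDst + *pu8Reg);
           *pu8Reg = u8OldDst;
       }
*/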
7096
7097
7098/** Opcode 0x0f 0xc1. */
7099FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7100{
7101 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7102 IEMOP_HLP_MIN_486();
7103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7104
7105 /*
7106 * If rm is denoting a register, no more instruction bytes.
7107 */
7108 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7109 {
7110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7111
7112 switch (pVCpu->iem.s.enmEffOpSize)
7113 {
7114 case IEMMODE_16BIT:
7115 IEM_MC_BEGIN(3, 0);
7116 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7117 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7118 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7119
7120 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7121 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7122 IEM_MC_REF_EFLAGS(pEFlags);
7123 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7124
7125 IEM_MC_ADVANCE_RIP();
7126 IEM_MC_END();
7127 return VINF_SUCCESS;
7128
7129 case IEMMODE_32BIT:
7130 IEM_MC_BEGIN(3, 0);
7131 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7132 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7133 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7134
7135 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7136 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7137 IEM_MC_REF_EFLAGS(pEFlags);
7138 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7139
7140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7141 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7142 IEM_MC_ADVANCE_RIP();
7143 IEM_MC_END();
7144 return VINF_SUCCESS;
7145
7146 case IEMMODE_64BIT:
7147 IEM_MC_BEGIN(3, 0);
7148 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7149 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7150 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7151
7152 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7153 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7154 IEM_MC_REF_EFLAGS(pEFlags);
7155 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7156
7157 IEM_MC_ADVANCE_RIP();
7158 IEM_MC_END();
7159 return VINF_SUCCESS;
7160
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 }
7164 else
7165 {
7166 /*
7167 * We're accessing memory.
7168 */
7169 switch (pVCpu->iem.s.enmEffOpSize)
7170 {
7171 case IEMMODE_16BIT:
7172 IEM_MC_BEGIN(3, 3);
7173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7174 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7175 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7176 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7178
7179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7180 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7181 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7182 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7183 IEM_MC_FETCH_EFLAGS(EFlags);
7184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7185 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7186 else
7187 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7188
7189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7190 IEM_MC_COMMIT_EFLAGS(EFlags);
7191 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7192 IEM_MC_ADVANCE_RIP();
7193 IEM_MC_END();
7194 return VINF_SUCCESS;
7195
7196 case IEMMODE_32BIT:
7197 IEM_MC_BEGIN(3, 3);
7198 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7199 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7200 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7201 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7203
7204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7205 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7206 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7207 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7208 IEM_MC_FETCH_EFLAGS(EFlags);
7209 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7210 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7211 else
7212 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7213
7214 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7215 IEM_MC_COMMIT_EFLAGS(EFlags);
7216 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7217 IEM_MC_ADVANCE_RIP();
7218 IEM_MC_END();
7219 return VINF_SUCCESS;
7220
7221 case IEMMODE_64BIT:
7222 IEM_MC_BEGIN(3, 3);
7223 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7224 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7225 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7226 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7228
7229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7230 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7231 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7232 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7233 IEM_MC_FETCH_EFLAGS(EFlags);
7234 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7235 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7236 else
7237 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7238
7239 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7240 IEM_MC_COMMIT_EFLAGS(EFlags);
7241 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7242 IEM_MC_ADVANCE_RIP();
7243 IEM_MC_END();
7244 return VINF_SUCCESS;
7245
7246 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7247 }
7248 }
7249}
7250
7251
7252/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7253FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7254/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7255FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7256/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7257FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7258/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7259FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7260
7261
7262/** Opcode 0x0f 0xc3. */
7263FNIEMOP_DEF(iemOp_movnti_My_Gy)
7264{
7265 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7266
7267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7268
7269 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7270 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7271 {
7272 switch (pVCpu->iem.s.enmEffOpSize)
7273 {
7274 case IEMMODE_32BIT:
7275 IEM_MC_BEGIN(0, 2);
7276 IEM_MC_LOCAL(uint32_t, u32Value);
7277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7278
7279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7281 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7282 return IEMOP_RAISE_INVALID_OPCODE();
7283
7284 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7285 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7286 IEM_MC_ADVANCE_RIP();
7287 IEM_MC_END();
7288 break;
7289
7290 case IEMMODE_64BIT:
7291 IEM_MC_BEGIN(0, 2);
7292 IEM_MC_LOCAL(uint64_t, u64Value);
7293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7294
7295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7297 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7298 return IEMOP_RAISE_INVALID_OPCODE();
7299
7300 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7301 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7302 IEM_MC_ADVANCE_RIP();
7303 IEM_MC_END();
7304 break;
7305
7306 case IEMMODE_16BIT:
7307 /** @todo check this form. */
7308 return IEMOP_RAISE_INVALID_OPCODE();
7309 }
7310 }
7311 else
7312 return IEMOP_RAISE_INVALID_OPCODE();
7313 return VINF_SUCCESS;
7314}
7315/* Opcode 0x66 0x0f 0xc3 - invalid */
7316/* Opcode 0xf3 0x0f 0xc3 - invalid */
7317/* Opcode 0xf2 0x0f 0xc3 - invalid */
7318
7319/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7320FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7321/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7322FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7323/* Opcode 0xf3 0x0f 0xc4 - invalid */
7324/* Opcode 0xf2 0x0f 0xc4 - invalid */
7325
7326/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7327FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7328/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7329FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7330/* Opcode 0xf3 0x0f 0xc5 - invalid */
7331/* Opcode 0xf2 0x0f 0xc5 - invalid */
7332
7333/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7334FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7335/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7336FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7337/* Opcode 0xf3 0x0f 0xc6 - invalid */
7338/* Opcode 0xf2 0x0f 0xc6 - invalid */
7339
7340
7341/** Opcode 0x0f 0xc7 !11/1. */
7342FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7343{
7344 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7345
7346 IEM_MC_BEGIN(4, 3);
7347 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7348 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7349 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7350 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7351 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7352 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7354
7355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7356 IEMOP_HLP_DONE_DECODING();
7357 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7358
7359 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7360 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7361 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7362
7363 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7364 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7365 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7366
7367 IEM_MC_FETCH_EFLAGS(EFlags);
7368 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7369 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7370 else
7371 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7372
7373 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7374 IEM_MC_COMMIT_EFLAGS(EFlags);
7375 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7376 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7377 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7378 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7379 IEM_MC_ENDIF();
7380 IEM_MC_ADVANCE_RIP();
7381
7382 IEM_MC_END();
7383 return VINF_SUCCESS;
7384}
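/* A sketch of the compare-and-exchange performed by the worker above
   (illustrative only; the helper name is made up and the EFLAGS handling is
   reduced to the return value): EDX:EAX is compared against the memory
   operand; on a match ZF is set and ECX:EBX is stored, otherwise ZF is
   cleared and the memory value is loaded into EDX:EAX.

       static bool iemSketchCmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
       {
           if (*pu64Mem == *pu64EaxEdx)
           {
               *pu64Mem = u64EbxEcx;
               return true;     /* ZF=1 */
           }
           *pu64EaxEdx = *pu64Mem;
           return false;        /* ZF=0 */
       }
*/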
7385
7386
7387/** Opcode REX.W 0x0f 0xc7 !11/1. */
7388FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7389{
7390 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7391 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7392 {
7393#if 0
7394 RT_NOREF(bRm);
7395 IEMOP_BITCH_ABOUT_STUB();
7396 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7397#else
7398 IEM_MC_BEGIN(4, 3);
7399 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7400 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7401 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7402 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7403 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7404 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7406
7407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7408 IEMOP_HLP_DONE_DECODING();
7409 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7410 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7411
7412 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7413 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7414 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7415
7416 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7417 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7418 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7419
7420 IEM_MC_FETCH_EFLAGS(EFlags);
7421# ifdef RT_ARCH_AMD64
7422 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7423 {
7424 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7425 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7426 else
7427 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7428 }
7429 else
7430# endif
7431 {
7432 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7433 accesses and not at all atomic, which works fine in a uni-CPU guest
7434 configuration (ignoring DMA). If guest SMP is active we have no choice
7435 but to use a rendezvous callback here. Sigh. */
7436 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7437 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7438 else
7439 {
7440 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7441 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7442 }
7443 }
7444
7445 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7446 IEM_MC_COMMIT_EFLAGS(EFlags);
7447 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7448 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7449 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7450 IEM_MC_ENDIF();
7451 IEM_MC_ADVANCE_RIP();
7452
7453 IEM_MC_END();
7454 return VINF_SUCCESS;
7455#endif
7456 }
7457 Log(("cmpxchg16b -> #UD\n"));
7458 return IEMOP_RAISE_INVALID_OPCODE();
7459}
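/* On hosts with CX16 the native worker boils down to a single atomic
   16-byte compare-exchange; a rough GCC/Clang sketch of that operation
   (illustrative only, assumes -mcx16 and the __int128 extension, and is
   not the actual iemAImpl code):

       static bool iemSketchCmpXchg16b(volatile unsigned __int128 *pu128Mem,
                                       unsigned __int128 *pu128RaxRdx,
                                       unsigned __int128 u128RbxRcx)
       {
           return __atomic_compare_exchange_n(pu128Mem, pu128RaxRdx, u128RbxRcx,
                                              false /*fWeak*/, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
       }
*/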
7460
7461FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7462{
7463 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7464 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7465 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7466}
7467
7468/** Opcode 0x0f 0xc7 11/6. */
7469FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7470
7471/** Opcode 0x0f 0xc7 !11/6. */
7472FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7473
7474/** Opcode 0x66 0x0f 0xc7 !11/6. */
7475FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7476
7477/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7478FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7479
7480/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7481FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7482
7483/** Opcode 0x0f 0xc7 11/7. */
7484FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7485
7486
7487/**
7488 * Group 9 jump table for register variant.
7489 */
7490IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7491{ /* pfx: none, 066h, 0f3h, 0f2h */
7492 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7493 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7494 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7495 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7496 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7497 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7498 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7499 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7500};
7501AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7502
7503
7504/**
7505 * Group 9 jump table for memory variant.
7506 */
7507IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7508{ /* pfx: none, 066h, 0f3h, 0f2h */
7509 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7510 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7511 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7512 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7513 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7514 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7515 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7516 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7517};
7518AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7519
7520
7521/** Opcode 0x0f 0xc7. */
7522FNIEMOP_DEF(iemOp_Grp9)
7523{
7524 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7525 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7526 /* register, register */
7527 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7528 + pVCpu->iem.s.idxPrefix], bRm);
7529 /* memory, register */
7530 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7531 + pVCpu->iem.s.idxPrefix], bRm);
7532}
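/* Worked indexing example (illustrative): for the byte sequence 66 0F C7 F0
   the ModRM byte is 0xf0, so mod=3 selects g_apfnGroup9RegReg, and reg=6
   with idxPrefix=1 (066h) yields index 6*4 + 1, i.e. the 066h rdrand
   entry. */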
7533
7534
7535/**
7536 * Common 'bswap register' helper.
7537 */
7538FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7539{
7540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7541 switch (pVCpu->iem.s.enmEffOpSize)
7542 {
7543 case IEMMODE_16BIT:
7544 IEM_MC_BEGIN(1, 0);
7545 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7546 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7547 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7548 IEM_MC_ADVANCE_RIP();
7549 IEM_MC_END();
7550 return VINF_SUCCESS;
7551
7552 case IEMMODE_32BIT:
7553 IEM_MC_BEGIN(1, 0);
7554 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7555 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7556 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7557 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7558 IEM_MC_ADVANCE_RIP();
7559 IEM_MC_END();
7560 return VINF_SUCCESS;
7561
7562 case IEMMODE_64BIT:
7563 IEM_MC_BEGIN(1, 0);
7564 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7565 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7566 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7567 IEM_MC_ADVANCE_RIP();
7568 IEM_MC_END();
7569 return VINF_SUCCESS;
7570
7571 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7572 }
7573}
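/* The 32-bit and 64-bit paths above are plain byte reversals; a C sketch of
   the 32-bit case (illustrative only, the helper name is made up):

       static uint32_t iemSketchBswapU32(uint32_t u32)
       {
           return (u32 >> 24)
                | ((u32 >> 8) & UINT32_C(0x0000ff00))
                | ((u32 << 8) & UINT32_C(0x00ff0000))
                | (u32 << 24);
       }

   The 16-bit form is documented as undefined, which is why it gets a
   dedicated iemAImpl_bswap_u16 worker instead of sharing the 32-bit one. */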
7574
7575
7576/** Opcode 0x0f 0xc8. */
7577FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7578{
7579 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7580 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7581 prefix. It appears REX.B is the correct prefix, though. For a parallel
7582 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7583 IEMOP_HLP_MIN_486();
7584 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7585}
7586
7587
7588/** Opcode 0x0f 0xc9. */
7589FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7590{
7591 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7592 IEMOP_HLP_MIN_486();
7593 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7594}
7595
7596
7597/** Opcode 0x0f 0xca. */
7598FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7599{
7600 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7601 IEMOP_HLP_MIN_486();
7602 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7603}
7604
7605
7606/** Opcode 0x0f 0xcb. */
7607FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7608{
7609 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7610 IEMOP_HLP_MIN_486();
7611 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7612}
7613
7614
7615/** Opcode 0x0f 0xcc. */
7616FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7617{
7618 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7619 IEMOP_HLP_MIN_486();
7620 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7621}
7622
7623
7624/** Opcode 0x0f 0xcd. */
7625FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7626{
7627 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7628 IEMOP_HLP_MIN_486();
7629 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7630}
7631
7632
7633/** Opcode 0x0f 0xce. */
7634FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7635{
7636 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7637 IEMOP_HLP_MIN_486();
7638 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7639}
7640
7641
7642/** Opcode 0x0f 0xcf. */
7643FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7644{
7645 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7646 IEMOP_HLP_MIN_486();
7647 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7648}
7649
7650
7651/* Opcode 0x0f 0xd0 - invalid */
7652/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7653FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7654/* Opcode 0xf3 0x0f 0xd0 - invalid */
7655/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7656FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7657
7658/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7659FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7660/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
7661FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7662/* Opcode 0xf3 0x0f 0xd1 - invalid */
7663/* Opcode 0xf2 0x0f 0xd1 - invalid */
7664
7665/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7666FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7667/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7668FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7669/* Opcode 0xf3 0x0f 0xd2 - invalid */
7670/* Opcode 0xf2 0x0f 0xd2 - invalid */
7671
7672/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7673FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7674/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
7675FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
7676/* Opcode 0xf3 0x0f 0xd3 - invalid */
7677/* Opcode 0xf2 0x0f 0xd3 - invalid */
7678
7679/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7680FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7681/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
7682FNIEMOP_STUB(iemOp_paddq_Vx_W);
7683/* Opcode 0xf3 0x0f 0xd4 - invalid */
7684/* Opcode 0xf2 0x0f 0xd4 - invalid */
7685
7686/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7687FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7688/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
7689FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
7690/* Opcode 0xf3 0x0f 0xd5 - invalid */
7691/* Opcode 0xf2 0x0f 0xd5 - invalid */
7692
7693/* Opcode 0x0f 0xd6 - invalid */
7694
7695/**
7696 * @opcode 0xd6
7697 * @oppfx 0x66
7698 * @opcpuid sse2
7699 * @opgroup og_sse2_pcksclr_datamove
7700 * @opxcpttype none
7701 * @optest op1=-1 op2=2 -> op1=2
7702 * @optest op1=0 op2=-42 -> op1=-42
7703 */
7704FNIEMOP_DEF(iemOp_movq_Wq_Vq)
7705{
7706 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7708 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7709 {
7710 /*
7711 * Register, register.
7712 */
7713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7714 IEM_MC_BEGIN(0, 2);
7715 IEM_MC_LOCAL(uint64_t, uSrc);
7716
7717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7719
7720 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7721 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7722
7723 IEM_MC_ADVANCE_RIP();
7724 IEM_MC_END();
7725 }
7726 else
7727 {
7728 /*
7729 * Memory, register.
7730 */
7731 IEM_MC_BEGIN(0, 2);
7732 IEM_MC_LOCAL(uint64_t, uSrc);
7733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7734
7735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7737 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7738 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7739
7740 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7741 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7742
7743 IEM_MC_ADVANCE_RIP();
7744 IEM_MC_END();
7745 }
7746 return VINF_SUCCESS;
7747}
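/* A sketch of the register-form semantics above (illustrative only; the
   helper name is made up): the low quadword is copied and the destination's
   high quadword is zeroed, matching the WqZxReg operand annotation in the
   mnemonic.

       static void iemSketchMovqWqVq(RTUINT128U *pDst, PCRTUINT128U pSrc)
       {
           pDst->s.Lo = pSrc->s.Lo;
           pDst->s.Hi = 0;
       }
*/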
7748
7749
7750/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7751FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7752/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7753FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7754#if 0
7755FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7756{
7757 /* Docs say register only. */
7758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7759
7760 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7761 {
7762 case IEM_OP_PRF_SIZE_OP: /* SSE */
7763 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7764 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7765 IEM_MC_BEGIN(2, 0);
7766 IEM_MC_ARG(uint64_t *, pDst, 0);
7767 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7768 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7769 IEM_MC_PREPARE_SSE_USAGE();
7770 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7771 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7772 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7773 IEM_MC_ADVANCE_RIP();
7774 IEM_MC_END();
7775 return VINF_SUCCESS;
7776
7777 case 0: /* MMX */
7778 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7779 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7780 IEM_MC_BEGIN(2, 0);
7781 IEM_MC_ARG(uint64_t *, pDst, 0);
7782 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7783 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7784 IEM_MC_PREPARE_FPU_USAGE();
7785 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7786 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7787 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7788 IEM_MC_ADVANCE_RIP();
7789 IEM_MC_END();
7790 return VINF_SUCCESS;
7791
7792 default:
7793 return IEMOP_RAISE_INVALID_OPCODE();
7794 }
7795}
7796#endif
7797
7798
7799/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7800FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7801{
7802 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7803 /** @todo testcase: Check that the instruction implicitly clears the high
7804 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7805 * and opcode modifications are made to work with the whole width (not
7806 * just 128). */
7807 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7808 /* Docs say register only. */
7809 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7810 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7811 {
7812 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7813 IEM_MC_BEGIN(2, 0);
7814 IEM_MC_ARG(uint64_t *, pDst, 0);
7815 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7816 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7817 IEM_MC_PREPARE_FPU_USAGE();
7818 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7819 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7820 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7821 IEM_MC_ADVANCE_RIP();
7822 IEM_MC_END();
7823 return VINF_SUCCESS;
7824 }
7825 return IEMOP_RAISE_INVALID_OPCODE();
7826}
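/* A sketch of the pmovmskb operation (illustrative only; the helper name is
   made up): one result bit is collected from the most significant bit of
   each source byte.

       static uint64_t iemSketchPmovmskbU64(uint64_t u64Src)
       {
           uint64_t fMask = 0;
           for (unsigned iByte = 0; iByte < 8; iByte++)
               fMask |= ((u64Src >> (iByte * 8 + 7)) & 1) << iByte;
           return fMask;
       }
*/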
7827
7828/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
7829FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
7830{
7831 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7832 /** @todo testcase: Check that the instruction implicitly clears the high
7833 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7834 * and opcode modifications are made to work with the whole width (not
7835 * just 128). */
7836 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
7837 /* Docs say register only. */
7838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7840 {
7841 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7842 IEM_MC_BEGIN(2, 0);
7843 IEM_MC_ARG(uint64_t *, pDst, 0);
7844 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7845 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7846 IEM_MC_PREPARE_SSE_USAGE();
7847 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7848 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7849 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7850 IEM_MC_ADVANCE_RIP();
7851 IEM_MC_END();
7852 return VINF_SUCCESS;
7853 }
7854 return IEMOP_RAISE_INVALID_OPCODE();
7855}
7856
7857/* Opcode 0xf3 0x0f 0xd7 - invalid */
7858/* Opcode 0xf2 0x0f 0xd7 - invalid */
7859
7860
7861/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7862FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7863/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
7864FNIEMOP_STUB(iemOp_psubusb_Vx_W);
7865/* Opcode 0xf3 0x0f 0xd8 - invalid */
7866/* Opcode 0xf2 0x0f 0xd8 - invalid */
7867
7868/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7869FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7870/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
7871FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
7872/* Opcode 0xf3 0x0f 0xd9 - invalid */
7873/* Opcode 0xf2 0x0f 0xd9 - invalid */
7874
7875/** Opcode 0x0f 0xda - pminub Pq, Qq */
7876FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7877/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
7878FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
7879/* Opcode 0xf3 0x0f 0xda - invalid */
7880/* Opcode 0xf2 0x0f 0xda - invalid */
7881
7882/** Opcode 0x0f 0xdb - pand Pq, Qq */
7883FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7884/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
7885FNIEMOP_STUB(iemOp_pand_Vx_W);
7886/* Opcode 0xf3 0x0f 0xdb - invalid */
7887/* Opcode 0xf2 0x0f 0xdb - invalid */
7888
7889/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7890FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7891/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
7892FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
7893/* Opcode 0xf3 0x0f 0xdc - invalid */
7894/* Opcode 0xf2 0x0f 0xdc - invalid */
7895
7896/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7897FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7898/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
7899FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
7900/* Opcode 0xf3 0x0f 0xdd - invalid */
7901/* Opcode 0xf2 0x0f 0xdd - invalid */
7902
7903/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7904FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7905/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
7906FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
7907/* Opcode 0xf3 0x0f 0xde - invalid */
7908/* Opcode 0xf2 0x0f 0xde - invalid */
7909
7910/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7911FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7912/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
7913FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
7914/* Opcode 0xf3 0x0f 0xdf - invalid */
7915/* Opcode 0xf2 0x0f 0xdf - invalid */
7916
7917/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7918FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7919/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
7920FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
7921/* Opcode 0xf3 0x0f 0xe0 - invalid */
7922/* Opcode 0xf2 0x0f 0xe0 - invalid */
7923
7924/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7925FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7926/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
7927FNIEMOP_STUB(iemOp_psraw_Vx_W);
7928/* Opcode 0xf3 0x0f 0xe1 - invalid */
7929/* Opcode 0xf2 0x0f 0xe1 - invalid */
7930
7931/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7932FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7933/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
7934FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
7935/* Opcode 0xf3 0x0f 0xe2 - invalid */
7936/* Opcode 0xf2 0x0f 0xe2 - invalid */
7937
7938/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7939FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7940/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
7941FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
7942/* Opcode 0xf3 0x0f 0xe3 - invalid */
7943/* Opcode 0xf2 0x0f 0xe3 - invalid */
7944
7945/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7946FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7947/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
7948FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
7949/* Opcode 0xf3 0x0f 0xe4 - invalid */
7950/* Opcode 0xf2 0x0f 0xe4 - invalid */
7951
7952/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7953FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7954/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
7955FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
7956/* Opcode 0xf3 0x0f 0xe5 - invalid */
7957/* Opcode 0xf2 0x0f 0xe5 - invalid */
7958
7959/* Opcode 0x0f 0xe6 - invalid */
7960/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
7961FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
7962/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
7963FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
7964/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
7965FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
7966
7967
7968/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7969FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7970{
7971 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7973 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7974 {
7975 /* Memory, register. */
7976 IEM_MC_BEGIN(0, 2);
7977 IEM_MC_LOCAL(uint64_t, uSrc);
7978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7979
7980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7982 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7983 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7984
7985 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7986 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7987
7988 IEM_MC_ADVANCE_RIP();
7989 IEM_MC_END();
7990 return VINF_SUCCESS;
7991 }
7992 /* The register, register encoding is invalid. */
7993 return IEMOP_RAISE_INVALID_OPCODE();
7994}
7995
7996/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
7997FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
7998{
7999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8000 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8001 {
8002 /* Memory, register. */
8003 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8004 IEM_MC_BEGIN(0, 2);
8005 IEM_MC_LOCAL(RTUINT128U, uSrc);
8006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8007
8008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8010 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8011 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8012
8013 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8014 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8015
8016 IEM_MC_ADVANCE_RIP();
8017 IEM_MC_END();
8018 return VINF_SUCCESS;
8019 }
8020
8021 /* The register, register encoding is invalid. */
8022 return IEMOP_RAISE_INVALID_OPCODE();
8023}
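/* Both movntq and movntdq above are non-temporal stores: architecturally
   they behave like ordinary stores (hence the plain IEM_MC_STORE_MEM_*
   statements), the non-temporal hint merely affects caching on real
   hardware. In compiler terms they roughly correspond to the _mm_stream_pi
   and _mm_stream_si128 intrinsics. */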
8024
8025/* Opcode 0xf3 0x0f 0xe7 - invalid */
8026/* Opcode 0xf2 0x0f 0xe7 - invalid */
8027
8028
8029/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8030FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8031/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8032FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8033/* Opcode 0xf3 0x0f 0xe8 - invalid */
8034/* Opcode 0xf2 0x0f 0xe8 - invalid */
8035
8036/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8037FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8038/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8039FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8040/* Opcode 0xf3 0x0f 0xe9 - invalid */
8041/* Opcode 0xf2 0x0f 0xe9 - invalid */
8042
8043/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8044FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8045/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8046FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8047/* Opcode 0xf3 0x0f 0xea - invalid */
8048/* Opcode 0xf2 0x0f 0xea - invalid */
8049
8050/** Opcode 0x0f 0xeb - por Pq, Qq */
8051FNIEMOP_STUB(iemOp_por_Pq_Qq);
8052/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8053FNIEMOP_STUB(iemOp_por_Vx_W);
8054/* Opcode 0xf3 0x0f 0xeb - invalid */
8055/* Opcode 0xf2 0x0f 0xeb - invalid */
8056
8057/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8058FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8059/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8060FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8061/* Opcode 0xf3 0x0f 0xec - invalid */
8062/* Opcode 0xf2 0x0f 0xec - invalid */
8063
8064/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8065FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8066/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8067FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8068/* Opcode 0xf3 0x0f 0xed - invalid */
8069/* Opcode 0xf2 0x0f 0xed - invalid */
8070
8071/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8072FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8073/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8074FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8075/* Opcode 0xf3 0x0f 0xee - invalid */
8076/* Opcode 0xf2 0x0f 0xee - invalid */
8077
8078
8079/** Opcode 0x0f 0xef - pxor Pq, Qq */
8080FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8081{
8082 IEMOP_MNEMONIC(pxor, "pxor");
8083 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8084}
8085
8086/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8087FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8088{
8089 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8090 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8091}
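/* Usage note: pxor with identical source and destination registers is the
   canonical register-zeroing idiom emitted by compilers, so these handlers
   see it far more often than an actual data XOR. */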
8092
8093/* Opcode 0xf3 0x0f 0xef - invalid */
8094/* Opcode 0xf2 0x0f 0xef - invalid */
8095
8096/* Opcode 0x0f 0xf0 - invalid */
8097/* Opcode 0x66 0x0f 0xf0 - invalid */
8098/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8099FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8100
8101/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8102FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8103/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8104FNIEMOP_STUB(iemOp_psllw_Vx_W);
8105/* Opcode 0xf2 0x0f 0xf1 - invalid */
8106
8107/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8108FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8109/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8110FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8111/* Opcode 0xf2 0x0f 0xf2 - invalid */
8112
8113/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8114FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8115/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8116FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8117/* Opcode 0xf2 0x0f 0xf3 - invalid */
8118
8119/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8120FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8121/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8122FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8123/* Opcode 0xf2 0x0f 0xf4 - invalid */
8124
8125/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8126FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8127/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8128FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8129/* Opcode 0xf2 0x0f 0xf5 - invalid */
8130
8131/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8132FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8133/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8134FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8135/* Opcode 0xf2 0x0f 0xf6 - invalid */
8136
8137/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8138FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8139/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8140FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8141/* Opcode 0xf2 0x0f 0xf7 - invalid */
8142
8143/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8144FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8145/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8146FNIEMOP_STUB(iemOp_psubb_Vx_W);
8147/* Opcode 0xf2 0x0f 0xf8 - invalid */
8148
8149/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8150FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8151/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8152FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8153/* Opcode 0xf2 0x0f 0xf9 - invalid */
8154
8155/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8156FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8157/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8158FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8159/* Opcode 0xf2 0x0f 0xfa - invalid */
8160
8161/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8162FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8163/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8164FNIEMOP_STUB(iemOp_psubq_Vx_W);
8165/* Opcode 0xf2 0x0f 0xfb - invalid */
8166
8167/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8168FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8169/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8170FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8171/* Opcode 0xf2 0x0f 0xfc - invalid */
8172
8173/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8174FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8175/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8176FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8177/* Opcode 0xf2 0x0f 0xfd - invalid */
8178
8179/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8180FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8181/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8182FNIEMOP_STUB(iemOp_paddd_Vx_W);
8183/* Opcode 0xf2 0x0f 0xfe - invalid */
8184
8185
8186/** Opcode **** 0x0f 0xff - UD0 */
8187FNIEMOP_DEF(iemOp_ud0)
8188{
8189 IEMOP_MNEMONIC(ud0, "ud0");
8190 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8191 {
8192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8193#ifndef TST_IEM_CHECK_MC
8194 RTGCPTR GCPtrEff;
8195 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8196 if (rcStrict != VINF_SUCCESS)
8197 return rcStrict;
8198#endif
8199 IEMOP_HLP_DONE_DECODING();
8200 }
8201 return IEMOP_RAISE_INVALID_OPCODE();
8202}
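/* Decode-length note (illustrative): on Intel CPUs ud0 consumes a ModRM
   byte plus whatever addressing bytes it implies, so e.g. the sequence
   0F FF 84 00 00 00 00 00 decodes as a single eight-byte instruction,
   whereas other vendors fault after the two opcode bytes; the vendor check
   above models exactly that difference. */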
8203
8204
8205
8206/**
8207 * Two byte opcode map, first byte 0x0f.
8208 *
8209 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8210 * check if it needs updating as well when making changes.
8211 */
8212IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8213{
8214 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8215 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8216 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8217 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8218 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8219 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8220 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8221 /* 0x06 */ IEMOP_X4(iemOp_clts),
8222 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8223 /* 0x08 */ IEMOP_X4(iemOp_invd),
8224 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8225 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8226 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8227 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8228 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8229 /* 0x0e */ IEMOP_X4(iemOp_femms),
8230 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8231
8232 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vx_Wsd,
8233 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8234 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8235 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8236 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8237 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8238 /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
8239 /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8240 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8241 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8242 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8243 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8244 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8245 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8246 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8247 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8248
8249 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8250 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8251 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8252 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8253 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8254 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8255 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8256 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8257 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8258 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8259 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
8260 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8261 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
8262 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
8263 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8264 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8265
8266 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8267 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8268 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8269 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8270 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8271 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8272 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8273 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8274 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
8275 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8276 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
8277 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8278 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8279 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8280 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8281 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8282
8283 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8284 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8285 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8286 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8287 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8288 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8289 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8290 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8291 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8292 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8293 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8294 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8295 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8296 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8297 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8298 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8299
8300 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8301 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
8302 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
8303 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
8304 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8305 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8306 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8307 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8308 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
8309 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
8310 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
8311 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8312 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
8313 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
8314 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
8315 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
8316
8317 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8318 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8319 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8320 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8321 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8322 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8323 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8324 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8325 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8326 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8327 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8328 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8329 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8330 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8331 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8332 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,
8333
8334 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
8335 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8336 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8337 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8338 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8339 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8340 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8341 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8342
8343 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8344 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
/* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
/* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
/* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

/* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
/* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
/* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
/* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
/* 0x84 */ IEMOP_X4(iemOp_je_Jv),
/* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
/* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
/* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
/* 0x88 */ IEMOP_X4(iemOp_js_Jv),
/* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
/* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
/* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
/* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
/* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
/* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
/* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

/* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
/* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
/* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
/* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
/* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
/* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
/* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
/* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
/* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
/* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
/* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
/* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
/* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
/* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
/* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
/* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

/* 0xa0 */ IEMOP_X4(iemOp_push_fs),
/* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
/* 0xa2 */ IEMOP_X4(iemOp_cpuid),
/* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
/* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
/* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
/* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa8 */ IEMOP_X4(iemOp_push_gs),
/* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
/* 0xaa */ IEMOP_X4(iemOp_rsm),
/* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
/* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
/* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
/* 0xae */ IEMOP_X4(iemOp_Grp15),
/* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

/* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
/* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
/* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
/* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
/* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
/* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
/* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
/* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
/* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
/* 0xb9 */ IEMOP_X4(iemOp_Grp10),
/* 0xba */ IEMOP_X4(iemOp_Grp8),
/* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
/* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
/* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
/* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
/* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

/* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
/* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
/* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
/* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc7 */ IEMOP_X4(iemOp_Grp9),
/* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
/* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
/* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
/* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
/* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
/* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
/* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
/* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

/* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
/* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
/* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

/* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
/* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

/* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
/* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
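
/**
 * A minimal sketch of how this map is meant to be indexed (illustrative, not
 * part of the table): each opcode byte owns four consecutive slots, one per
 * mandatory-prefix column (none, 0x66, 0xF3, 0xF2), and IEMOP_X4 simply
 * repeats a single handler across all four columns for prefix-insensitive
 * opcodes.  Assuming the 0x0f escape decoder tracks the last SIMD prefix as
 * pVCpu->iem.s.idxPrefix with 0 = none, 1 = 0x66, 2 = 0xF3 and 3 = 0xF2,
 * the lookup is expected to boil down to something like:
 *
 *      FNIEMOP_DEF(iemOp_2byteEscape)
 *      {
 *          uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *          return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 *      }
 *
 * That 256 opcodes x 4 columns layout is what the == 1024 assertion above
 * guards against table rows being added or dropped by mistake.
 */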

/** @} */