VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 66471

Last change on this file since 66471 was 66471, checked in by vboxsync, 8 years ago

IEM: Split the 3DNow! instruction out into a separate file.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 302.9 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66471 2017-04-07 09:48:47Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.215389.xyz. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 switch (pVCpu->iem.s.enmEffOpSize)
38 {
39 case IEMMODE_16BIT:
40 IEM_MC_BEGIN(0, 1);
41 IEM_MC_LOCAL(uint16_t, u16Ldtr);
42 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
43 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
44 IEM_MC_ADVANCE_RIP();
45 IEM_MC_END();
46 break;
47
48 case IEMMODE_32BIT:
49 IEM_MC_BEGIN(0, 1);
50 IEM_MC_LOCAL(uint32_t, u32Ldtr);
51 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
52 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
53 IEM_MC_ADVANCE_RIP();
54 IEM_MC_END();
55 break;
56
57 case IEMMODE_64BIT:
58 IEM_MC_BEGIN(0, 1);
59 IEM_MC_LOCAL(uint64_t, u64Ldtr);
60 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
61 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
62 IEM_MC_ADVANCE_RIP();
63 IEM_MC_END();
64 break;
65
66 IEM_NOT_REACHED_DEFAULT_CASE_RET();
67 }
68 }
69 else
70 {
71 IEM_MC_BEGIN(0, 2);
72 IEM_MC_LOCAL(uint16_t, u16Ldtr);
73 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
74 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
75 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
76 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
77 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
78 IEM_MC_ADVANCE_RIP();
79 IEM_MC_END();
80 }
81 return VINF_SUCCESS;
82}
83
84
85/** Opcode 0x0f 0x00 /1. */
86FNIEMOPRM_DEF(iemOp_Grp6_str)
87{
88 IEMOP_MNEMONIC(str, "str Rv/Mw");
89 IEMOP_HLP_MIN_286();
90 IEMOP_HLP_NO_REAL_OR_V86_MODE();
91
92 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
93 {
94 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
95 switch (pVCpu->iem.s.enmEffOpSize)
96 {
97 case IEMMODE_16BIT:
98 IEM_MC_BEGIN(0, 1);
99 IEM_MC_LOCAL(uint16_t, u16Tr);
100 IEM_MC_FETCH_TR_U16(u16Tr);
101 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
102 IEM_MC_ADVANCE_RIP();
103 IEM_MC_END();
104 break;
105
106 case IEMMODE_32BIT:
107 IEM_MC_BEGIN(0, 1);
108 IEM_MC_LOCAL(uint32_t, u32Tr);
109 IEM_MC_FETCH_TR_U32(u32Tr);
110 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
111 IEM_MC_ADVANCE_RIP();
112 IEM_MC_END();
113 break;
114
115 case IEMMODE_64BIT:
116 IEM_MC_BEGIN(0, 1);
117 IEM_MC_LOCAL(uint64_t, u64Tr);
118 IEM_MC_FETCH_TR_U64(u64Tr);
119 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
120 IEM_MC_ADVANCE_RIP();
121 IEM_MC_END();
122 break;
123
124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
125 }
126 }
127 else
128 {
129 IEM_MC_BEGIN(0, 2);
130 IEM_MC_LOCAL(uint16_t, u16Tr);
131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
133 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
134 IEM_MC_FETCH_TR_U16(u16Tr);
135 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
136 IEM_MC_ADVANCE_RIP();
137 IEM_MC_END();
138 }
139 return VINF_SUCCESS;
140}
141
142
143/** Opcode 0x0f 0x00 /2. */
144FNIEMOPRM_DEF(iemOp_Grp6_lldt)
145{
146 IEMOP_MNEMONIC(lldt, "lldt Ew");
147 IEMOP_HLP_MIN_286();
148 IEMOP_HLP_NO_REAL_OR_V86_MODE();
149
150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
151 {
152 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
153 IEM_MC_BEGIN(1, 0);
154 IEM_MC_ARG(uint16_t, u16Sel, 0);
155 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
156 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
157 IEM_MC_END();
158 }
159 else
160 {
161 IEM_MC_BEGIN(1, 1);
162 IEM_MC_ARG(uint16_t, u16Sel, 0);
163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
165 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
166 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
167 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
168 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
169 IEM_MC_END();
170 }
171 return VINF_SUCCESS;
172}
173
174
175/** Opcode 0x0f 0x00 /3. */
176FNIEMOPRM_DEF(iemOp_Grp6_ltr)
177{
178 IEMOP_MNEMONIC(ltr, "ltr Ew");
179 IEMOP_HLP_MIN_286();
180 IEMOP_HLP_NO_REAL_OR_V86_MODE();
181
182 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
183 {
184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
185 IEM_MC_BEGIN(1, 0);
186 IEM_MC_ARG(uint16_t, u16Sel, 0);
187 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
188 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
189 IEM_MC_END();
190 }
191 else
192 {
193 IEM_MC_BEGIN(1, 1);
194 IEM_MC_ARG(uint16_t, u16Sel, 0);
195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
198 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test ordre */
199 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
200 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
201 IEM_MC_END();
202 }
203 return VINF_SUCCESS;
204}
205
206
207/** Opcode 0x0f 0x00 /3. */
208FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
209{
210 IEMOP_HLP_MIN_286();
211 IEMOP_HLP_NO_REAL_OR_V86_MODE();
212
213 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
214 {
215 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
216 IEM_MC_BEGIN(2, 0);
217 IEM_MC_ARG(uint16_t, u16Sel, 0);
218 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
219 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
220 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
221 IEM_MC_END();
222 }
223 else
224 {
225 IEM_MC_BEGIN(2, 1);
226 IEM_MC_ARG(uint16_t, u16Sel, 0);
227 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
230 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
231 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
232 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
233 IEM_MC_END();
234 }
235 return VINF_SUCCESS;
236}
237
238
239/** Opcode 0x0f 0x00 /4. */
240FNIEMOPRM_DEF(iemOp_Grp6_verr)
241{
242 IEMOP_MNEMONIC(verr, "verr Ew");
243 IEMOP_HLP_MIN_286();
244 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
245}
246
247
248/** Opcode 0x0f 0x00 /5. */
249FNIEMOPRM_DEF(iemOp_Grp6_verw)
250{
251 IEMOP_MNEMONIC(verw, "verw Ew");
252 IEMOP_HLP_MIN_286();
253 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
254}
255
256
257/**
258 * Group 6 jump table.
259 */
260IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
261{
262 iemOp_Grp6_sldt,
263 iemOp_Grp6_str,
264 iemOp_Grp6_lldt,
265 iemOp_Grp6_ltr,
266 iemOp_Grp6_verr,
267 iemOp_Grp6_verw,
268 iemOp_InvalidWithRM,
269 iemOp_InvalidWithRM
270};
271
272/** Opcode 0x0f 0x00. */
273FNIEMOP_DEF(iemOp_Grp6)
274{
275 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
276 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
277}
278
279
280/** Opcode 0x0f 0x01 /0. */
281FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
282{
283 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
284 IEMOP_HLP_MIN_286();
285 IEMOP_HLP_64BIT_OP_SIZE();
286 IEM_MC_BEGIN(2, 1);
287 IEM_MC_ARG(uint8_t, iEffSeg, 0);
288 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
291 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
292 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
293 IEM_MC_END();
294 return VINF_SUCCESS;
295}
296
297
298/** Opcode 0x0f 0x01 /0. */
299FNIEMOP_DEF(iemOp_Grp7_vmcall)
300{
301 IEMOP_BITCH_ABOUT_STUB();
302 return IEMOP_RAISE_INVALID_OPCODE();
303}
304
305
306/** Opcode 0x0f 0x01 /0. */
307FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
308{
309 IEMOP_BITCH_ABOUT_STUB();
310 return IEMOP_RAISE_INVALID_OPCODE();
311}
312
313
314/** Opcode 0x0f 0x01 /0. */
315FNIEMOP_DEF(iemOp_Grp7_vmresume)
316{
317 IEMOP_BITCH_ABOUT_STUB();
318 return IEMOP_RAISE_INVALID_OPCODE();
319}
320
321
322/** Opcode 0x0f 0x01 /0. */
323FNIEMOP_DEF(iemOp_Grp7_vmxoff)
324{
325 IEMOP_BITCH_ABOUT_STUB();
326 return IEMOP_RAISE_INVALID_OPCODE();
327}
328
329
330/** Opcode 0x0f 0x01 /1. */
331FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
332{
333 IEMOP_MNEMONIC(sidt, "sidt Ms");
334 IEMOP_HLP_MIN_286();
335 IEMOP_HLP_64BIT_OP_SIZE();
336 IEM_MC_BEGIN(2, 1);
337 IEM_MC_ARG(uint8_t, iEffSeg, 0);
338 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
341 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
342 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
343 IEM_MC_END();
344 return VINF_SUCCESS;
345}
346
347
348/** Opcode 0x0f 0x01 /1. */
349FNIEMOP_DEF(iemOp_Grp7_monitor)
350{
351 IEMOP_MNEMONIC(monitor, "monitor");
352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
353 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
354}
355
356
357/** Opcode 0x0f 0x01 /1. */
358FNIEMOP_DEF(iemOp_Grp7_mwait)
359{
360 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
362 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
363}
364
365
366/** Opcode 0x0f 0x01 /2. */
367FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
368{
369 IEMOP_MNEMONIC(lgdt, "lgdt");
370 IEMOP_HLP_64BIT_OP_SIZE();
371 IEM_MC_BEGIN(3, 1);
372 IEM_MC_ARG(uint8_t, iEffSeg, 0);
373 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
374 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
377 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
378 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
379 IEM_MC_END();
380 return VINF_SUCCESS;
381}
382
383
384/** Opcode 0x0f 0x01 0xd0. */
385FNIEMOP_DEF(iemOp_Grp7_xgetbv)
386{
387 IEMOP_MNEMONIC(xgetbv, "xgetbv");
388 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
389 {
390 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
391 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
392 }
393 return IEMOP_RAISE_INVALID_OPCODE();
394}
395
396
397/** Opcode 0x0f 0x01 0xd1. */
398FNIEMOP_DEF(iemOp_Grp7_xsetbv)
399{
400 IEMOP_MNEMONIC(xsetbv, "xsetbv");
401 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
402 {
403 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
404 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
405 }
406 return IEMOP_RAISE_INVALID_OPCODE();
407}
408
409
410/** Opcode 0x0f 0x01 /3. */
411FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
412{
413 IEMOP_MNEMONIC(lidt, "lidt");
414 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
415 ? IEMMODE_64BIT
416 : pVCpu->iem.s.enmEffOpSize;
417 IEM_MC_BEGIN(3, 1);
418 IEM_MC_ARG(uint8_t, iEffSeg, 0);
419 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
420 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
423 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
424 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
425 IEM_MC_END();
426 return VINF_SUCCESS;
427}
428
429
430#ifdef VBOX_WITH_NESTED_HWVIRT
431/** Opcode 0x0f 0x01 0xd8. */
432FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
433{
434 IEMOP_MNEMONIC(vmrun, "vmrun");
435 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
436}
437
438/** Opcode 0x0f 0x01 0xd9. */
439FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
440{
441 IEMOP_MNEMONIC(vmmcall, "vmmcall");
442 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
443}
444
445
446/** Opcode 0x0f 0x01 0xda. */
447FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
448{
449 IEMOP_MNEMONIC(vmload, "vmload");
450 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
451}
452
453
454/** Opcode 0x0f 0x01 0xdb. */
455FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
456{
457 IEMOP_MNEMONIC(vmsave, "vmsave");
458 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
459}
460
461
462/** Opcode 0x0f 0x01 0xdc. */
463FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
464{
465 IEMOP_MNEMONIC(stgi, "stgi");
466 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
467}
468
469
470/** Opcode 0x0f 0x01 0xdd. */
471FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
472{
473 IEMOP_MNEMONIC(clgi, "clgi");
474 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
475}
476
477
478/** Opcode 0x0f 0x01 0xdf. */
479FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
480{
481 IEMOP_MNEMONIC(invlpga, "invlpga");
482 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
483}
484#else
485/** Opcode 0x0f 0x01 0xd8. */
486FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
487
488/** Opcode 0x0f 0x01 0xd9. */
489FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
490
491/** Opcode 0x0f 0x01 0xda. */
492FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
493
494/** Opcode 0x0f 0x01 0xdb. */
495FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
496
497/** Opcode 0x0f 0x01 0xdc. */
498FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
499
500/** Opcode 0x0f 0x01 0xdd. */
501FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
502
503/** Opcode 0x0f 0x01 0xdf. */
504FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
505#endif /* VBOX_WITH_NESTED_HWVIRT */
506
507/** Opcode 0x0f 0x01 0xde. */
508FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
509
510/** Opcode 0x0f 0x01 /4. */
511FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
512{
513 IEMOP_MNEMONIC(smsw, "smsw");
514 IEMOP_HLP_MIN_286();
515 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
516 {
517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
518 switch (pVCpu->iem.s.enmEffOpSize)
519 {
520 case IEMMODE_16BIT:
521 IEM_MC_BEGIN(0, 1);
522 IEM_MC_LOCAL(uint16_t, u16Tmp);
523 IEM_MC_FETCH_CR0_U16(u16Tmp);
524 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
525 { /* likely */ }
526 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
527 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
528 else
529 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
530 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
531 IEM_MC_ADVANCE_RIP();
532 IEM_MC_END();
533 return VINF_SUCCESS;
534
535 case IEMMODE_32BIT:
536 IEM_MC_BEGIN(0, 1);
537 IEM_MC_LOCAL(uint32_t, u32Tmp);
538 IEM_MC_FETCH_CR0_U32(u32Tmp);
539 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
540 IEM_MC_ADVANCE_RIP();
541 IEM_MC_END();
542 return VINF_SUCCESS;
543
544 case IEMMODE_64BIT:
545 IEM_MC_BEGIN(0, 1);
546 IEM_MC_LOCAL(uint64_t, u64Tmp);
547 IEM_MC_FETCH_CR0_U64(u64Tmp);
548 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
549 IEM_MC_ADVANCE_RIP();
550 IEM_MC_END();
551 return VINF_SUCCESS;
552
553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
554 }
555 }
556 else
557 {
558 /* Ignore operand size here, memory refs are always 16-bit. */
559 IEM_MC_BEGIN(0, 2);
560 IEM_MC_LOCAL(uint16_t, u16Tmp);
561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
564 IEM_MC_FETCH_CR0_U16(u16Tmp);
565 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
566 { /* likely */ }
567 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
568 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
569 else
570 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
571 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
572 IEM_MC_ADVANCE_RIP();
573 IEM_MC_END();
574 return VINF_SUCCESS;
575 }
576}
577
578
579/** Opcode 0x0f 0x01 /6. */
580FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
581{
582 /* The operand size is effectively ignored, all is 16-bit and only the
583 lower 3-bits are used. */
584 IEMOP_MNEMONIC(lmsw, "lmsw");
585 IEMOP_HLP_MIN_286();
586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
587 {
588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
589 IEM_MC_BEGIN(1, 0);
590 IEM_MC_ARG(uint16_t, u16Tmp, 0);
591 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
592 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
593 IEM_MC_END();
594 }
595 else
596 {
597 IEM_MC_BEGIN(1, 1);
598 IEM_MC_ARG(uint16_t, u16Tmp, 0);
599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
602 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
603 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
604 IEM_MC_END();
605 }
606 return VINF_SUCCESS;
607}
608
609
610/** Opcode 0x0f 0x01 /7. */
611FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
612{
613 IEMOP_MNEMONIC(invlpg, "invlpg");
614 IEMOP_HLP_MIN_486();
615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
616 IEM_MC_BEGIN(1, 1);
617 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
619 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
620 IEM_MC_END();
621 return VINF_SUCCESS;
622}
623
624
625/** Opcode 0x0f 0x01 /7. */
626FNIEMOP_DEF(iemOp_Grp7_swapgs)
627{
628 IEMOP_MNEMONIC(swapgs, "swapgs");
629 IEMOP_HLP_ONLY_64BIT();
630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
631 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
632}
633
634
635/** Opcode 0x0f 0x01 /7. */
636FNIEMOP_DEF(iemOp_Grp7_rdtscp)
637{
638 NOREF(pVCpu);
639 IEMOP_BITCH_ABOUT_STUB();
640 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
641}
642
643
644/**
645 * Group 7 jump table, memory variant.
646 */
647IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
648{
649 iemOp_Grp7_sgdt,
650 iemOp_Grp7_sidt,
651 iemOp_Grp7_lgdt,
652 iemOp_Grp7_lidt,
653 iemOp_Grp7_smsw,
654 iemOp_InvalidWithRM,
655 iemOp_Grp7_lmsw,
656 iemOp_Grp7_invlpg
657};
658
659
660/** Opcode 0x0f 0x01. */
661FNIEMOP_DEF(iemOp_Grp7)
662{
663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
664 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
665 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
666
667 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
668 {
669 case 0:
670 switch (bRm & X86_MODRM_RM_MASK)
671 {
672 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
673 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
674 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
675 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
676 }
677 return IEMOP_RAISE_INVALID_OPCODE();
678
679 case 1:
680 switch (bRm & X86_MODRM_RM_MASK)
681 {
682 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
683 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
684 }
685 return IEMOP_RAISE_INVALID_OPCODE();
686
687 case 2:
688 switch (bRm & X86_MODRM_RM_MASK)
689 {
690 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
691 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
692 }
693 return IEMOP_RAISE_INVALID_OPCODE();
694
695 case 3:
696 switch (bRm & X86_MODRM_RM_MASK)
697 {
698 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
699 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
700 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
701 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
702 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
703 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
704 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
705 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
707 }
708
709 case 4:
710 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
711
712 case 5:
713 return IEMOP_RAISE_INVALID_OPCODE();
714
715 case 6:
716 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
717
718 case 7:
719 switch (bRm & X86_MODRM_RM_MASK)
720 {
721 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
722 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
723 }
724 return IEMOP_RAISE_INVALID_OPCODE();
725
726 IEM_NOT_REACHED_DEFAULT_CASE_RET();
727 }
728}
729
730/** Opcode 0x0f 0x00 /3. */
731FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
732{
733 IEMOP_HLP_NO_REAL_OR_V86_MODE();
734 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
735
736 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
737 {
738 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
739 switch (pVCpu->iem.s.enmEffOpSize)
740 {
741 case IEMMODE_16BIT:
742 {
743 IEM_MC_BEGIN(3, 0);
744 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
745 IEM_MC_ARG(uint16_t, u16Sel, 1);
746 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
747
748 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
749 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
750 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
751
752 IEM_MC_END();
753 return VINF_SUCCESS;
754 }
755
756 case IEMMODE_32BIT:
757 case IEMMODE_64BIT:
758 {
759 IEM_MC_BEGIN(3, 0);
760 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
761 IEM_MC_ARG(uint16_t, u16Sel, 1);
762 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
763
764 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
765 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
766 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
767
768 IEM_MC_END();
769 return VINF_SUCCESS;
770 }
771
772 IEM_NOT_REACHED_DEFAULT_CASE_RET();
773 }
774 }
775 else
776 {
777 switch (pVCpu->iem.s.enmEffOpSize)
778 {
779 case IEMMODE_16BIT:
780 {
781 IEM_MC_BEGIN(3, 1);
782 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
783 IEM_MC_ARG(uint16_t, u16Sel, 1);
784 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
786
787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
788 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
789
790 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
791 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
792 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
793
794 IEM_MC_END();
795 return VINF_SUCCESS;
796 }
797
798 case IEMMODE_32BIT:
799 case IEMMODE_64BIT:
800 {
801 IEM_MC_BEGIN(3, 1);
802 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
803 IEM_MC_ARG(uint16_t, u16Sel, 1);
804 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
806
807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
808 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
809/** @todo testcase: make sure it's a 16-bit read. */
810
811 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
812 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
813 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
814
815 IEM_MC_END();
816 return VINF_SUCCESS;
817 }
818
819 IEM_NOT_REACHED_DEFAULT_CASE_RET();
820 }
821 }
822}
823
824
825
826/** Opcode 0x0f 0x02. */
827FNIEMOP_DEF(iemOp_lar_Gv_Ew)
828{
829 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
830 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
831}
832
833
834/** Opcode 0x0f 0x03. */
835FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
836{
837 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
838 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
839}
840
841
842/** Opcode 0x0f 0x05. */
843FNIEMOP_DEF(iemOp_syscall)
844{
845 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
847 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
848}
849
850
851/** Opcode 0x0f 0x06. */
852FNIEMOP_DEF(iemOp_clts)
853{
854 IEMOP_MNEMONIC(clts, "clts");
855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
856 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
857}
858
859
860/** Opcode 0x0f 0x07. */
861FNIEMOP_DEF(iemOp_sysret)
862{
863 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
865 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
866}
867
868
869/** Opcode 0x0f 0x08. */
870FNIEMOP_STUB(iemOp_invd);
871// IEMOP_HLP_MIN_486();
872
873
874/** Opcode 0x0f 0x09. */
875FNIEMOP_DEF(iemOp_wbinvd)
876{
877 IEMOP_MNEMONIC(wbinvd, "wbinvd");
878 IEMOP_HLP_MIN_486();
879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
880 IEM_MC_BEGIN(0, 0);
881 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
882 IEM_MC_ADVANCE_RIP();
883 IEM_MC_END();
884 return VINF_SUCCESS; /* ignore for now */
885}
886
887
888/** Opcode 0x0f 0x0b. */
889FNIEMOP_DEF(iemOp_ud2)
890{
891 IEMOP_MNEMONIC(ud2, "ud2");
892 return IEMOP_RAISE_INVALID_OPCODE();
893}
894
895/** Opcode 0x0f 0x0d. */
896FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
897{
898 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
899 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
900 {
901 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
902 return IEMOP_RAISE_INVALID_OPCODE();
903 }
904
905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
906 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
907 {
908 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
909 return IEMOP_RAISE_INVALID_OPCODE();
910 }
911
912 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
913 {
914 case 2: /* Aliased to /0 for the time being. */
915 case 4: /* Aliased to /0 for the time being. */
916 case 5: /* Aliased to /0 for the time being. */
917 case 6: /* Aliased to /0 for the time being. */
918 case 7: /* Aliased to /0 for the time being. */
919 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
920 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
921 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
922 IEM_NOT_REACHED_DEFAULT_CASE_RET();
923 }
924
925 IEM_MC_BEGIN(0, 1);
926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
929 /* Currently a NOP. */
930 NOREF(GCPtrEffSrc);
931 IEM_MC_ADVANCE_RIP();
932 IEM_MC_END();
933 return VINF_SUCCESS;
934}
935
936
937/** Opcode 0x0f 0x0e. */
938FNIEMOP_STUB(iemOp_femms);
939
940
941/** Opcode 0x0f 0x0f. */
942FNIEMOP_DEF(iemOp_3Dnow)
943{
944 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
945 {
946 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
947 return IEMOP_RAISE_INVALID_OPCODE();
948 }
949
950#ifdef IEM_WITH_3DNOW
951 /* This is pretty sparse, use switch instead of table. */
952 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
953 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
954#else
955 IEMOP_BITCH_ABOUT_STUB();
956 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
957#endif
958}
959
960
961/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
962FNIEMOP_STUB(iemOp_movups_Vps_Wps);
963/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
964FNIEMOP_STUB(iemOp_movupd_Vpd_Wpd);
965
966
967/**
968 * @opcode 0x10
969 * @oppfx 0xf3
970 * @opcpuid sse
971 * @opgroup og_sse_simdfp_datamove
972 * @opxcpttype 5
973 * @optest op1=1 op2=2 -> op1=2
974 * @optest op1=0 op2=-22 -> op1=-22
975 */
976FNIEMOP_DEF(iemOp_movss_Vss_Wss)
977{
978 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
980 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
981 {
982 /*
983 * Register, register.
984 */
985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
986 IEM_MC_BEGIN(0, 1);
987 IEM_MC_LOCAL(uint32_t, uSrc);
988
989 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
990 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
991 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
992 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
993
994 IEM_MC_ADVANCE_RIP();
995 IEM_MC_END();
996 }
997 else
998 {
999 /*
1000 * Memory, register.
1001 */
1002 IEM_MC_BEGIN(0, 2);
1003 IEM_MC_LOCAL(uint32_t, uSrc);
1004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1005
1006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1008 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1009 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1010
1011 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1012 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1013
1014 IEM_MC_ADVANCE_RIP();
1015 IEM_MC_END();
1016 }
1017 return VINF_SUCCESS;
1018}
1019
1020
1021/** Opcode 0xf2 0x0f 0x10 - movsd Vx, Wsd */
1022FNIEMOP_STUB(iemOp_movsd_Vx_Wsd);
1023
1024
1025/**
1026 * @opcode 0x11
1027 * @oppfx none
1028 * @opcpuid sse
1029 * @opgroup og_sse_simdfp_datamove
1030 * @opxcpttype 4UA
1031 * @optest op1=1 op2=2 -> op1=2
1032 * @optest op1=0 op2=-42 -> op1=-42
1033 */
1034FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1035{
1036 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1039 {
1040 /*
1041 * Register, register.
1042 */
1043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1044 IEM_MC_BEGIN(0, 0);
1045 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1046 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1047 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1048 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1049 IEM_MC_ADVANCE_RIP();
1050 IEM_MC_END();
1051 }
1052 else
1053 {
1054 /*
1055 * Memory, register.
1056 */
1057 IEM_MC_BEGIN(0, 2);
1058 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1060
1061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1063 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1064 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1065
1066 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1067 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1068
1069 IEM_MC_ADVANCE_RIP();
1070 IEM_MC_END();
1071 }
1072 return VINF_SUCCESS;
1073}
1074
1075
1076/**
1077 * @opcode 0x11
1078 * @oppfx 0x66
1079 * @opcpuid sse2
1080 * @opgroup og_sse2_pcksclr_datamove
1081 * @opxcpttype 4UA
1082 * @optest op1=1 op2=2 -> op1=2
1083 * @optest op1=0 op2=-42 -> op1=-42
1084 */
1085FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1086{
1087 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1089 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1090 {
1091 /*
1092 * Register, register.
1093 */
1094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1095 IEM_MC_BEGIN(0, 0);
1096 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1097 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1098 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1099 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1100 IEM_MC_ADVANCE_RIP();
1101 IEM_MC_END();
1102 }
1103 else
1104 {
1105 /*
1106 * Memory, register.
1107 */
1108 IEM_MC_BEGIN(0, 2);
1109 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1111
1112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1114 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1115 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1116
1117 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1118 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1119
1120 IEM_MC_ADVANCE_RIP();
1121 IEM_MC_END();
1122 }
1123 return VINF_SUCCESS;
1124}
1125
1126
1127/**
1128 * @opcode 0x11
1129 * @oppfx 0xf3
1130 * @opcpuid sse
1131 * @opgroup og_sse_simdfp_datamove
1132 * @opxcpttype 5
1133 * @optest op1=1 op2=2 -> op1=2
1134 * @optest op1=0 op2=-22 -> op1=-22
1135 */
1136FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1137{
1138 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1140 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1141 {
1142 /*
1143 * Register, register.
1144 */
1145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1146 IEM_MC_BEGIN(0, 1);
1147 IEM_MC_LOCAL(uint32_t, uSrc);
1148
1149 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1150 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1151 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1152 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1153
1154 IEM_MC_ADVANCE_RIP();
1155 IEM_MC_END();
1156 }
1157 else
1158 {
1159 /*
1160 * Memory, register.
1161 */
1162 IEM_MC_BEGIN(0, 2);
1163 IEM_MC_LOCAL(uint32_t, uSrc);
1164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1165
1166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1169 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1170
1171 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1172 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1173
1174 IEM_MC_ADVANCE_RIP();
1175 IEM_MC_END();
1176 }
1177 return VINF_SUCCESS;
1178}
1179
1180
1181/**
1182 * @opcode 0x11
1183 * @oppfx 0xf2
1184 * @opcpuid sse2
1185 * @opgroup og_sse2_pcksclr_datamove
1186 * @opxcpttype 5
1187 * @optest op1=1 op2=2 -> op1=2
1188 * @optest op1=0 op2=-42 -> op1=-42
1189 */
1190FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1191{
1192 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1194 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1195 {
1196 /*
1197 * Register, register.
1198 */
1199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1200 IEM_MC_BEGIN(0, 1);
1201 IEM_MC_LOCAL(uint64_t, uSrc);
1202
1203 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1204 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1205 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1206 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1207
1208 IEM_MC_ADVANCE_RIP();
1209 IEM_MC_END();
1210 }
1211 else
1212 {
1213 /*
1214 * Memory, register.
1215 */
1216 IEM_MC_BEGIN(0, 2);
1217 IEM_MC_LOCAL(uint64_t, uSrc);
1218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1219
1220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1222 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1223 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1224
1225 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1226 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1227
1228 IEM_MC_ADVANCE_RIP();
1229 IEM_MC_END();
1230 }
1231 return VINF_SUCCESS;
1232}
1233
1234
1235FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1236{
1237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1238 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1239 {
1240 /**
1241 * @opcode 0x12
1242 * @opcodesub 11 mr/reg
1243 * @oppfx none
1244 * @opcpuid sse
1245 * @opgroup og_sse_simdfp_datamove
1246 * @opxcpttype 5
1247 * @optest op1=1 op2=2 -> op1=2
1248 * @optest op1=0 op2=-42 -> op1=-42
1249 */
1250 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1251
1252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1253 IEM_MC_BEGIN(0, 1);
1254 IEM_MC_LOCAL(uint64_t, uSrc);
1255
1256 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1257 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1258 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1259 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1260
1261 IEM_MC_ADVANCE_RIP();
1262 IEM_MC_END();
1263 }
1264 else
1265 {
1266 /**
1267 * @opdone
1268 * @opcode 0x12
1269 * @opcodesub !11 mr/reg
1270 * @oppfx none
1271 * @opcpuid sse
1272 * @opgroup og_sse_simdfp_datamove
1273 * @opxcpttype 5
1274 * @optest op1=1 op2=2 -> op1=2
1275 * @optest op1=0 op2=-42 -> op1=-42
1276 * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
1277 */
1278 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1279
1280 IEM_MC_BEGIN(0, 2);
1281 IEM_MC_LOCAL(uint64_t, uSrc);
1282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1283
1284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1286 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1287 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1288
1289 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1290 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1291
1292 IEM_MC_ADVANCE_RIP();
1293 IEM_MC_END();
1294 }
1295 return VINF_SUCCESS;
1296}
1297
1298
1299/**
1300 * @opcode 0x12
1301 * @opcodesub !11 mr/reg
1302 * @oppfx 0x66
1303 * @opcpuid sse2
1304 * @opgroup og_sse2_pcksclr_datamove
1305 * @opxcpttype 5
1306 * @optest op1=1 op2=2 -> op1=2
1307 * @optest op1=0 op2=-42 -> op1=-42
1308 */
1309FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1310{
1311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1312 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1313 {
1314 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1315
1316 IEM_MC_BEGIN(0, 2);
1317 IEM_MC_LOCAL(uint64_t, uSrc);
1318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1319
1320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1322 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1323 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1324
1325 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1326 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1327
1328 IEM_MC_ADVANCE_RIP();
1329 IEM_MC_END();
1330 return VINF_SUCCESS;
1331 }
1332
1333 /**
1334 * @opdone
1335 * @opmnemonic ud660f12m3
1336 * @opcode 0x12
1337 * @opcodesub 11 mr/reg
1338 * @oppfx 0x66
1339 * @opunused immediate
1340 * @opcpuid sse
1341 * @optest ->
1342 */
1343 return IEMOP_RAISE_INVALID_OPCODE();
1344}
1345
1346
1347/**
1348 * @opcode 0x12
1349 * @oppfx 0xf3
1350 * @opcpuid sse3
1351 * @opgroup og_sse3_pcksclr_datamove
1352 * @opxcpttype 4
1353 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1354 * op1=0x00000002000000020000000100000001
1355 */
1356FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1357{
1358 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1360 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1361 {
1362 /*
1363 * Register, register.
1364 */
1365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1366 IEM_MC_BEGIN(2, 0);
1367 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1368 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1369
1370 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1371 IEM_MC_PREPARE_SSE_USAGE();
1372
1373 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1374 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1375 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1376
1377 IEM_MC_ADVANCE_RIP();
1378 IEM_MC_END();
1379 }
1380 else
1381 {
1382 /*
1383 * Register, memory.
1384 */
1385 IEM_MC_BEGIN(2, 2);
1386 IEM_MC_LOCAL(RTUINT128U, uSrc);
1387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1388 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1389 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1390
1391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1393 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1394 IEM_MC_PREPARE_SSE_USAGE();
1395
1396 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1397 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1398 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1399
1400 IEM_MC_ADVANCE_RIP();
1401 IEM_MC_END();
1402 }
1403 return VINF_SUCCESS;
1404}
1405
1406
1407/**
1408 * @opcode 0x12
1409 * @oppfx 0xf2
1410 * @opcpuid sse3
1411 * @opgroup og_sse3_pcksclr_datamove
1412 * @opxcpttype 5
1413 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1414 * op1=0x22222222111111112222222211111111
1415 */
1416FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1417{
1418 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1420 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1421 {
1422 /*
1423 * Register, register.
1424 */
1425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1426 IEM_MC_BEGIN(2, 0);
1427 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1428 IEM_MC_ARG(uint64_t, uSrc, 1);
1429
1430 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1431 IEM_MC_PREPARE_SSE_USAGE();
1432
1433 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1434 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1435 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1436
1437 IEM_MC_ADVANCE_RIP();
1438 IEM_MC_END();
1439 }
1440 else
1441 {
1442 /*
1443 * Register, memory.
1444 */
1445 IEM_MC_BEGIN(2, 2);
1446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1447 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1448 IEM_MC_ARG(uint64_t, uSrc, 1);
1449
1450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1452 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1453 IEM_MC_PREPARE_SSE_USAGE();
1454
1455 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1456 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1457 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1458
1459 IEM_MC_ADVANCE_RIP();
1460 IEM_MC_END();
1461 }
1462 return VINF_SUCCESS;
1463}
1464
1465
1466/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1467FNIEMOP_STUB(iemOp_movlps_Mq_Vq);
1468
1469/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1470FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1471{
1472 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1475 {
1476#if 0
1477 /*
1478 * Register, register.
1479 */
1480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1481 IEM_MC_BEGIN(0, 1);
1482 IEM_MC_LOCAL(uint64_t, uSrc);
1483 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1485 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1486 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1487 IEM_MC_ADVANCE_RIP();
1488 IEM_MC_END();
1489#else
1490 return IEMOP_RAISE_INVALID_OPCODE();
1491#endif
1492 }
1493 else
1494 {
1495 /*
1496 * Memory, register.
1497 */
1498 IEM_MC_BEGIN(0, 2);
1499 IEM_MC_LOCAL(uint64_t, uSrc);
1500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1501
1502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1504 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1505 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1506
1507 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1508 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1509
1510 IEM_MC_ADVANCE_RIP();
1511 IEM_MC_END();
1512 }
1513 return VINF_SUCCESS;
1514}
1515
1516/* Opcode 0xf3 0x0f 0x13 - invalid */
1517/* Opcode 0xf2 0x0f 0x13 - invalid */
1518
1519/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1520FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1521/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1522FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1523/* Opcode 0xf3 0x0f 0x14 - invalid */
1524/* Opcode 0xf2 0x0f 0x14 - invalid */
1525/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1526FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1527/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1528FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1529/* Opcode 0xf3 0x0f 0x15 - invalid */
1530/* Opcode 0xf2 0x0f 0x15 - invalid */
1531/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq movlhps Vdq, Uq */
1532FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
1533/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
1534FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
1535/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
1536FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
1537/* Opcode 0xf2 0x0f 0x16 - invalid */
1538/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
1539FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
1540/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
1541FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT
1542/* Opcode 0xf3 0x0f 0x17 - invalid */
1543/* Opcode 0xf2 0x0f 0x17 - invalid */
1544
1545
1546/** Opcode 0x0f 0x18. */
1547FNIEMOP_DEF(iemOp_prefetch_Grp16)
1548{
1549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1550 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1551 {
1552 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1553 {
1554 case 4: /* Aliased to /0 for the time being according to AMD. */
1555 case 5: /* Aliased to /0 for the time being according to AMD. */
1556 case 6: /* Aliased to /0 for the time being according to AMD. */
1557 case 7: /* Aliased to /0 for the time being according to AMD. */
1558 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1559 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1560 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1561 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1563 }
1564
1565 IEM_MC_BEGIN(0, 1);
1566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1569 /* Currently a NOP. */
1570 NOREF(GCPtrEffSrc);
1571 IEM_MC_ADVANCE_RIP();
1572 IEM_MC_END();
1573 return VINF_SUCCESS;
1574 }
1575
1576 return IEMOP_RAISE_INVALID_OPCODE();
1577}
1578
1579
1580/** Opcode 0x0f 0x19..0x1f. */
1581FNIEMOP_DEF(iemOp_nop_Ev)
1582{
1583 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1586 {
1587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1588 IEM_MC_BEGIN(0, 0);
1589 IEM_MC_ADVANCE_RIP();
1590 IEM_MC_END();
1591 }
1592 else
1593 {
1594 IEM_MC_BEGIN(0, 1);
1595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1598 /* Currently a NOP. */
1599 NOREF(GCPtrEffSrc);
1600 IEM_MC_ADVANCE_RIP();
1601 IEM_MC_END();
1602 }
1603 return VINF_SUCCESS;
1604}
1605
1606
1607/** Opcode 0x0f 0x20. */
1608FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1609{
1610 /* mod is ignored, as is operand size overrides. */
1611 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1612 IEMOP_HLP_MIN_386();
1613 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1614 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1615 else
1616 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1617
1618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1619 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1620 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1621 {
1622 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1623 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1624 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1625 iCrReg |= 8;
1626 }
1627 switch (iCrReg)
1628 {
1629 case 0: case 2: case 3: case 4: case 8:
1630 break;
1631 default:
1632 return IEMOP_RAISE_INVALID_OPCODE();
1633 }
1634 IEMOP_HLP_DONE_DECODING();
1635
1636 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1637}
1638
1639
1640/** Opcode 0x0f 0x21. */
1641FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1642{
1643 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1644 IEMOP_HLP_MIN_386();
1645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1647 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1648 return IEMOP_RAISE_INVALID_OPCODE();
1649 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1650 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1651 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1652}
1653
1654
1655/** Opcode 0x0f 0x22. */
1656FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1657{
1658 /* mod is ignored, as is operand size overrides. */
1659 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1660 IEMOP_HLP_MIN_386();
1661 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1662 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1663 else
1664 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1665
1666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1667 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1668 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1669 {
1670 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1671 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1672 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1673 iCrReg |= 8;
1674 }
1675 switch (iCrReg)
1676 {
1677 case 0: case 2: case 3: case 4: case 8:
1678 break;
1679 default:
1680 return IEMOP_RAISE_INVALID_OPCODE();
1681 }
1682 IEMOP_HLP_DONE_DECODING();
1683
1684 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1685}
1686
1687
1688/** Opcode 0x0f 0x23. */
1689FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1690{
1691 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1692 IEMOP_HLP_MIN_386();
1693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1695 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1696 return IEMOP_RAISE_INVALID_OPCODE();
1697 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1698 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1699 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1700}
1701
1702
1703/** Opcode 0x0f 0x24. */
1704FNIEMOP_DEF(iemOp_mov_Rd_Td)
1705{
1706 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1707 /** @todo works on 386 and 486. */
1708 /* The RM byte is not considered, see testcase. */
1709 return IEMOP_RAISE_INVALID_OPCODE();
1710}
1711
1712
1713/** Opcode 0x0f 0x26. */
1714FNIEMOP_DEF(iemOp_mov_Td_Rd)
1715{
1716 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1717 /** @todo works on 386 and 486. */
1718 /* The RM byte is not considered, see testcase. */
1719 return IEMOP_RAISE_INVALID_OPCODE();
1720}
1721
1722
1723/** Opcode 0x0f 0x28 - movaps Vps, Wps */
1724FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
1725{
1726 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1728 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1729 {
1730 /*
1731 * Register, register.
1732 */
1733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1734 IEM_MC_BEGIN(0, 0);
1735 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1736 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1737 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1738 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1739 IEM_MC_ADVANCE_RIP();
1740 IEM_MC_END();
1741 }
1742 else
1743 {
1744 /*
1745 * Register, memory.
1746 */
1747 IEM_MC_BEGIN(0, 2);
1748 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1750
1751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1753 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1754 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1755
1756 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1757 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1758
1759 IEM_MC_ADVANCE_RIP();
1760 IEM_MC_END();
1761 }
1762 return VINF_SUCCESS;
1763}
1764
1765/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
1766FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
1767{
1768 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1770 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1771 {
1772 /*
1773 * Register, register.
1774 */
1775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1776 IEM_MC_BEGIN(0, 0);
1777 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1778 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1779 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1780 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1781 IEM_MC_ADVANCE_RIP();
1782 IEM_MC_END();
1783 }
1784 else
1785 {
1786 /*
1787 * Register, memory.
1788 */
1789 IEM_MC_BEGIN(0, 2);
1790 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1792
1793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1795 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1796 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1797
1798 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1799 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1800
1801 IEM_MC_ADVANCE_RIP();
1802 IEM_MC_END();
1803 }
1804 return VINF_SUCCESS;
1805}
1806
1807/* Opcode 0xf3 0x0f 0x28 - invalid */
1808/* Opcode 0xf2 0x0f 0x28 - invalid */
1809
1810/** Opcode 0x0f 0x29 - movaps Wps, Vps */
1811FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
1812{
1813 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1815 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1816 {
1817 /*
1818 * Register, register.
1819 */
1820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1821 IEM_MC_BEGIN(0, 0);
1822 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1823 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1824 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1825 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1826 IEM_MC_ADVANCE_RIP();
1827 IEM_MC_END();
1828 }
1829 else
1830 {
1831 /*
1832 * Memory, register.
1833 */
1834 IEM_MC_BEGIN(0, 2);
1835 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1837
1838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1840 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1841 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1842
1843 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1844 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1845
1846 IEM_MC_ADVANCE_RIP();
1847 IEM_MC_END();
1848 }
1849 return VINF_SUCCESS;
1850}
1851
1852/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
1853FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
1854{
1855 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1857 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1858 {
1859 /*
1860 * Register, register.
1861 */
1862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1863 IEM_MC_BEGIN(0, 0);
1864 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1867 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1868 IEM_MC_ADVANCE_RIP();
1869 IEM_MC_END();
1870 }
1871 else
1872 {
1873 /*
1874 * Memory, register.
1875 */
1876 IEM_MC_BEGIN(0, 2);
1877 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1879
1880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1882 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1883 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1884
1885 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1886 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1887
1888 IEM_MC_ADVANCE_RIP();
1889 IEM_MC_END();
1890 }
1891 return VINF_SUCCESS;
1892}
1893
1894/* Opcode 0xf3 0x0f 0x29 - invalid */
1895/* Opcode 0xf2 0x0f 0x29 - invalid */
1896
1897
1898/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1899FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1900/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1901FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1902/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1903FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
1904/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1905FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
1906
1907
1908/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
1909FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
1910{
1911 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1913 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1914 {
1915 /*
1916 * memory, register.
1917 */
1918 IEM_MC_BEGIN(0, 2);
1919 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1921
1922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1924 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1925 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1926
1927 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1928 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1929
1930 IEM_MC_ADVANCE_RIP();
1931 IEM_MC_END();
1932 }
1933 /* The register, register encoding is invalid. */
1934 else
1935 return IEMOP_RAISE_INVALID_OPCODE();
1936 return VINF_SUCCESS;
1937}
1938
1939/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
1940FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
1941{
1942 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
1943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1944 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1945 {
1946 /*
1947 * memory, register.
1948 */
1949 IEM_MC_BEGIN(0, 2);
1950 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1952
1953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1955 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1956 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1957
1958 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1959 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1960
1961 IEM_MC_ADVANCE_RIP();
1962 IEM_MC_END();
1963 }
1964 /* The register, register encoding is invalid. */
1965 else
1966 return IEMOP_RAISE_INVALID_OPCODE();
1967 return VINF_SUCCESS;
1968}
1969/* Opcode 0xf3 0x0f 0x2b - invalid */
1970/* Opcode 0xf2 0x0f 0x2b - invalid */
1971
1972
1973/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1974FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1975/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1976FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1977/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
1978FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
1979/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
1980FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
1981
1982/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1983FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1984/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1985FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1986/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
1987FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
1988/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
1989FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
1990
1991/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
1992FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
1993/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
1994FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
1995/* Opcode 0xf3 0x0f 0x2e - invalid */
1996/* Opcode 0xf2 0x0f 0x2e - invalid */
1997
1998/** Opcode 0x0f 0x2f - comiss Vss, Wss */
1999FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2000/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2001FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2002/* Opcode 0xf3 0x0f 0x2f - invalid */
2003/* Opcode 0xf2 0x0f 0x2f - invalid */
2004
2005/** Opcode 0x0f 0x30. */
2006FNIEMOP_DEF(iemOp_wrmsr)
2007{
2008 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2011}
2012
2013
2014/** Opcode 0x0f 0x31. */
2015FNIEMOP_DEF(iemOp_rdtsc)
2016{
2017 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2019 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2020}
2021
2022
2023/** Opcode 0x0f 0x33. */
2024FNIEMOP_DEF(iemOp_rdmsr)
2025{
2026 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2028 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2029}
2030
2031
2032/** Opcode 0x0f 0x34. */
2033FNIEMOP_STUB(iemOp_rdpmc);
2034/** Opcode 0x0f 0x34. */
2035FNIEMOP_STUB(iemOp_sysenter);
2036/** Opcode 0x0f 0x35. */
2037FNIEMOP_STUB(iemOp_sysexit);
2038/** Opcode 0x0f 0x37. */
2039FNIEMOP_STUB(iemOp_getsec);
2040/** Opcode 0x0f 0x38. */
2041FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2042/** Opcode 0x0f 0x3a. */
2043FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2044
2045
2046/**
2047 * Implements a conditional move.
2048 *
2049 * Wish there was an obvious way to do this where we could share and reduce
2050 * code bloat.
2051 *
2052 * @param a_Cnd The conditional "microcode" operation.
2053 */
2054#define CMOV_X(a_Cnd) \
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2057 { \
2058 switch (pVCpu->iem.s.enmEffOpSize) \
2059 { \
2060 case IEMMODE_16BIT: \
2061 IEM_MC_BEGIN(0, 1); \
2062 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2063 a_Cnd { \
2064 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2065 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2066 } IEM_MC_ENDIF(); \
2067 IEM_MC_ADVANCE_RIP(); \
2068 IEM_MC_END(); \
2069 return VINF_SUCCESS; \
2070 \
2071 case IEMMODE_32BIT: \
2072 IEM_MC_BEGIN(0, 1); \
2073 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2074 a_Cnd { \
2075 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2076 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2077 } IEM_MC_ELSE() { \
2078 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2079 } IEM_MC_ENDIF(); \
2080 IEM_MC_ADVANCE_RIP(); \
2081 IEM_MC_END(); \
2082 return VINF_SUCCESS; \
2083 \
2084 case IEMMODE_64BIT: \
2085 IEM_MC_BEGIN(0, 1); \
2086 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2087 a_Cnd { \
2088 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2089 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2090 } IEM_MC_ENDIF(); \
2091 IEM_MC_ADVANCE_RIP(); \
2092 IEM_MC_END(); \
2093 return VINF_SUCCESS; \
2094 \
2095 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2096 } \
2097 } \
2098 else \
2099 { \
2100 switch (pVCpu->iem.s.enmEffOpSize) \
2101 { \
2102 case IEMMODE_16BIT: \
2103 IEM_MC_BEGIN(0, 2); \
2104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2105 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2107 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2108 a_Cnd { \
2109 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2110 } IEM_MC_ENDIF(); \
2111 IEM_MC_ADVANCE_RIP(); \
2112 IEM_MC_END(); \
2113 return VINF_SUCCESS; \
2114 \
2115 case IEMMODE_32BIT: \
2116 IEM_MC_BEGIN(0, 2); \
2117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2118 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2120 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2121 a_Cnd { \
2122 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2123 } IEM_MC_ELSE() { \
2124 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2125 } IEM_MC_ENDIF(); \
2126 IEM_MC_ADVANCE_RIP(); \
2127 IEM_MC_END(); \
2128 return VINF_SUCCESS; \
2129 \
2130 case IEMMODE_64BIT: \
2131 IEM_MC_BEGIN(0, 2); \
2132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2133 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2135 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2136 a_Cnd { \
2137 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2138 } IEM_MC_ENDIF(); \
2139 IEM_MC_ADVANCE_RIP(); \
2140 IEM_MC_END(); \
2141 return VINF_SUCCESS; \
2142 \
2143 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2144 } \
2145 } do {} while (0)
2146
2147
2148
2149/** Opcode 0x0f 0x40. */
2150FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2151{
2152 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2153 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2154}
2155
2156
2157/** Opcode 0x0f 0x41. */
2158FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2159{
2160 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2161 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2162}
2163
2164
2165/** Opcode 0x0f 0x42. */
2166FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2167{
2168 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2169 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2170}
2171
2172
2173/** Opcode 0x0f 0x43. */
2174FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2175{
2176 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2177 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2178}
2179
2180
2181/** Opcode 0x0f 0x44. */
2182FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2183{
2184 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2185 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2186}
2187
2188
2189/** Opcode 0x0f 0x45. */
2190FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2191{
2192 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2193 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2194}
2195
2196
2197/** Opcode 0x0f 0x46. */
2198FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2199{
2200 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2201 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2202}
2203
2204
2205/** Opcode 0x0f 0x47. */
2206FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2207{
2208 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2209 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2210}
2211
2212
2213/** Opcode 0x0f 0x48. */
2214FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2215{
2216 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2217 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2218}
2219
2220
2221/** Opcode 0x0f 0x49. */
2222FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2223{
2224 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2225 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2226}
2227
2228
2229/** Opcode 0x0f 0x4a. */
2230FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2231{
2232 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2233 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2234}
2235
2236
2237/** Opcode 0x0f 0x4b. */
2238FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2239{
2240 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2241 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2242}
2243
2244
2245/** Opcode 0x0f 0x4c. */
2246FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2247{
2248 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2249 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2250}
2251
2252
2253/** Opcode 0x0f 0x4d. */
2254FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2255{
2256 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2257 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2258}
2259
2260
2261/** Opcode 0x0f 0x4e. */
2262FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2263{
2264 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2265 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2266}
2267
2268
2269/** Opcode 0x0f 0x4f. */
2270FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2271{
2272 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2273 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2274}
2275
2276#undef CMOV_X
2277
2278/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2279FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2280/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2281FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2282/* Opcode 0xf3 0x0f 0x50 - invalid */
2283/* Opcode 0xf2 0x0f 0x50 - invalid */
2284
2285/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2286FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2287/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2288FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2289/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2290FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2291/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2292FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2293
2294/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2295FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2296/* Opcode 0x66 0x0f 0x52 - invalid */
2297/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2298FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2299/* Opcode 0xf2 0x0f 0x52 - invalid */
2300
2301/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2302FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2303/* Opcode 0x66 0x0f 0x53 - invalid */
2304/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2305FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2306/* Opcode 0xf2 0x0f 0x53 - invalid */
2307
2308/** Opcode 0x0f 0x54 - andps Vps, Wps */
2309FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2310/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2311FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2312/* Opcode 0xf3 0x0f 0x54 - invalid */
2313/* Opcode 0xf2 0x0f 0x54 - invalid */
2314
2315/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2316FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2317/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2318FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2319/* Opcode 0xf3 0x0f 0x55 - invalid */
2320/* Opcode 0xf2 0x0f 0x55 - invalid */
2321
2322/** Opcode 0x0f 0x56 - orps Vps, Wps */
2323FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2324/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2325FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2326/* Opcode 0xf3 0x0f 0x56 - invalid */
2327/* Opcode 0xf2 0x0f 0x56 - invalid */
2328
2329/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2330FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2331/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2332FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2333/* Opcode 0xf3 0x0f 0x57 - invalid */
2334/* Opcode 0xf2 0x0f 0x57 - invalid */
2335
2336/** Opcode 0x0f 0x58 - addps Vps, Wps */
2337FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2338/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2339FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2340/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2341FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2342/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2343FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2344
2345/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2346FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2347/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2348FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2349/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2350FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2351/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2352FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2353
2354/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2355FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2356/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2357FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2358/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2359FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2360/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2361FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2362
2363/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2364FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2365/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2366FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2367/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2368FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2369/* Opcode 0xf2 0x0f 0x5b - invalid */
2370
2371/** Opcode 0x0f 0x5c - subps Vps, Wps */
2372FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2373/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2374FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2375/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2376FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2377/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2378FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2379
2380/** Opcode 0x0f 0x5d - minps Vps, Wps */
2381FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2382/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2383FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2384/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2385FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2386/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2387FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2388
2389/** Opcode 0x0f 0x5e - divps Vps, Wps */
2390FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2391/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2392FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2393/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2394FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2395/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2396FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2397
2398/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2399FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2400/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2401FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2402/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2403FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2404/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2405FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2406
2407/**
2408 * Common worker for MMX instructions on the forms:
2409 * pxxxx mm1, mm2/mem32
2410 *
2411 * The 2nd operand is the first half of a register, which in the memory case
2412 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2413 * memory accessed for MMX.
2414 *
2415 * Exceptions type 4.
2416 */
2417FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2418{
2419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2420 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2421 {
2422 /*
2423 * Register, register.
2424 */
2425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2426 IEM_MC_BEGIN(2, 0);
2427 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2428 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2429 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2430 IEM_MC_PREPARE_SSE_USAGE();
2431 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2432 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2433 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2434 IEM_MC_ADVANCE_RIP();
2435 IEM_MC_END();
2436 }
2437 else
2438 {
2439 /*
2440 * Register, memory.
2441 */
2442 IEM_MC_BEGIN(2, 2);
2443 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2444 IEM_MC_LOCAL(uint64_t, uSrc);
2445 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2447
2448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2450 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2451 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2452
2453 IEM_MC_PREPARE_SSE_USAGE();
2454 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2455 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2456
2457 IEM_MC_ADVANCE_RIP();
2458 IEM_MC_END();
2459 }
2460 return VINF_SUCCESS;
2461}
2462
2463
2464/**
2465 * Common worker for SSE2 instructions on the forms:
2466 * pxxxx xmm1, xmm2/mem128
2467 *
2468 * The 2nd operand is the first half of a register, which in the memory case
2469 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2470 * memory accessed for MMX.
2471 *
2472 * Exceptions type 4.
2473 */
2474FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2475{
2476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2477 if (!pImpl->pfnU64)
2478 return IEMOP_RAISE_INVALID_OPCODE();
2479 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2480 {
2481 /*
2482 * Register, register.
2483 */
2484 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2485 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2487 IEM_MC_BEGIN(2, 0);
2488 IEM_MC_ARG(uint64_t *, pDst, 0);
2489 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2490 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2491 IEM_MC_PREPARE_FPU_USAGE();
2492 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2493 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2494 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2495 IEM_MC_ADVANCE_RIP();
2496 IEM_MC_END();
2497 }
2498 else
2499 {
2500 /*
2501 * Register, memory.
2502 */
2503 IEM_MC_BEGIN(2, 2);
2504 IEM_MC_ARG(uint64_t *, pDst, 0);
2505 IEM_MC_LOCAL(uint32_t, uSrc);
2506 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2508
2509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2511 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2512 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2513
2514 IEM_MC_PREPARE_FPU_USAGE();
2515 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2516 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2517
2518 IEM_MC_ADVANCE_RIP();
2519 IEM_MC_END();
2520 }
2521 return VINF_SUCCESS;
2522}
2523
2524
2525/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2526FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2527{
2528 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2529 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2530}
2531
2532/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
2533FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
2534{
2535 IEMOP_MNEMONIC(vpunpcklbw_Vx_Wx, "vpunpcklbw Vx, Wx");
2536 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2537}
2538
2539/* Opcode 0xf3 0x0f 0x60 - invalid */
2540
2541
2542/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2543FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2544{
2545 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2546 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2547}
2548
2549/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2550FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2551{
2552 IEMOP_MNEMONIC(vpunpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2553 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2554}
2555
2556/* Opcode 0xf3 0x0f 0x61 - invalid */
2557
2558
2559/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2560FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2561{
2562 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2563 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2564}
2565
2566/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2567FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2568{
2569 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2570 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2571}
2572
2573/* Opcode 0xf3 0x0f 0x62 - invalid */
2574
2575
2576
2577/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2578FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2579/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2580FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2581/* Opcode 0xf3 0x0f 0x63 - invalid */
2582
2583/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2584FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2585/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2586FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2587/* Opcode 0xf3 0x0f 0x64 - invalid */
2588
2589/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2590FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2591/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2592FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2593/* Opcode 0xf3 0x0f 0x65 - invalid */
2594
2595/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2596FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2597/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
2598FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
2599/* Opcode 0xf3 0x0f 0x66 - invalid */
2600
2601/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2602FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2603/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
2604FNIEMOP_STUB(iemOp_packuswb_Vx_W);
2605/* Opcode 0xf3 0x0f 0x67 - invalid */
2606
2607
2608/**
2609 * Common worker for MMX instructions on the form:
2610 * pxxxx mm1, mm2/mem64
2611 *
2612 * The 2nd operand is the second half of a register, which in the memory case
2613 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2614 * where it may read the full 128 bits or only the upper 64 bits.
2615 *
2616 * Exceptions type 4.
2617 */
2618FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2619{
2620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2621 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2622 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2623 {
2624 /*
2625 * Register, register.
2626 */
2627 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2628 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2630 IEM_MC_BEGIN(2, 0);
2631 IEM_MC_ARG(uint64_t *, pDst, 0);
2632 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2633 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2634 IEM_MC_PREPARE_FPU_USAGE();
2635 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2636 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2637 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2638 IEM_MC_ADVANCE_RIP();
2639 IEM_MC_END();
2640 }
2641 else
2642 {
2643 /*
2644 * Register, memory.
2645 */
2646 IEM_MC_BEGIN(2, 2);
2647 IEM_MC_ARG(uint64_t *, pDst, 0);
2648 IEM_MC_LOCAL(uint64_t, uSrc);
2649 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2651
2652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2655 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2656
2657 IEM_MC_PREPARE_FPU_USAGE();
2658 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2659 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2660
2661 IEM_MC_ADVANCE_RIP();
2662 IEM_MC_END();
2663 }
2664 return VINF_SUCCESS;
2665}
2666
2667
2668/**
2669 * Common worker for SSE2 instructions on the form:
2670 * pxxxx xmm1, xmm2/mem128
2671 *
2672 * The 2nd operand is the second half of a register, which in the memory case
2673 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2674 * where it may read the full 128 bits or only the upper 64 bits.
2675 *
2676 * Exceptions type 4.
2677 */
2678FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2679{
2680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2682 {
2683 /*
2684 * Register, register.
2685 */
2686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2687 IEM_MC_BEGIN(2, 0);
2688 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2689 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2690 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2691 IEM_MC_PREPARE_SSE_USAGE();
2692 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2693 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2694 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2695 IEM_MC_ADVANCE_RIP();
2696 IEM_MC_END();
2697 }
2698 else
2699 {
2700 /*
2701 * Register, memory.
2702 */
2703 IEM_MC_BEGIN(2, 2);
2704 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2705 IEM_MC_LOCAL(RTUINT128U, uSrc);
2706 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2708
2709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2711 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2712 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
2713
2714 IEM_MC_PREPARE_SSE_USAGE();
2715 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2716 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2717
2718 IEM_MC_ADVANCE_RIP();
2719 IEM_MC_END();
2720 }
2721 return VINF_SUCCESS;
2722}
2723
2724
2725/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2726FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2727{
2728 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2729 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2730}
2731
2732/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
2733FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
2734{
2735 IEMOP_MNEMONIC(vpunpckhbw_Vx_Wx, "vpunpckhbw Vx, Wx");
2736 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2737}
2738/* Opcode 0xf3 0x0f 0x68 - invalid */
2739
2740
2741/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2742FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2743{
2744 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2745 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2746}
2747
2748/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Hx, Wx */
2749FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
2750{
2751 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
2752 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2753
2754}
2755/* Opcode 0xf3 0x0f 0x69 - invalid */
2756
2757
2758/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2759FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2760{
2761 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2762 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2763}
2764
2765/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
2766FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
2767{
2768 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
2769 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2770}
2771/* Opcode 0xf3 0x0f 0x6a - invalid */
2772
2773
2774/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2775FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2776/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
2777FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
2778/* Opcode 0xf3 0x0f 0x6b - invalid */
2779
2780
2781/* Opcode 0x0f 0x6c - invalid */
2782
2783/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
2784FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
2785{
2786 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
2787 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2788}
2789
2790/* Opcode 0xf3 0x0f 0x6c - invalid */
2791/* Opcode 0xf2 0x0f 0x6c - invalid */
2792
2793
2794/* Opcode 0x0f 0x6d - invalid */
2795
2796/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
2797FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
2798{
2799 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
2800 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2801}
2802
2803/* Opcode 0xf3 0x0f 0x6d - invalid */
2804
2805
2806/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2807FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2808{
2809 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2810 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2811 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2812 else
2813 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2814 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2815 {
2816 /* MMX, greg */
2817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2818 IEM_MC_BEGIN(0, 1);
2819 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2820 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2821 IEM_MC_LOCAL(uint64_t, u64Tmp);
2822 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2823 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2824 else
2825 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2826 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2827 IEM_MC_ADVANCE_RIP();
2828 IEM_MC_END();
2829 }
2830 else
2831 {
2832 /* MMX, [mem] */
2833 IEM_MC_BEGIN(0, 2);
2834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2835 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2838 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2839 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2840 {
2841 IEM_MC_LOCAL(uint64_t, u64Tmp);
2842 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2843 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2844 }
2845 else
2846 {
2847 IEM_MC_LOCAL(uint32_t, u32Tmp);
2848 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2849 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2850 }
2851 IEM_MC_ADVANCE_RIP();
2852 IEM_MC_END();
2853 }
2854 return VINF_SUCCESS;
2855}
2856
2857/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
2858FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
2859{
2860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2861 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2862 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
2863 else
2864 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
2865 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2866 {
2867 /* XMM, greg*/
2868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2869 IEM_MC_BEGIN(0, 1);
2870 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2871 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2872 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2873 {
2874 IEM_MC_LOCAL(uint64_t, u64Tmp);
2875 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2876 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2877 }
2878 else
2879 {
2880 IEM_MC_LOCAL(uint32_t, u32Tmp);
2881 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2882 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2883 }
2884 IEM_MC_ADVANCE_RIP();
2885 IEM_MC_END();
2886 }
2887 else
2888 {
2889 /* XMM, [mem] */
2890 IEM_MC_BEGIN(0, 2);
2891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2892 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2895 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2897 {
2898 IEM_MC_LOCAL(uint64_t, u64Tmp);
2899 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2900 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2901 }
2902 else
2903 {
2904 IEM_MC_LOCAL(uint32_t, u32Tmp);
2905 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2906 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2907 }
2908 IEM_MC_ADVANCE_RIP();
2909 IEM_MC_END();
2910 }
2911 return VINF_SUCCESS;
2912}
2913
2914/* Opcode 0xf3 0x0f 0x6e - invalid */
2915
2916
2917/** Opcode 0x0f 0x6f - movq Pq, Qq */
2918FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2919{
2920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2921 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2922 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2923 {
2924 /*
2925 * Register, register.
2926 */
2927 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2928 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2930 IEM_MC_BEGIN(0, 1);
2931 IEM_MC_LOCAL(uint64_t, u64Tmp);
2932 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2933 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2934 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2935 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2936 IEM_MC_ADVANCE_RIP();
2937 IEM_MC_END();
2938 }
2939 else
2940 {
2941 /*
2942 * Register, memory.
2943 */
2944 IEM_MC_BEGIN(0, 2);
2945 IEM_MC_LOCAL(uint64_t, u64Tmp);
2946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2947
2948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2950 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2951 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2952 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2953 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2954
2955 IEM_MC_ADVANCE_RIP();
2956 IEM_MC_END();
2957 }
2958 return VINF_SUCCESS;
2959}
2960
2961/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
2962FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
2963{
2964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2965 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2966 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2967 {
2968 /*
2969 * Register, register.
2970 */
2971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2972 IEM_MC_BEGIN(0, 0);
2973 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2974 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2975 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2976 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2977 IEM_MC_ADVANCE_RIP();
2978 IEM_MC_END();
2979 }
2980 else
2981 {
2982 /*
2983 * Register, memory.
2984 */
2985 IEM_MC_BEGIN(0, 2);
2986 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2988
2989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2991 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2992 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2993 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2994 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2995
2996 IEM_MC_ADVANCE_RIP();
2997 IEM_MC_END();
2998 }
2999 return VINF_SUCCESS;
3000}
3001
3002/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3003FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3004{
3005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3006 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3007 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3008 {
3009 /*
3010 * Register, register.
3011 */
3012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3013 IEM_MC_BEGIN(0, 0);
3014 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3015 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3016 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3017 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3018 IEM_MC_ADVANCE_RIP();
3019 IEM_MC_END();
3020 }
3021 else
3022 {
3023 /*
3024 * Register, memory.
3025 */
3026 IEM_MC_BEGIN(0, 2);
3027 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3029
3030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3032 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3033 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3034 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3035 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3036
3037 IEM_MC_ADVANCE_RIP();
3038 IEM_MC_END();
3039 }
3040 return VINF_SUCCESS;
3041}
3042
3043
3044/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3045FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3046{
3047 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3048 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3049 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3050 {
3051 /*
3052 * Register, register.
3053 */
3054 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3056
3057 IEM_MC_BEGIN(3, 0);
3058 IEM_MC_ARG(uint64_t *, pDst, 0);
3059 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3060 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3061 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3062 IEM_MC_PREPARE_FPU_USAGE();
3063 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3064 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3065 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3066 IEM_MC_ADVANCE_RIP();
3067 IEM_MC_END();
3068 }
3069 else
3070 {
3071 /*
3072 * Register, memory.
3073 */
3074 IEM_MC_BEGIN(3, 2);
3075 IEM_MC_ARG(uint64_t *, pDst, 0);
3076 IEM_MC_LOCAL(uint64_t, uSrc);
3077 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3079
3080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3081 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3082 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3084 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3085
3086 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3087 IEM_MC_PREPARE_FPU_USAGE();
3088 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3089 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3090
3091 IEM_MC_ADVANCE_RIP();
3092 IEM_MC_END();
3093 }
3094 return VINF_SUCCESS;
3095}
3096
3097/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3098FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3099{
3100 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3102 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3103 {
3104 /*
3105 * Register, register.
3106 */
3107 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3109
3110 IEM_MC_BEGIN(3, 0);
3111 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3112 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3113 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3114 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3115 IEM_MC_PREPARE_SSE_USAGE();
3116 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3117 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3118 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3119 IEM_MC_ADVANCE_RIP();
3120 IEM_MC_END();
3121 }
3122 else
3123 {
3124 /*
3125 * Register, memory.
3126 */
3127 IEM_MC_BEGIN(3, 2);
3128 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3129 IEM_MC_LOCAL(RTUINT128U, uSrc);
3130 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3132
3133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3134 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3135 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3137 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3138
3139 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3140 IEM_MC_PREPARE_SSE_USAGE();
3141 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3142 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3143
3144 IEM_MC_ADVANCE_RIP();
3145 IEM_MC_END();
3146 }
3147 return VINF_SUCCESS;
3148}
3149
3150/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3151FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3152{
3153 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3155 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3156 {
3157 /*
3158 * Register, register.
3159 */
3160 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3162
3163 IEM_MC_BEGIN(3, 0);
3164 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3165 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3166 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3167 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3168 IEM_MC_PREPARE_SSE_USAGE();
3169 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3170 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3171 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3172 IEM_MC_ADVANCE_RIP();
3173 IEM_MC_END();
3174 }
3175 else
3176 {
3177 /*
3178 * Register, memory.
3179 */
3180 IEM_MC_BEGIN(3, 2);
3181 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3182 IEM_MC_LOCAL(RTUINT128U, uSrc);
3183 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3185
3186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3187 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3188 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3190 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3191
3192 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3193 IEM_MC_PREPARE_SSE_USAGE();
3194 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3195 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3196
3197 IEM_MC_ADVANCE_RIP();
3198 IEM_MC_END();
3199 }
3200 return VINF_SUCCESS;
3201}
3202
3203/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3204FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3205{
3206 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3209 {
3210 /*
3211 * Register, register.
3212 */
3213 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3215
3216 IEM_MC_BEGIN(3, 0);
3217 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3218 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3219 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3220 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3221 IEM_MC_PREPARE_SSE_USAGE();
3222 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3223 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3224 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3225 IEM_MC_ADVANCE_RIP();
3226 IEM_MC_END();
3227 }
3228 else
3229 {
3230 /*
3231 * Register, memory.
3232 */
3233 IEM_MC_BEGIN(3, 2);
3234 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3235 IEM_MC_LOCAL(RTUINT128U, uSrc);
3236 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3238
3239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3240 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3241 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3243 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3244
3245 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3246 IEM_MC_PREPARE_SSE_USAGE();
3247 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3248 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3249
3250 IEM_MC_ADVANCE_RIP();
3251 IEM_MC_END();
3252 }
3253 return VINF_SUCCESS;
3254}
3255
3256
3257/** Opcode 0x0f 0x71 11/2. */
3258FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3259
3260/** Opcode 0x66 0x0f 0x71 11/2. */
3261FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3262
3263/** Opcode 0x0f 0x71 11/4. */
3264FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3265
3266/** Opcode 0x66 0x0f 0x71 11/4. */
3267FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3268
3269/** Opcode 0x0f 0x71 11/6. */
3270FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3271
3272/** Opcode 0x66 0x0f 0x71 11/6. */
3273FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3274
3275
3276/**
3277 * Group 12 jump table for register variant.
3278 */
3279IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3280{
3281 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3282 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3283 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3284 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3285 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3286 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3287 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3288 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3289};
3290AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3291
3292
3293/** Opcode 0x0f 0x71. */
3294FNIEMOP_DEF(iemOp_Grp12)
3295{
3296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3297 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3298 /* register, register */
3299 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3300 + pVCpu->iem.s.idxPrefix], bRm);
3301 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3302}
3303
3304
3305/** Opcode 0x0f 0x72 11/2. */
3306FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3307
3308/** Opcode 0x66 0x0f 0x72 11/2. */
3309FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3310
3311/** Opcode 0x0f 0x72 11/4. */
3312FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3313
3314/** Opcode 0x66 0x0f 0x72 11/4. */
3315FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3316
3317/** Opcode 0x0f 0x72 11/6. */
3318FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3319
3320/** Opcode 0x66 0x0f 0x72 11/6. */
3321FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3322
3323
3324/**
3325 * Group 13 jump table for register variant.
3326 */
3327IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3328{
3329 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3330 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3331 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3332 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3333 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3334 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3335 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3336 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3337};
3338AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3339
3340/** Opcode 0x0f 0x72. */
3341FNIEMOP_DEF(iemOp_Grp13)
3342{
3343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3344 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3345 /* register, register */
3346 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3347 + pVCpu->iem.s.idxPrefix], bRm);
3348 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3349}
3350
3351
3352/** Opcode 0x0f 0x73 11/2. */
3353FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3354
3355/** Opcode 0x66 0x0f 0x73 11/2. */
3356FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3357
3358/** Opcode 0x66 0x0f 0x73 11/3. */
3359FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3360
3361/** Opcode 0x0f 0x73 11/6. */
3362FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3363
3364/** Opcode 0x66 0x0f 0x73 11/6. */
3365FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3366
3367/** Opcode 0x66 0x0f 0x73 11/7. */
3368FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3369
3370/**
3371 * Group 14 jump table for register variant.
3372 */
3373IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3374{
3375 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3376 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3377 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3378 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3379 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3380 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3381 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3382 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3383};
3384AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3385
3386
3387/** Opcode 0x0f 0x73. */
3388FNIEMOP_DEF(iemOp_Grp14)
3389{
3390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3391 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3392 /* register, register */
3393 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3394 + pVCpu->iem.s.idxPrefix], bRm);
3395 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3396}
3397
3398
3399/**
3400 * Common worker for MMX instructions on the form:
3401 * pxxx mm1, mm2/mem64
3402 */
3403FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3404{
3405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3407 {
3408 /*
3409 * Register, register.
3410 */
3411 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3412 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3414 IEM_MC_BEGIN(2, 0);
3415 IEM_MC_ARG(uint64_t *, pDst, 0);
3416 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3417 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3418 IEM_MC_PREPARE_FPU_USAGE();
3419 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3420 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3421 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3422 IEM_MC_ADVANCE_RIP();
3423 IEM_MC_END();
3424 }
3425 else
3426 {
3427 /*
3428 * Register, memory.
3429 */
3430 IEM_MC_BEGIN(2, 2);
3431 IEM_MC_ARG(uint64_t *, pDst, 0);
3432 IEM_MC_LOCAL(uint64_t, uSrc);
3433 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3435
3436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3438 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3439 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3440
3441 IEM_MC_PREPARE_FPU_USAGE();
3442 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3443 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3444
3445 IEM_MC_ADVANCE_RIP();
3446 IEM_MC_END();
3447 }
3448 return VINF_SUCCESS;
3449}
3450
3451
3452/**
3453 * Common worker for SSE2 instructions on the forms:
3454 * pxxx xmm1, xmm2/mem128
3455 *
3456 * Proper alignment of the 128-bit operand is enforced.
3457 * Exceptions type 4. SSE2 cpuid checks.
3458 */
3459FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3460{
3461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3462 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3463 {
3464 /*
3465 * Register, register.
3466 */
3467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3468 IEM_MC_BEGIN(2, 0);
3469 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3470 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3471 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3472 IEM_MC_PREPARE_SSE_USAGE();
3473 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3474 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3475 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3476 IEM_MC_ADVANCE_RIP();
3477 IEM_MC_END();
3478 }
3479 else
3480 {
3481 /*
3482 * Register, memory.
3483 */
3484 IEM_MC_BEGIN(2, 2);
3485 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3486 IEM_MC_LOCAL(RTUINT128U, uSrc);
3487 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3489
3490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3492 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3493 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3494
3495 IEM_MC_PREPARE_SSE_USAGE();
3496 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3497 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3498
3499 IEM_MC_ADVANCE_RIP();
3500 IEM_MC_END();
3501 }
3502 return VINF_SUCCESS;
3503}
3504
3505
3506/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3507FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3508{
3509 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3510 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3511}
3512
3513/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3514FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3515{
3516 IEMOP_MNEMONIC(vpcmpeqb_Vx_Wx, "pcmpeqb");
3517 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3518}
3519
3520/* Opcode 0xf3 0x0f 0x74 - invalid */
3521/* Opcode 0xf2 0x0f 0x74 - invalid */
3522
3523
3524/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3525FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3526{
3527 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3528 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3529}
3530
3531/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3532FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3533{
3534 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3535 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3536}
3537
3538/* Opcode 0xf3 0x0f 0x75 - invalid */
3539/* Opcode 0xf2 0x0f 0x75 - invalid */
3540
3541
3542/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3543FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3544{
3545 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3546 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3547}
3548
3549/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3550FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3551{
3552 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "vpcmpeqd");
3553 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3554}
3555
3556/* Opcode 0xf3 0x0f 0x76 - invalid */
3557/* Opcode 0xf2 0x0f 0x76 - invalid */
3558
3559
3560/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3561FNIEMOP_STUB(iemOp_emms);
3562/* Opcode 0x66 0x0f 0x77 - invalid */
3563/* Opcode 0xf3 0x0f 0x77 - invalid */
3564/* Opcode 0xf2 0x0f 0x77 - invalid */
3565
3566/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3567FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3568/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3569FNIEMOP_STUB(iemOp_AmdGrp17);
3570/* Opcode 0xf3 0x0f 0x78 - invalid */
3571/* Opcode 0xf2 0x0f 0x78 - invalid */
3572
3573/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3574FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3575/* Opcode 0x66 0x0f 0x79 - invalid */
3576/* Opcode 0xf3 0x0f 0x79 - invalid */
3577/* Opcode 0xf2 0x0f 0x79 - invalid */
3578
3579/* Opcode 0x0f 0x7a - invalid */
3580/* Opcode 0x66 0x0f 0x7a - invalid */
3581/* Opcode 0xf3 0x0f 0x7a - invalid */
3582/* Opcode 0xf2 0x0f 0x7a - invalid */
3583
3584/* Opcode 0x0f 0x7b - invalid */
3585/* Opcode 0x66 0x0f 0x7b - invalid */
3586/* Opcode 0xf3 0x0f 0x7b - invalid */
3587/* Opcode 0xf2 0x0f 0x7b - invalid */
3588
3589/* Opcode 0x0f 0x7c - invalid */
3590/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3591FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3592/* Opcode 0xf3 0x0f 0x7c - invalid */
3593/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3594FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3595
3596/* Opcode 0x0f 0x7d - invalid */
3597/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
3598FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
3599/* Opcode 0xf3 0x0f 0x7d - invalid */
3600/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
3601FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
3602
3603
3604/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3605FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3606{
3607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3608 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3609 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3610 else
3611 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3612 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3613 {
3614 /* greg, MMX */
3615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3616 IEM_MC_BEGIN(0, 1);
3617 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3618 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3619 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3620 {
3621 IEM_MC_LOCAL(uint64_t, u64Tmp);
3622 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3623 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3624 }
3625 else
3626 {
3627 IEM_MC_LOCAL(uint32_t, u32Tmp);
3628 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3629 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3630 }
3631 IEM_MC_ADVANCE_RIP();
3632 IEM_MC_END();
3633 }
3634 else
3635 {
3636 /* [mem], MMX */
3637 IEM_MC_BEGIN(0, 2);
3638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3639 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3642 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3643 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3644 {
3645 IEM_MC_LOCAL(uint64_t, u64Tmp);
3646 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3647 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3648 }
3649 else
3650 {
3651 IEM_MC_LOCAL(uint32_t, u32Tmp);
3652 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3653 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3654 }
3655 IEM_MC_ADVANCE_RIP();
3656 IEM_MC_END();
3657 }
3658 return VINF_SUCCESS;
3659}
3660
3661/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
3662FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
3663{
3664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3665 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3666 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3667 else
3668 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3669 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3670 {
3671 /* greg, XMM */
3672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3673 IEM_MC_BEGIN(0, 1);
3674 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3675 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3676 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3677 {
3678 IEM_MC_LOCAL(uint64_t, u64Tmp);
3679 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3680 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3681 }
3682 else
3683 {
3684 IEM_MC_LOCAL(uint32_t, u32Tmp);
3685 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3686 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3687 }
3688 IEM_MC_ADVANCE_RIP();
3689 IEM_MC_END();
3690 }
3691 else
3692 {
3693 /* [mem], XMM */
3694 IEM_MC_BEGIN(0, 2);
3695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3696 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3699 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3700 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3701 {
3702 IEM_MC_LOCAL(uint64_t, u64Tmp);
3703 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3704 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3705 }
3706 else
3707 {
3708 IEM_MC_LOCAL(uint32_t, u32Tmp);
3709 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3710 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3711 }
3712 IEM_MC_ADVANCE_RIP();
3713 IEM_MC_END();
3714 }
3715 return VINF_SUCCESS;
3716}
3717
3718/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
3719FNIEMOP_STUB(iemOp_movq_Vq_Wq);
3720/* Opcode 0xf2 0x0f 0x7e - invalid */
3721
3722
3723/** Opcode 0x0f 0x7f - movq Qq, Pq */
3724FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3725{
3726 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3728 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3729 {
3730 /*
3731 * Register, register.
3732 */
3733 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3734 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3736 IEM_MC_BEGIN(0, 1);
3737 IEM_MC_LOCAL(uint64_t, u64Tmp);
3738 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3739 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3740 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3741 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3742 IEM_MC_ADVANCE_RIP();
3743 IEM_MC_END();
3744 }
3745 else
3746 {
3747 /*
3748 * Register, memory.
3749 */
3750 IEM_MC_BEGIN(0, 2);
3751 IEM_MC_LOCAL(uint64_t, u64Tmp);
3752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3753
3754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3756 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3757 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3758
3759 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3760 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3761
3762 IEM_MC_ADVANCE_RIP();
3763 IEM_MC_END();
3764 }
3765 return VINF_SUCCESS;
3766}
3767
3768/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
3769FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
3770{
3771 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
3772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3773 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3774 {
3775 /*
3776 * Register, register.
3777 */
3778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3779 IEM_MC_BEGIN(0, 0);
3780 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3781 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3782 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3783 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3784 IEM_MC_ADVANCE_RIP();
3785 IEM_MC_END();
3786 }
3787 else
3788 {
3789 /*
3790 * Register, memory.
3791 */
3792 IEM_MC_BEGIN(0, 2);
3793 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3795
3796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3798 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3799 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3800
3801 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3802 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3803
3804 IEM_MC_ADVANCE_RIP();
3805 IEM_MC_END();
3806 }
3807 return VINF_SUCCESS;
3808}
3809
3810/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
3811FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
3812{
3813 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3814 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
3815 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3816 {
3817 /*
3818 * Register, register.
3819 */
3820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3821 IEM_MC_BEGIN(0, 0);
3822 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3823 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3824 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3825 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3826 IEM_MC_ADVANCE_RIP();
3827 IEM_MC_END();
3828 }
3829 else
3830 {
3831 /*
3832 * Register, memory.
3833 */
3834 IEM_MC_BEGIN(0, 2);
3835 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3837
3838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3841 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3842
3843 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3844 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3845
3846 IEM_MC_ADVANCE_RIP();
3847 IEM_MC_END();
3848 }
3849 return VINF_SUCCESS;
3850}
3851
3852/* Opcode 0xf2 0x0f 0x7f - invalid */
3853
3854
3855
3856/** Opcode 0x0f 0x80. */
3857FNIEMOP_DEF(iemOp_jo_Jv)
3858{
3859 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3860 IEMOP_HLP_MIN_386();
3861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3862 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3863 {
3864 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3866
3867 IEM_MC_BEGIN(0, 0);
3868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3869 IEM_MC_REL_JMP_S16(i16Imm);
3870 } IEM_MC_ELSE() {
3871 IEM_MC_ADVANCE_RIP();
3872 } IEM_MC_ENDIF();
3873 IEM_MC_END();
3874 }
3875 else
3876 {
3877 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3879
3880 IEM_MC_BEGIN(0, 0);
3881 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3882 IEM_MC_REL_JMP_S32(i32Imm);
3883 } IEM_MC_ELSE() {
3884 IEM_MC_ADVANCE_RIP();
3885 } IEM_MC_ENDIF();
3886 IEM_MC_END();
3887 }
3888 return VINF_SUCCESS;
3889}
3890
3891
3892/** Opcode 0x0f 0x81. */
3893FNIEMOP_DEF(iemOp_jno_Jv)
3894{
3895 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3896 IEMOP_HLP_MIN_386();
3897 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3898 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3899 {
3900 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3902
3903 IEM_MC_BEGIN(0, 0);
3904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3905 IEM_MC_ADVANCE_RIP();
3906 } IEM_MC_ELSE() {
3907 IEM_MC_REL_JMP_S16(i16Imm);
3908 } IEM_MC_ENDIF();
3909 IEM_MC_END();
3910 }
3911 else
3912 {
3913 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3915
3916 IEM_MC_BEGIN(0, 0);
3917 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3918 IEM_MC_ADVANCE_RIP();
3919 } IEM_MC_ELSE() {
3920 IEM_MC_REL_JMP_S32(i32Imm);
3921 } IEM_MC_ENDIF();
3922 IEM_MC_END();
3923 }
3924 return VINF_SUCCESS;
3925}
3926
3927
3928/** Opcode 0x0f 0x82. */
3929FNIEMOP_DEF(iemOp_jc_Jv)
3930{
3931 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3932 IEMOP_HLP_MIN_386();
3933 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3934 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3935 {
3936 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3938
3939 IEM_MC_BEGIN(0, 0);
3940 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3941 IEM_MC_REL_JMP_S16(i16Imm);
3942 } IEM_MC_ELSE() {
3943 IEM_MC_ADVANCE_RIP();
3944 } IEM_MC_ENDIF();
3945 IEM_MC_END();
3946 }
3947 else
3948 {
3949 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3951
3952 IEM_MC_BEGIN(0, 0);
3953 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3954 IEM_MC_REL_JMP_S32(i32Imm);
3955 } IEM_MC_ELSE() {
3956 IEM_MC_ADVANCE_RIP();
3957 } IEM_MC_ENDIF();
3958 IEM_MC_END();
3959 }
3960 return VINF_SUCCESS;
3961}
3962
3963
3964/** Opcode 0x0f 0x83. */
3965FNIEMOP_DEF(iemOp_jnc_Jv)
3966{
3967 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3968 IEMOP_HLP_MIN_386();
3969 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3970 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3971 {
3972 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3974
3975 IEM_MC_BEGIN(0, 0);
3976 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3977 IEM_MC_ADVANCE_RIP();
3978 } IEM_MC_ELSE() {
3979 IEM_MC_REL_JMP_S16(i16Imm);
3980 } IEM_MC_ENDIF();
3981 IEM_MC_END();
3982 }
3983 else
3984 {
3985 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3987
3988 IEM_MC_BEGIN(0, 0);
3989 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3990 IEM_MC_ADVANCE_RIP();
3991 } IEM_MC_ELSE() {
3992 IEM_MC_REL_JMP_S32(i32Imm);
3993 } IEM_MC_ENDIF();
3994 IEM_MC_END();
3995 }
3996 return VINF_SUCCESS;
3997}
3998
3999
4000/** Opcode 0x0f 0x84. */
4001FNIEMOP_DEF(iemOp_je_Jv)
4002{
4003 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4004 IEMOP_HLP_MIN_386();
4005 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4006 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4007 {
4008 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4010
4011 IEM_MC_BEGIN(0, 0);
4012 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4013 IEM_MC_REL_JMP_S16(i16Imm);
4014 } IEM_MC_ELSE() {
4015 IEM_MC_ADVANCE_RIP();
4016 } IEM_MC_ENDIF();
4017 IEM_MC_END();
4018 }
4019 else
4020 {
4021 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4023
4024 IEM_MC_BEGIN(0, 0);
4025 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4026 IEM_MC_REL_JMP_S32(i32Imm);
4027 } IEM_MC_ELSE() {
4028 IEM_MC_ADVANCE_RIP();
4029 } IEM_MC_ENDIF();
4030 IEM_MC_END();
4031 }
4032 return VINF_SUCCESS;
4033}
4034
4035
4036/** Opcode 0x0f 0x85. */
4037FNIEMOP_DEF(iemOp_jne_Jv)
4038{
4039 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4040 IEMOP_HLP_MIN_386();
4041 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4042 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4043 {
4044 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4046
4047 IEM_MC_BEGIN(0, 0);
4048 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4049 IEM_MC_ADVANCE_RIP();
4050 } IEM_MC_ELSE() {
4051 IEM_MC_REL_JMP_S16(i16Imm);
4052 } IEM_MC_ENDIF();
4053 IEM_MC_END();
4054 }
4055 else
4056 {
4057 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4059
4060 IEM_MC_BEGIN(0, 0);
4061 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4062 IEM_MC_ADVANCE_RIP();
4063 } IEM_MC_ELSE() {
4064 IEM_MC_REL_JMP_S32(i32Imm);
4065 } IEM_MC_ENDIF();
4066 IEM_MC_END();
4067 }
4068 return VINF_SUCCESS;
4069}
4070
4071
4072/** Opcode 0x0f 0x86. */
4073FNIEMOP_DEF(iemOp_jbe_Jv)
4074{
4075 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4076 IEMOP_HLP_MIN_386();
4077 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4078 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4079 {
4080 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4082
4083 IEM_MC_BEGIN(0, 0);
4084 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4085 IEM_MC_REL_JMP_S16(i16Imm);
4086 } IEM_MC_ELSE() {
4087 IEM_MC_ADVANCE_RIP();
4088 } IEM_MC_ENDIF();
4089 IEM_MC_END();
4090 }
4091 else
4092 {
4093 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4095
4096 IEM_MC_BEGIN(0, 0);
4097 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4098 IEM_MC_REL_JMP_S32(i32Imm);
4099 } IEM_MC_ELSE() {
4100 IEM_MC_ADVANCE_RIP();
4101 } IEM_MC_ENDIF();
4102 IEM_MC_END();
4103 }
4104 return VINF_SUCCESS;
4105}
4106
4107
4108/** Opcode 0x0f 0x87. */
4109FNIEMOP_DEF(iemOp_jnbe_Jv)
4110{
4111 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4112 IEMOP_HLP_MIN_386();
4113 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4114 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4115 {
4116 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4118
4119 IEM_MC_BEGIN(0, 0);
4120 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4121 IEM_MC_ADVANCE_RIP();
4122 } IEM_MC_ELSE() {
4123 IEM_MC_REL_JMP_S16(i16Imm);
4124 } IEM_MC_ENDIF();
4125 IEM_MC_END();
4126 }
4127 else
4128 {
4129 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4131
4132 IEM_MC_BEGIN(0, 0);
4133 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4134 IEM_MC_ADVANCE_RIP();
4135 } IEM_MC_ELSE() {
4136 IEM_MC_REL_JMP_S32(i32Imm);
4137 } IEM_MC_ENDIF();
4138 IEM_MC_END();
4139 }
4140 return VINF_SUCCESS;
4141}
4142
4143
4144/** Opcode 0x0f 0x88. */
4145FNIEMOP_DEF(iemOp_js_Jv)
4146{
4147 IEMOP_MNEMONIC(js_Jv, "js Jv");
4148 IEMOP_HLP_MIN_386();
4149 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4150 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4151 {
4152 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4154
4155 IEM_MC_BEGIN(0, 0);
4156 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4157 IEM_MC_REL_JMP_S16(i16Imm);
4158 } IEM_MC_ELSE() {
4159 IEM_MC_ADVANCE_RIP();
4160 } IEM_MC_ENDIF();
4161 IEM_MC_END();
4162 }
4163 else
4164 {
4165 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4167
4168 IEM_MC_BEGIN(0, 0);
4169 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4170 IEM_MC_REL_JMP_S32(i32Imm);
4171 } IEM_MC_ELSE() {
4172 IEM_MC_ADVANCE_RIP();
4173 } IEM_MC_ENDIF();
4174 IEM_MC_END();
4175 }
4176 return VINF_SUCCESS;
4177}
4178
4179
4180/** Opcode 0x0f 0x89. */
4181FNIEMOP_DEF(iemOp_jns_Jv)
4182{
4183 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4184 IEMOP_HLP_MIN_386();
4185 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4186 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4187 {
4188 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4190
4191 IEM_MC_BEGIN(0, 0);
4192 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4193 IEM_MC_ADVANCE_RIP();
4194 } IEM_MC_ELSE() {
4195 IEM_MC_REL_JMP_S16(i16Imm);
4196 } IEM_MC_ENDIF();
4197 IEM_MC_END();
4198 }
4199 else
4200 {
4201 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4203
4204 IEM_MC_BEGIN(0, 0);
4205 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4206 IEM_MC_ADVANCE_RIP();
4207 } IEM_MC_ELSE() {
4208 IEM_MC_REL_JMP_S32(i32Imm);
4209 } IEM_MC_ENDIF();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
4215
4216/** Opcode 0x0f 0x8a. */
4217FNIEMOP_DEF(iemOp_jp_Jv)
4218{
4219 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4220 IEMOP_HLP_MIN_386();
4221 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4222 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4223 {
4224 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4226
4227 IEM_MC_BEGIN(0, 0);
4228 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4229 IEM_MC_REL_JMP_S16(i16Imm);
4230 } IEM_MC_ELSE() {
4231 IEM_MC_ADVANCE_RIP();
4232 } IEM_MC_ENDIF();
4233 IEM_MC_END();
4234 }
4235 else
4236 {
4237 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4239
4240 IEM_MC_BEGIN(0, 0);
4241 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4242 IEM_MC_REL_JMP_S32(i32Imm);
4243 } IEM_MC_ELSE() {
4244 IEM_MC_ADVANCE_RIP();
4245 } IEM_MC_ENDIF();
4246 IEM_MC_END();
4247 }
4248 return VINF_SUCCESS;
4249}
4250
4251
4252/** Opcode 0x0f 0x8b. */
4253FNIEMOP_DEF(iemOp_jnp_Jv)
4254{
4255 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4256 IEMOP_HLP_MIN_386();
4257 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4258 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4259 {
4260 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4262
4263 IEM_MC_BEGIN(0, 0);
4264 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4265 IEM_MC_ADVANCE_RIP();
4266 } IEM_MC_ELSE() {
4267 IEM_MC_REL_JMP_S16(i16Imm);
4268 } IEM_MC_ENDIF();
4269 IEM_MC_END();
4270 }
4271 else
4272 {
4273 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4275
4276 IEM_MC_BEGIN(0, 0);
4277 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4278 IEM_MC_ADVANCE_RIP();
4279 } IEM_MC_ELSE() {
4280 IEM_MC_REL_JMP_S32(i32Imm);
4281 } IEM_MC_ENDIF();
4282 IEM_MC_END();
4283 }
4284 return VINF_SUCCESS;
4285}
4286
4287
4288/** Opcode 0x0f 0x8c. */
4289FNIEMOP_DEF(iemOp_jl_Jv)
4290{
4291 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4292 IEMOP_HLP_MIN_386();
4293 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4294 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4295 {
4296 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4298
4299 IEM_MC_BEGIN(0, 0);
4300 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4301 IEM_MC_REL_JMP_S16(i16Imm);
4302 } IEM_MC_ELSE() {
4303 IEM_MC_ADVANCE_RIP();
4304 } IEM_MC_ENDIF();
4305 IEM_MC_END();
4306 }
4307 else
4308 {
4309 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4311
4312 IEM_MC_BEGIN(0, 0);
4313 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4314 IEM_MC_REL_JMP_S32(i32Imm);
4315 } IEM_MC_ELSE() {
4316 IEM_MC_ADVANCE_RIP();
4317 } IEM_MC_ENDIF();
4318 IEM_MC_END();
4319 }
4320 return VINF_SUCCESS;
4321}
4322
4323
4324/** Opcode 0x0f 0x8d. */
4325FNIEMOP_DEF(iemOp_jnl_Jv)
4326{
4327 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4328 IEMOP_HLP_MIN_386();
4329 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4330 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4331 {
4332 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4334
4335 IEM_MC_BEGIN(0, 0);
4336 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4337 IEM_MC_ADVANCE_RIP();
4338 } IEM_MC_ELSE() {
4339 IEM_MC_REL_JMP_S16(i16Imm);
4340 } IEM_MC_ENDIF();
4341 IEM_MC_END();
4342 }
4343 else
4344 {
4345 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4347
4348 IEM_MC_BEGIN(0, 0);
4349 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4350 IEM_MC_ADVANCE_RIP();
4351 } IEM_MC_ELSE() {
4352 IEM_MC_REL_JMP_S32(i32Imm);
4353 } IEM_MC_ENDIF();
4354 IEM_MC_END();
4355 }
4356 return VINF_SUCCESS;
4357}
4358
4359
4360/** Opcode 0x0f 0x8e. */
4361FNIEMOP_DEF(iemOp_jle_Jv)
4362{
4363 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4364 IEMOP_HLP_MIN_386();
4365 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4366 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4367 {
4368 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4370
4371 IEM_MC_BEGIN(0, 0);
4372 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4373 IEM_MC_REL_JMP_S16(i16Imm);
4374 } IEM_MC_ELSE() {
4375 IEM_MC_ADVANCE_RIP();
4376 } IEM_MC_ENDIF();
4377 IEM_MC_END();
4378 }
4379 else
4380 {
4381 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4383
4384 IEM_MC_BEGIN(0, 0);
4385 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4386 IEM_MC_REL_JMP_S32(i32Imm);
4387 } IEM_MC_ELSE() {
4388 IEM_MC_ADVANCE_RIP();
4389 } IEM_MC_ENDIF();
4390 IEM_MC_END();
4391 }
4392 return VINF_SUCCESS;
4393}
4394
4395
4396/** Opcode 0x0f 0x8f. */
4397FNIEMOP_DEF(iemOp_jnle_Jv)
4398{
4399 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4400 IEMOP_HLP_MIN_386();
4401 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4402 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4403 {
4404 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4406
4407 IEM_MC_BEGIN(0, 0);
4408 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4409 IEM_MC_ADVANCE_RIP();
4410 } IEM_MC_ELSE() {
4411 IEM_MC_REL_JMP_S16(i16Imm);
4412 } IEM_MC_ENDIF();
4413 IEM_MC_END();
4414 }
4415 else
4416 {
4417 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4419
4420 IEM_MC_BEGIN(0, 0);
4421 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4422 IEM_MC_ADVANCE_RIP();
4423 } IEM_MC_ELSE() {
4424 IEM_MC_REL_JMP_S32(i32Imm);
4425 } IEM_MC_ENDIF();
4426 IEM_MC_END();
4427 }
4428 return VINF_SUCCESS;
4429}
4430
4431
4432/** Opcode 0x0f 0x90. */
4433FNIEMOP_DEF(iemOp_seto_Eb)
4434{
4435 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4436 IEMOP_HLP_MIN_386();
4437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4438
4439 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4440 * any way. AMD says it's "unused", whatever that means. We're
4441 * ignoring for now. */
4442 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4443 {
4444 /* register target */
4445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4446 IEM_MC_BEGIN(0, 0);
4447 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4448 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4449 } IEM_MC_ELSE() {
4450 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4451 } IEM_MC_ENDIF();
4452 IEM_MC_ADVANCE_RIP();
4453 IEM_MC_END();
4454 }
4455 else
4456 {
4457 /* memory target */
4458 IEM_MC_BEGIN(0, 1);
4459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4462 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4463 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4464 } IEM_MC_ELSE() {
4465 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4466 } IEM_MC_ENDIF();
4467 IEM_MC_ADVANCE_RIP();
4468 IEM_MC_END();
4469 }
4470 return VINF_SUCCESS;
4471}
4472
4473
4474/** Opcode 0x0f 0x91. */
4475FNIEMOP_DEF(iemOp_setno_Eb)
4476{
4477 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4478 IEMOP_HLP_MIN_386();
4479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4480
4481 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4482 * any way. AMD says it's "unused", whatever that means. We're
4483 * ignoring for now. */
4484 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4485 {
4486 /* register target */
4487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4488 IEM_MC_BEGIN(0, 0);
4489 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4490 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4491 } IEM_MC_ELSE() {
4492 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4493 } IEM_MC_ENDIF();
4494 IEM_MC_ADVANCE_RIP();
4495 IEM_MC_END();
4496 }
4497 else
4498 {
4499 /* memory target */
4500 IEM_MC_BEGIN(0, 1);
4501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4504 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4505 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4506 } IEM_MC_ELSE() {
4507 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4508 } IEM_MC_ENDIF();
4509 IEM_MC_ADVANCE_RIP();
4510 IEM_MC_END();
4511 }
4512 return VINF_SUCCESS;
4513}
4514
4515
4516/** Opcode 0x0f 0x92. */
4517FNIEMOP_DEF(iemOp_setc_Eb)
4518{
4519 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4520 IEMOP_HLP_MIN_386();
4521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4522
4523 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4524 * any way. AMD says it's "unused", whatever that means. We're
4525 * ignoring for now. */
4526 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4527 {
4528 /* register target */
4529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4530 IEM_MC_BEGIN(0, 0);
4531 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4532 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4533 } IEM_MC_ELSE() {
4534 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4535 } IEM_MC_ENDIF();
4536 IEM_MC_ADVANCE_RIP();
4537 IEM_MC_END();
4538 }
4539 else
4540 {
4541 /* memory target */
4542 IEM_MC_BEGIN(0, 1);
4543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4546 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4547 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4548 } IEM_MC_ELSE() {
4549 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4550 } IEM_MC_ENDIF();
4551 IEM_MC_ADVANCE_RIP();
4552 IEM_MC_END();
4553 }
4554 return VINF_SUCCESS;
4555}
4556
4557
4558/** Opcode 0x0f 0x93. */
4559FNIEMOP_DEF(iemOp_setnc_Eb)
4560{
4561 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4562 IEMOP_HLP_MIN_386();
4563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4564
4565 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4566 * any way. AMD says it's "unused", whatever that means. We're
4567 * ignoring for now. */
4568 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4569 {
4570 /* register target */
4571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4572 IEM_MC_BEGIN(0, 0);
4573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4574 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4575 } IEM_MC_ELSE() {
4576 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4577 } IEM_MC_ENDIF();
4578 IEM_MC_ADVANCE_RIP();
4579 IEM_MC_END();
4580 }
4581 else
4582 {
4583 /* memory target */
4584 IEM_MC_BEGIN(0, 1);
4585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4589 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4590 } IEM_MC_ELSE() {
4591 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4592 } IEM_MC_ENDIF();
4593 IEM_MC_ADVANCE_RIP();
4594 IEM_MC_END();
4595 }
4596 return VINF_SUCCESS;
4597}
4598
4599
4600/** Opcode 0x0f 0x94. */
4601FNIEMOP_DEF(iemOp_sete_Eb)
4602{
4603 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4604 IEMOP_HLP_MIN_386();
4605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4606
4607 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4608 * any way. AMD says it's "unused", whatever that means. We're
4609 * ignoring for now. */
4610 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4611 {
4612 /* register target */
4613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4614 IEM_MC_BEGIN(0, 0);
4615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4616 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4617 } IEM_MC_ELSE() {
4618 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4619 } IEM_MC_ENDIF();
4620 IEM_MC_ADVANCE_RIP();
4621 IEM_MC_END();
4622 }
4623 else
4624 {
4625 /* memory target */
4626 IEM_MC_BEGIN(0, 1);
4627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4630 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4631 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4632 } IEM_MC_ELSE() {
4633 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4634 } IEM_MC_ENDIF();
4635 IEM_MC_ADVANCE_RIP();
4636 IEM_MC_END();
4637 }
4638 return VINF_SUCCESS;
4639}
4640
4641
4642/** Opcode 0x0f 0x95. */
4643FNIEMOP_DEF(iemOp_setne_Eb)
4644{
4645 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4646 IEMOP_HLP_MIN_386();
4647 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4648
4649 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4650 * any way. AMD says it's "unused", whatever that means. We're
4651 * ignoring for now. */
4652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4653 {
4654 /* register target */
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4656 IEM_MC_BEGIN(0, 0);
4657 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4658 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4659 } IEM_MC_ELSE() {
4660 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4661 } IEM_MC_ENDIF();
4662 IEM_MC_ADVANCE_RIP();
4663 IEM_MC_END();
4664 }
4665 else
4666 {
4667 /* memory target */
4668 IEM_MC_BEGIN(0, 1);
4669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4672 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4673 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4674 } IEM_MC_ELSE() {
4675 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4676 } IEM_MC_ENDIF();
4677 IEM_MC_ADVANCE_RIP();
4678 IEM_MC_END();
4679 }
4680 return VINF_SUCCESS;
4681}
4682
4683
4684/** Opcode 0x0f 0x96. */
4685FNIEMOP_DEF(iemOp_setbe_Eb)
4686{
4687 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4688 IEMOP_HLP_MIN_386();
4689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4690
4691 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4692 * any way. AMD says it's "unused", whatever that means. We're
4693 * ignoring for now. */
4694 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4695 {
4696 /* register target */
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 IEM_MC_BEGIN(0, 0);
4699 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4700 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4701 } IEM_MC_ELSE() {
4702 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4703 } IEM_MC_ENDIF();
4704 IEM_MC_ADVANCE_RIP();
4705 IEM_MC_END();
4706 }
4707 else
4708 {
4709 /* memory target */
4710 IEM_MC_BEGIN(0, 1);
4711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4714 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4715 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4716 } IEM_MC_ELSE() {
4717 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4718 } IEM_MC_ENDIF();
4719 IEM_MC_ADVANCE_RIP();
4720 IEM_MC_END();
4721 }
4722 return VINF_SUCCESS;
4723}
4724
4725
4726/** Opcode 0x0f 0x97. */
4727FNIEMOP_DEF(iemOp_setnbe_Eb)
4728{
4729 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4730 IEMOP_HLP_MIN_386();
4731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4732
4733 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4734 * any way. AMD says it's "unused", whatever that means. We're
4735 * ignoring for now. */
4736 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4737 {
4738 /* register target */
4739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4740 IEM_MC_BEGIN(0, 0);
4741 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4742 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4743 } IEM_MC_ELSE() {
4744 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4745 } IEM_MC_ENDIF();
4746 IEM_MC_ADVANCE_RIP();
4747 IEM_MC_END();
4748 }
4749 else
4750 {
4751 /* memory target */
4752 IEM_MC_BEGIN(0, 1);
4753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4756 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4757 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4758 } IEM_MC_ELSE() {
4759 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4760 } IEM_MC_ENDIF();
4761 IEM_MC_ADVANCE_RIP();
4762 IEM_MC_END();
4763 }
4764 return VINF_SUCCESS;
4765}
4766
4767
4768/** Opcode 0x0f 0x98. */
4769FNIEMOP_DEF(iemOp_sets_Eb)
4770{
4771 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4772 IEMOP_HLP_MIN_386();
4773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4774
4775 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4776 * any way. AMD says it's "unused", whatever that means. We're
4777 * ignoring for now. */
4778 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4779 {
4780 /* register target */
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782 IEM_MC_BEGIN(0, 0);
4783 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4784 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4785 } IEM_MC_ELSE() {
4786 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4787 } IEM_MC_ENDIF();
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 }
4791 else
4792 {
4793 /* memory target */
4794 IEM_MC_BEGIN(0, 1);
4795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4799 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4800 } IEM_MC_ELSE() {
4801 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4802 } IEM_MC_ENDIF();
4803 IEM_MC_ADVANCE_RIP();
4804 IEM_MC_END();
4805 }
4806 return VINF_SUCCESS;
4807}
4808
4809
4810/** Opcode 0x0f 0x99. */
4811FNIEMOP_DEF(iemOp_setns_Eb)
4812{
4813 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4814 IEMOP_HLP_MIN_386();
4815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4816
4817 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4818 * any way. AMD says it's "unused", whatever that means. We're
4819 * ignoring for now. */
4820 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4821 {
4822 /* register target */
4823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4824 IEM_MC_BEGIN(0, 0);
4825 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4826 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4827 } IEM_MC_ELSE() {
4828 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4829 } IEM_MC_ENDIF();
4830 IEM_MC_ADVANCE_RIP();
4831 IEM_MC_END();
4832 }
4833 else
4834 {
4835 /* memory target */
4836 IEM_MC_BEGIN(0, 1);
4837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4840 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4841 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4842 } IEM_MC_ELSE() {
4843 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4844 } IEM_MC_ENDIF();
4845 IEM_MC_ADVANCE_RIP();
4846 IEM_MC_END();
4847 }
4848 return VINF_SUCCESS;
4849}
4850
4851
4852/** Opcode 0x0f 0x9a. */
4853FNIEMOP_DEF(iemOp_setp_Eb)
4854{
4855 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4856 IEMOP_HLP_MIN_386();
4857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4858
4859 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4860 * any way. AMD says it's "unused", whatever that means. We're
4861 * ignoring for now. */
4862 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4863 {
4864 /* register target */
4865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4866 IEM_MC_BEGIN(0, 0);
4867 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4868 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4869 } IEM_MC_ELSE() {
4870 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4871 } IEM_MC_ENDIF();
4872 IEM_MC_ADVANCE_RIP();
4873 IEM_MC_END();
4874 }
4875 else
4876 {
4877 /* memory target */
4878 IEM_MC_BEGIN(0, 1);
4879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4882 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4883 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4884 } IEM_MC_ELSE() {
4885 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4886 } IEM_MC_ENDIF();
4887 IEM_MC_ADVANCE_RIP();
4888 IEM_MC_END();
4889 }
4890 return VINF_SUCCESS;
4891}
4892
4893
4894/** Opcode 0x0f 0x9b. */
4895FNIEMOP_DEF(iemOp_setnp_Eb)
4896{
4897 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4898 IEMOP_HLP_MIN_386();
4899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4900
4901 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4902 * any way. AMD says it's "unused", whatever that means. We're
4903 * ignoring for now. */
4904 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4905 {
4906 /* register target */
4907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4908 IEM_MC_BEGIN(0, 0);
4909 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4910 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4911 } IEM_MC_ELSE() {
4912 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4913 } IEM_MC_ENDIF();
4914 IEM_MC_ADVANCE_RIP();
4915 IEM_MC_END();
4916 }
4917 else
4918 {
4919 /* memory target */
4920 IEM_MC_BEGIN(0, 1);
4921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4924 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4925 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4926 } IEM_MC_ELSE() {
4927 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4928 } IEM_MC_ENDIF();
4929 IEM_MC_ADVANCE_RIP();
4930 IEM_MC_END();
4931 }
4932 return VINF_SUCCESS;
4933}
4934
4935
4936/** Opcode 0x0f 0x9c. */
4937FNIEMOP_DEF(iemOp_setl_Eb)
4938{
4939 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4940 IEMOP_HLP_MIN_386();
4941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4942
4943 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4944 * any way. AMD says it's "unused", whatever that means. We're
4945 * ignoring for now. */
4946 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4947 {
4948 /* register target */
4949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4950 IEM_MC_BEGIN(0, 0);
4951 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4952 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4953 } IEM_MC_ELSE() {
4954 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4955 } IEM_MC_ENDIF();
4956 IEM_MC_ADVANCE_RIP();
4957 IEM_MC_END();
4958 }
4959 else
4960 {
4961 /* memory target */
4962 IEM_MC_BEGIN(0, 1);
4963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4966 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4967 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4968 } IEM_MC_ELSE() {
4969 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4970 } IEM_MC_ENDIF();
4971 IEM_MC_ADVANCE_RIP();
4972 IEM_MC_END();
4973 }
4974 return VINF_SUCCESS;
4975}
4976
4977
4978/** Opcode 0x0f 0x9d. */
4979FNIEMOP_DEF(iemOp_setnl_Eb)
4980{
4981 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4982 IEMOP_HLP_MIN_386();
4983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4984
4985 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4986 * any way. AMD says it's "unused", whatever that means. We're
4987 * ignoring for now. */
4988 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4989 {
4990 /* register target */
4991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4992 IEM_MC_BEGIN(0, 0);
4993 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4994 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4995 } IEM_MC_ELSE() {
4996 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4997 } IEM_MC_ENDIF();
4998 IEM_MC_ADVANCE_RIP();
4999 IEM_MC_END();
5000 }
5001 else
5002 {
5003 /* memory target */
5004 IEM_MC_BEGIN(0, 1);
5005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5008 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5009 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5010 } IEM_MC_ELSE() {
5011 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5012 } IEM_MC_ENDIF();
5013 IEM_MC_ADVANCE_RIP();
5014 IEM_MC_END();
5015 }
5016 return VINF_SUCCESS;
5017}
5018
5019
5020/** Opcode 0x0f 0x9e. */
5021FNIEMOP_DEF(iemOp_setle_Eb)
5022{
5023 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5024 IEMOP_HLP_MIN_386();
5025 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5026
5027 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5028 * any way. AMD says it's "unused", whatever that means. We're
5029 * ignoring for now. */
5030 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5031 {
5032 /* register target */
5033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5034 IEM_MC_BEGIN(0, 0);
5035 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5036 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5037 } IEM_MC_ELSE() {
5038 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5039 } IEM_MC_ENDIF();
5040 IEM_MC_ADVANCE_RIP();
5041 IEM_MC_END();
5042 }
5043 else
5044 {
5045 /* memory target */
5046 IEM_MC_BEGIN(0, 1);
5047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5050 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5051 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5052 } IEM_MC_ELSE() {
5053 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5054 } IEM_MC_ENDIF();
5055 IEM_MC_ADVANCE_RIP();
5056 IEM_MC_END();
5057 }
5058 return VINF_SUCCESS;
5059}
5060
5061
5062/** Opcode 0x0f 0x9f. */
5063FNIEMOP_DEF(iemOp_setnle_Eb)
5064{
5065 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5066 IEMOP_HLP_MIN_386();
5067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5068
5069 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5070 * any way. AMD says it's "unused", whatever that means. We're
5071 * ignoring for now. */
5072 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5073 {
5074 /* register target */
5075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5076 IEM_MC_BEGIN(0, 0);
5077 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5078 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5079 } IEM_MC_ELSE() {
5080 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5081 } IEM_MC_ENDIF();
5082 IEM_MC_ADVANCE_RIP();
5083 IEM_MC_END();
5084 }
5085 else
5086 {
5087 /* memory target */
5088 IEM_MC_BEGIN(0, 1);
5089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5092 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5093 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5094 } IEM_MC_ELSE() {
5095 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5096 } IEM_MC_ENDIF();
5097 IEM_MC_ADVANCE_RIP();
5098 IEM_MC_END();
5099 }
5100 return VINF_SUCCESS;
5101}
5102
5103
5104/**
5105 * Common 'push segment-register' helper.
5106 */
5107FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5108{
5109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5110 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5111 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5112
5113 switch (pVCpu->iem.s.enmEffOpSize)
5114 {
5115 case IEMMODE_16BIT:
5116 IEM_MC_BEGIN(0, 1);
5117 IEM_MC_LOCAL(uint16_t, u16Value);
5118 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5119 IEM_MC_PUSH_U16(u16Value);
5120 IEM_MC_ADVANCE_RIP();
5121 IEM_MC_END();
5122 break;
5123
5124 case IEMMODE_32BIT:
5125 IEM_MC_BEGIN(0, 1);
5126 IEM_MC_LOCAL(uint32_t, u32Value);
5127 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5128 IEM_MC_PUSH_U32_SREG(u32Value);
5129 IEM_MC_ADVANCE_RIP();
5130 IEM_MC_END();
5131 break;
5132
5133 case IEMMODE_64BIT:
5134 IEM_MC_BEGIN(0, 1);
5135 IEM_MC_LOCAL(uint64_t, u64Value);
5136 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5137 IEM_MC_PUSH_U64(u64Value);
5138 IEM_MC_ADVANCE_RIP();
5139 IEM_MC_END();
5140 break;
5141 }
5142
5143 return VINF_SUCCESS;
5144}
5145
5146
5147/** Opcode 0x0f 0xa0. */
5148FNIEMOP_DEF(iemOp_push_fs)
5149{
5150 IEMOP_MNEMONIC(push_fs, "push fs");
5151 IEMOP_HLP_MIN_386();
5152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5153 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5154}
5155
5156
5157/** Opcode 0x0f 0xa1. */
5158FNIEMOP_DEF(iemOp_pop_fs)
5159{
5160 IEMOP_MNEMONIC(pop_fs, "pop fs");
5161 IEMOP_HLP_MIN_386();
5162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5163 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5164}
5165
5166
5167/** Opcode 0x0f 0xa2. */
5168FNIEMOP_DEF(iemOp_cpuid)
5169{
5170 IEMOP_MNEMONIC(cpuid, "cpuid");
5171 IEMOP_HLP_MIN_486(); /* not all 486es. */
5172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5173 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5174}
5175
5176
5177/**
5178 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5179 * iemOp_bts_Ev_Gv.
5180 */
5181FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5182{
5183 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5184 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5185
5186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5187 {
5188 /* register destination. */
5189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5190 switch (pVCpu->iem.s.enmEffOpSize)
5191 {
5192 case IEMMODE_16BIT:
5193 IEM_MC_BEGIN(3, 0);
5194 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5195 IEM_MC_ARG(uint16_t, u16Src, 1);
5196 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5197
5198 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5199 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5200 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5201 IEM_MC_REF_EFLAGS(pEFlags);
5202 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5203
5204 IEM_MC_ADVANCE_RIP();
5205 IEM_MC_END();
5206 return VINF_SUCCESS;
5207
5208 case IEMMODE_32BIT:
5209 IEM_MC_BEGIN(3, 0);
5210 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5211 IEM_MC_ARG(uint32_t, u32Src, 1);
5212 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5213
5214 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5215 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5216 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5217 IEM_MC_REF_EFLAGS(pEFlags);
5218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5219
5220 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5221 IEM_MC_ADVANCE_RIP();
5222 IEM_MC_END();
5223 return VINF_SUCCESS;
5224
5225 case IEMMODE_64BIT:
5226 IEM_MC_BEGIN(3, 0);
5227 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5228 IEM_MC_ARG(uint64_t, u64Src, 1);
5229 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5230
5231 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5232 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5233 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5234 IEM_MC_REF_EFLAGS(pEFlags);
5235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5236
5237 IEM_MC_ADVANCE_RIP();
5238 IEM_MC_END();
5239 return VINF_SUCCESS;
5240
5241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5242 }
5243 }
5244 else
5245 {
5246 /* memory destination. */
5247
5248 uint32_t fAccess;
5249 if (pImpl->pfnLockedU16)
5250 fAccess = IEM_ACCESS_DATA_RW;
5251 else /* BT */
5252 fAccess = IEM_ACCESS_DATA_R;
5253
5254 /** @todo test negative bit offsets! */
5255 switch (pVCpu->iem.s.enmEffOpSize)
5256 {
5257 case IEMMODE_16BIT:
5258 IEM_MC_BEGIN(3, 2);
5259 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5260 IEM_MC_ARG(uint16_t, u16Src, 1);
5261 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5263 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5264
5265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5266 if (pImpl->pfnLockedU16)
5267 IEMOP_HLP_DONE_DECODING();
5268 else
5269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5270 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5271 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5272 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5273 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5274 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5275 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5276 IEM_MC_FETCH_EFLAGS(EFlags);
5277
5278 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5279 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5280 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5281 else
5282 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5283 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5284
5285 IEM_MC_COMMIT_EFLAGS(EFlags);
5286 IEM_MC_ADVANCE_RIP();
5287 IEM_MC_END();
5288 return VINF_SUCCESS;
5289
5290 case IEMMODE_32BIT:
5291 IEM_MC_BEGIN(3, 2);
5292 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5293 IEM_MC_ARG(uint32_t, u32Src, 1);
5294 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5296 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5297
5298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5299 if (pImpl->pfnLockedU16)
5300 IEMOP_HLP_DONE_DECODING();
5301 else
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5304 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5305 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5306 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5307 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5308 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5309 IEM_MC_FETCH_EFLAGS(EFlags);
5310
5311 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5312 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5313 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5314 else
5315 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5316 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5317
5318 IEM_MC_COMMIT_EFLAGS(EFlags);
5319 IEM_MC_ADVANCE_RIP();
5320 IEM_MC_END();
5321 return VINF_SUCCESS;
5322
5323 case IEMMODE_64BIT:
5324 IEM_MC_BEGIN(3, 2);
5325 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5326 IEM_MC_ARG(uint64_t, u64Src, 1);
5327 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5329 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5330
5331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5332 if (pImpl->pfnLockedU16)
5333 IEMOP_HLP_DONE_DECODING();
5334 else
5335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5336 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5337 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5338 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5339 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5340 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5341 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5342 IEM_MC_FETCH_EFLAGS(EFlags);
5343
5344 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5345 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5347 else
5348 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5349 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5350
5351 IEM_MC_COMMIT_EFLAGS(EFlags);
5352 IEM_MC_ADVANCE_RIP();
5353 IEM_MC_END();
5354 return VINF_SUCCESS;
5355
5356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5357 }
5358 }
5359}
5360
5361
5362/** Opcode 0x0f 0xa3. */
5363FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5364{
5365 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5366 IEMOP_HLP_MIN_386();
5367 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5368}
5369
5370
5371/**
5372 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5373 */
5374FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5375{
5376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5377 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5378
5379 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5380 {
5381 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5383
5384 switch (pVCpu->iem.s.enmEffOpSize)
5385 {
5386 case IEMMODE_16BIT:
5387 IEM_MC_BEGIN(4, 0);
5388 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5389 IEM_MC_ARG(uint16_t, u16Src, 1);
5390 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5391 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5392
5393 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5394 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5395 IEM_MC_REF_EFLAGS(pEFlags);
5396 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5397
5398 IEM_MC_ADVANCE_RIP();
5399 IEM_MC_END();
5400 return VINF_SUCCESS;
5401
5402 case IEMMODE_32BIT:
5403 IEM_MC_BEGIN(4, 0);
5404 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5405 IEM_MC_ARG(uint32_t, u32Src, 1);
5406 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5407 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5408
5409 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5410 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5411 IEM_MC_REF_EFLAGS(pEFlags);
5412 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5413
5414 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5415 IEM_MC_ADVANCE_RIP();
5416 IEM_MC_END();
5417 return VINF_SUCCESS;
5418
5419 case IEMMODE_64BIT:
5420 IEM_MC_BEGIN(4, 0);
5421 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5422 IEM_MC_ARG(uint64_t, u64Src, 1);
5423 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5424 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5425
5426 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5427 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5428 IEM_MC_REF_EFLAGS(pEFlags);
5429 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5430
5431 IEM_MC_ADVANCE_RIP();
5432 IEM_MC_END();
5433 return VINF_SUCCESS;
5434
5435 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5436 }
5437 }
5438 else
5439 {
5440 switch (pVCpu->iem.s.enmEffOpSize)
5441 {
5442 case IEMMODE_16BIT:
5443 IEM_MC_BEGIN(4, 2);
5444 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5445 IEM_MC_ARG(uint16_t, u16Src, 1);
5446 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5447 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5449
5450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5451 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5452 IEM_MC_ASSIGN(cShiftArg, cShift);
5453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5454 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5455 IEM_MC_FETCH_EFLAGS(EFlags);
5456 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5457 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5458
5459 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5460 IEM_MC_COMMIT_EFLAGS(EFlags);
5461 IEM_MC_ADVANCE_RIP();
5462 IEM_MC_END();
5463 return VINF_SUCCESS;
5464
5465 case IEMMODE_32BIT:
5466 IEM_MC_BEGIN(4, 2);
5467 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5468 IEM_MC_ARG(uint32_t, u32Src, 1);
5469 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5470 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5472
5473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5474 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5475 IEM_MC_ASSIGN(cShiftArg, cShift);
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5478 IEM_MC_FETCH_EFLAGS(EFlags);
5479 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5480 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5481
5482 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5483 IEM_MC_COMMIT_EFLAGS(EFlags);
5484 IEM_MC_ADVANCE_RIP();
5485 IEM_MC_END();
5486 return VINF_SUCCESS;
5487
5488 case IEMMODE_64BIT:
5489 IEM_MC_BEGIN(4, 2);
5490 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5491 IEM_MC_ARG(uint64_t, u64Src, 1);
5492 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5493 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5495
5496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5497 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5498 IEM_MC_ASSIGN(cShiftArg, cShift);
5499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5500 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5501 IEM_MC_FETCH_EFLAGS(EFlags);
5502 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5503 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5504
5505 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5506 IEM_MC_COMMIT_EFLAGS(EFlags);
5507 IEM_MC_ADVANCE_RIP();
5508 IEM_MC_END();
5509 return VINF_SUCCESS;
5510
5511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5512 }
5513 }
5514}
5515
5516
5517/**
5518 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5519 */
5520FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5521{
5522 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5523 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5524
5525 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5526 {
5527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5528
5529 switch (pVCpu->iem.s.enmEffOpSize)
5530 {
5531 case IEMMODE_16BIT:
5532 IEM_MC_BEGIN(4, 0);
5533 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5534 IEM_MC_ARG(uint16_t, u16Src, 1);
5535 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5536 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5537
5538 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5539 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5540 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5541 IEM_MC_REF_EFLAGS(pEFlags);
5542 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5543
5544 IEM_MC_ADVANCE_RIP();
5545 IEM_MC_END();
5546 return VINF_SUCCESS;
5547
5548 case IEMMODE_32BIT:
5549 IEM_MC_BEGIN(4, 0);
5550 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5551 IEM_MC_ARG(uint32_t, u32Src, 1);
5552 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5553 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5554
5555 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5556 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5557 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5558 IEM_MC_REF_EFLAGS(pEFlags);
5559 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5560
5561 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5562 IEM_MC_ADVANCE_RIP();
5563 IEM_MC_END();
5564 return VINF_SUCCESS;
5565
5566 case IEMMODE_64BIT:
5567 IEM_MC_BEGIN(4, 0);
5568 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5569 IEM_MC_ARG(uint64_t, u64Src, 1);
5570 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5571 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5572
5573 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5574 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5575 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5576 IEM_MC_REF_EFLAGS(pEFlags);
5577 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5578
5579 IEM_MC_ADVANCE_RIP();
5580 IEM_MC_END();
5581 return VINF_SUCCESS;
5582
5583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5584 }
5585 }
5586 else
5587 {
5588 switch (pVCpu->iem.s.enmEffOpSize)
5589 {
5590 case IEMMODE_16BIT:
5591 IEM_MC_BEGIN(4, 2);
5592 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5593 IEM_MC_ARG(uint16_t, u16Src, 1);
5594 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5595 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5597
5598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5600 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5601 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5602 IEM_MC_FETCH_EFLAGS(EFlags);
5603 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5604 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5605
5606 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5607 IEM_MC_COMMIT_EFLAGS(EFlags);
5608 IEM_MC_ADVANCE_RIP();
5609 IEM_MC_END();
5610 return VINF_SUCCESS;
5611
5612 case IEMMODE_32BIT:
5613 IEM_MC_BEGIN(4, 2);
5614 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5615 IEM_MC_ARG(uint32_t, u32Src, 1);
5616 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5617 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5619
5620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5622 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5623 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5624 IEM_MC_FETCH_EFLAGS(EFlags);
5625 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5626 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5627
5628 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5629 IEM_MC_COMMIT_EFLAGS(EFlags);
5630 IEM_MC_ADVANCE_RIP();
5631 IEM_MC_END();
5632 return VINF_SUCCESS;
5633
5634 case IEMMODE_64BIT:
5635 IEM_MC_BEGIN(4, 2);
5636 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5637 IEM_MC_ARG(uint64_t, u64Src, 1);
5638 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5639 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5641
5642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5644 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5645 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5646 IEM_MC_FETCH_EFLAGS(EFlags);
5647 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5648 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5649
5650 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5651 IEM_MC_COMMIT_EFLAGS(EFlags);
5652 IEM_MC_ADVANCE_RIP();
5653 IEM_MC_END();
5654 return VINF_SUCCESS;
5655
5656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5657 }
5658 }
5659}
5660
5661
5662
5663/** Opcode 0x0f 0xa4. */
5664FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5665{
5666 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5667 IEMOP_HLP_MIN_386();
5668 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5669}
5670
5671
5672/** Opcode 0x0f 0xa5. */
5673FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5674{
5675 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5676 IEMOP_HLP_MIN_386();
5677 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5678}
5679
5680
5681/** Opcode 0x0f 0xa8. */
5682FNIEMOP_DEF(iemOp_push_gs)
5683{
5684 IEMOP_MNEMONIC(push_gs, "push gs");
5685 IEMOP_HLP_MIN_386();
5686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5687 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5688}
5689
5690
5691/** Opcode 0x0f 0xa9. */
5692FNIEMOP_DEF(iemOp_pop_gs)
5693{
5694 IEMOP_MNEMONIC(pop_gs, "pop gs");
5695 IEMOP_HLP_MIN_386();
5696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5697 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5698}
5699
5700
5701/** Opcode 0x0f 0xaa. */
5702FNIEMOP_STUB(iemOp_rsm);
5703//IEMOP_HLP_MIN_386();
5704
5705
5706/** Opcode 0x0f 0xab. */
5707FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5708{
5709 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5710 IEMOP_HLP_MIN_386();
5711 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5712}
5713
5714
5715/** Opcode 0x0f 0xac. */
5716FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5717{
5718 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5719 IEMOP_HLP_MIN_386();
5720 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5721}
5722
5723
5724/** Opcode 0x0f 0xad. */
5725FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5726{
5727 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5728 IEMOP_HLP_MIN_386();
5729 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5730}
5731
5732
5733/** Opcode 0x0f 0xae mem/0. */
5734FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5735{
5736 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5737 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5738 return IEMOP_RAISE_INVALID_OPCODE();
5739
5740 IEM_MC_BEGIN(3, 1);
5741 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5742 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5743 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5746 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5747 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5748 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5749 IEM_MC_END();
5750 return VINF_SUCCESS;
5751}
5752
5753
5754/** Opcode 0x0f 0xae mem/1. */
5755FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5756{
5757 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5758 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5759 return IEMOP_RAISE_INVALID_OPCODE();
5760
5761 IEM_MC_BEGIN(3, 1);
5762 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5763 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5764 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5767 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5768 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5769 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5770 IEM_MC_END();
5771 return VINF_SUCCESS;
5772}
5773
5774
5775/**
5776 * @opmaps grp15
5777 * @opcode !11/2
5778 * @oppfx none
5779 * @opcpuid sse
5780 * @opgroup og_sse_mxcsrsm
5781 * @opxcpttype 5
5782 * @optest op1=0 -> mxcsr=0
5783 * @optest op1=0x2083 -> mxcsr=0x2083
5784 * @optest op1=0xfffffffe -> value.xcpt=0xd
5785 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5786 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5787 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5788 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5789 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5790 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5791 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5792 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5793 */
5794FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
5795{
5796 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5797 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5798 return IEMOP_RAISE_INVALID_OPCODE();
5799
5800 IEM_MC_BEGIN(2, 0);
5801 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5802 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5805 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5806 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5807 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5808 IEM_MC_END();
5809 return VINF_SUCCESS;
5810}
5811
5812
5813/**
5814 * @opmaps grp15
5815 * @opcode !11/3
5816 * @oppfx none
5817 * @opcpuid sse
5818 * @opgroup og_sse_mxcsrsm
5819 * @opxcpttype 5
5820 * @optest mxcsr=0 -> op1=0
5821 * @optest mxcsr=0x2083 -> op1=0x2083
5822 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5823 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5824 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5825 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
5826 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5827 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5828 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5829 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5830 */
5831FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
5832{
5833 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5834 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5835 return IEMOP_RAISE_INVALID_OPCODE();
5836
5837 IEM_MC_BEGIN(2, 0);
5838 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5839 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5842 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5843 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5844 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
5845 IEM_MC_END();
5846 return VINF_SUCCESS;
5847}
5848
5849
5850/**
5851 * @opmaps grp15
5852 * @opcode !11/4
5853 * @oppfx none
5854 * @opcpuid xsave
5855 * @opgroup og_system
5856 * @opxcpttype none
5857 */
5858FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
5859{
5860 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
5861 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5862 return IEMOP_RAISE_INVALID_OPCODE();
5863
5864 IEM_MC_BEGIN(3, 0);
5865 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5866 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5867 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5870 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5871 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5872 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
5873 IEM_MC_END();
5874 return VINF_SUCCESS;
5875}
5876
5877
5878/**
5879 * @opmaps grp15
5880 * @opcode !11/5
5881 * @oppfx none
5882 * @opcpuid xsave
5883 * @opgroup og_system
5884 * @opxcpttype none
5885 */
5886FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
5887{
5888 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
5889 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5890 return IEMOP_RAISE_INVALID_OPCODE();
5891
5892 IEM_MC_BEGIN(3, 0);
5893 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5894 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5895 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5898 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5899 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5900 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5901 IEM_MC_END();
5902 return VINF_SUCCESS;
5903}
5904
5905/** Opcode 0x0f 0xae mem/6. */
5906FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5907
5908/**
5909 * @opmaps grp15
5910 * @opcode !11/7
5911 * @oppfx none
5912 * @opcpuid clfsh
5913 * @opgroup og_cachectl
5914 * @optest op1=1 ->
5915 */
5916FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
5917{
5918 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5919 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
5920 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
5921
5922 IEM_MC_BEGIN(2, 0);
5923 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5924 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5927 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5928 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
5929 IEM_MC_END();
5930 return VINF_SUCCESS;
5931}
5932
5933/**
5934 * @opmaps grp15
5935 * @opcode !11/7
5936 * @oppfx 0x66
5937 * @opcpuid clflushopt
5938 * @opgroup og_cachectl
5939 * @optest op1=1 ->
5940 */
5941FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
5942{
5943 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5944 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
5945 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
5946
5947 IEM_MC_BEGIN(2, 0);
5948 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5949 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5952 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5953 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
5954 IEM_MC_END();
5955 return VINF_SUCCESS;
5956}
5957
5958
5959/** Opcode 0x0f 0xae 11b/5. */
5960FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5961{
5962 RT_NOREF_PV(bRm);
5963 IEMOP_MNEMONIC(lfence, "lfence");
5964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5965 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5966 return IEMOP_RAISE_INVALID_OPCODE();
5967
5968 IEM_MC_BEGIN(0, 0);
5969 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5970 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5971 else
5972 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5973 IEM_MC_ADVANCE_RIP();
5974 IEM_MC_END();
5975 return VINF_SUCCESS;
5976}
5977
5978
5979/** Opcode 0x0f 0xae 11b/6. */
5980FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5981{
5982 RT_NOREF_PV(bRm);
5983 IEMOP_MNEMONIC(mfence, "mfence");
5984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5985 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5986 return IEMOP_RAISE_INVALID_OPCODE();
5987
5988 IEM_MC_BEGIN(0, 0);
5989 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5990 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5991 else
5992 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5993 IEM_MC_ADVANCE_RIP();
5994 IEM_MC_END();
5995 return VINF_SUCCESS;
5996}
5997
5998
5999/** Opcode 0x0f 0xae 11b/7. */
6000FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6001{
6002 RT_NOREF_PV(bRm);
6003 IEMOP_MNEMONIC(sfence, "sfence");
6004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6005 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6006 return IEMOP_RAISE_INVALID_OPCODE();
6007
6008 IEM_MC_BEGIN(0, 0);
6009 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6010 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6011 else
6012 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6013 IEM_MC_ADVANCE_RIP();
6014 IEM_MC_END();
6015 return VINF_SUCCESS;
6016}
6017
6018
6019/** Opcode 0xf3 0x0f 0xae 11b/0. */
6020FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6021
6022/** Opcode 0xf3 0x0f 0xae 11b/1. */
6023FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6024
6025/** Opcode 0xf3 0x0f 0xae 11b/2. */
6026FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6027
6028/** Opcode 0xf3 0x0f 0xae 11b/3. */
6029FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6030
6031
6032/**
6033 * Group 15 jump table for register variant.
6034 */
6035IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6036{ /* pfx: none, 066h, 0f3h, 0f2h */
6037 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6038 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6039 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6040 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6041 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6042 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6043 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6044 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6045};
6046AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6047
6048
6049/**
6050 * Group 15 jump table for memory variant.
6051 */
6052IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6053{ /* pfx: none, 066h, 0f3h, 0f2h */
6054 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6055 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6056 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6057 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6058 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6059 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6060 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6061 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6062};
6063AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6064
6065
6066/** Opcode 0x0f 0xae. */
6067FNIEMOP_DEF(iemOp_Grp15)
6068{
6069 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6072 /* register, register */
6073 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6074 + pVCpu->iem.s.idxPrefix], bRm);
6075 /* memory, register */
6076 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6077 + pVCpu->iem.s.idxPrefix], bRm);
6078}
6079
6080
6081/** Opcode 0x0f 0xaf. */
6082FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6083{
6084 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6085 IEMOP_HLP_MIN_386();
6086 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6087 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6088}
6089
6090
6091/** Opcode 0x0f 0xb0. */
6092FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6093{
6094 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6095 IEMOP_HLP_MIN_486();
6096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6097
6098 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6099 {
6100 IEMOP_HLP_DONE_DECODING();
6101 IEM_MC_BEGIN(4, 0);
6102 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6103 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6104 IEM_MC_ARG(uint8_t, u8Src, 2);
6105 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6106
6107 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6108 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6109 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6110 IEM_MC_REF_EFLAGS(pEFlags);
6111 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6112 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6113 else
6114 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6115
6116 IEM_MC_ADVANCE_RIP();
6117 IEM_MC_END();
6118 }
6119 else
6120 {
6121 IEM_MC_BEGIN(4, 3);
6122 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6123 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6124 IEM_MC_ARG(uint8_t, u8Src, 2);
6125 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6127 IEM_MC_LOCAL(uint8_t, u8Al);
6128
6129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6130 IEMOP_HLP_DONE_DECODING();
6131 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6132 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6133 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6134 IEM_MC_FETCH_EFLAGS(EFlags);
6135 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6136 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6138 else
6139 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6140
6141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6142 IEM_MC_COMMIT_EFLAGS(EFlags);
6143 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6144 IEM_MC_ADVANCE_RIP();
6145 IEM_MC_END();
6146 }
6147 return VINF_SUCCESS;
6148}
6149
6150/** Opcode 0x0f 0xb1. */
6151FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6152{
6153 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6154 IEMOP_HLP_MIN_486();
6155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6156
6157 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6158 {
6159 IEMOP_HLP_DONE_DECODING();
6160 switch (pVCpu->iem.s.enmEffOpSize)
6161 {
6162 case IEMMODE_16BIT:
6163 IEM_MC_BEGIN(4, 0);
6164 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6165 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6166 IEM_MC_ARG(uint16_t, u16Src, 2);
6167 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6168
6169 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6170 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6171 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6172 IEM_MC_REF_EFLAGS(pEFlags);
6173 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6174 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6175 else
6176 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6177
6178 IEM_MC_ADVANCE_RIP();
6179 IEM_MC_END();
6180 return VINF_SUCCESS;
6181
6182 case IEMMODE_32BIT:
6183 IEM_MC_BEGIN(4, 0);
6184 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6185 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6186 IEM_MC_ARG(uint32_t, u32Src, 2);
6187 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6188
6189 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6190 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6191 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6192 IEM_MC_REF_EFLAGS(pEFlags);
6193 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6194 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6195 else
6196 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6197
6198 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6199 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6200 IEM_MC_ADVANCE_RIP();
6201 IEM_MC_END();
6202 return VINF_SUCCESS;
6203
6204 case IEMMODE_64BIT:
6205 IEM_MC_BEGIN(4, 0);
6206 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6207 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6208#ifdef RT_ARCH_X86
6209 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6210#else
6211 IEM_MC_ARG(uint64_t, u64Src, 2);
6212#endif
6213 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6214
6215 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6216 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6217 IEM_MC_REF_EFLAGS(pEFlags);
6218#ifdef RT_ARCH_X86
6219 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6220 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6221 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6222 else
6223 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6224#else
6225 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6226 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6227 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6228 else
6229 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6230#endif
6231
6232 IEM_MC_ADVANCE_RIP();
6233 IEM_MC_END();
6234 return VINF_SUCCESS;
6235
6236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6237 }
6238 }
6239 else
6240 {
6241 switch (pVCpu->iem.s.enmEffOpSize)
6242 {
6243 case IEMMODE_16BIT:
6244 IEM_MC_BEGIN(4, 3);
6245 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6246 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6247 IEM_MC_ARG(uint16_t, u16Src, 2);
6248 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6250 IEM_MC_LOCAL(uint16_t, u16Ax);
6251
6252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6253 IEMOP_HLP_DONE_DECODING();
6254 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6255 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6256 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6257 IEM_MC_FETCH_EFLAGS(EFlags);
6258 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6259 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6260 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6261 else
6262 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6263
6264 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6265 IEM_MC_COMMIT_EFLAGS(EFlags);
6266 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6267 IEM_MC_ADVANCE_RIP();
6268 IEM_MC_END();
6269 return VINF_SUCCESS;
6270
6271 case IEMMODE_32BIT:
6272 IEM_MC_BEGIN(4, 3);
6273 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6274 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6275 IEM_MC_ARG(uint32_t, u32Src, 2);
6276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6278 IEM_MC_LOCAL(uint32_t, u32Eax);
6279
6280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6281 IEMOP_HLP_DONE_DECODING();
6282 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6283 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6284 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6285 IEM_MC_FETCH_EFLAGS(EFlags);
6286 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6287 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6288 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6289 else
6290 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6291
6292 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6293 IEM_MC_COMMIT_EFLAGS(EFlags);
6294 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6295 IEM_MC_ADVANCE_RIP();
6296 IEM_MC_END();
6297 return VINF_SUCCESS;
6298
6299 case IEMMODE_64BIT:
6300 IEM_MC_BEGIN(4, 3);
6301 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6302 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6303#ifdef RT_ARCH_X86
6304 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6305#else
6306 IEM_MC_ARG(uint64_t, u64Src, 2);
6307#endif
6308 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6310 IEM_MC_LOCAL(uint64_t, u64Rax);
6311
6312 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6313 IEMOP_HLP_DONE_DECODING();
6314 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6315 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6316 IEM_MC_FETCH_EFLAGS(EFlags);
6317 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6318#ifdef RT_ARCH_X86
6319 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6320 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6321 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6322 else
6323 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6324#else
6325 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6326 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6327 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6328 else
6329 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6330#endif
6331
6332 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6333 IEM_MC_COMMIT_EFLAGS(EFlags);
6334 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6335 IEM_MC_ADVANCE_RIP();
6336 IEM_MC_END();
6337 return VINF_SUCCESS;
6338
6339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6340 }
6341 }
6342}
6343
6344
6345FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6346{
6347 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6348 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6349
6350 switch (pVCpu->iem.s.enmEffOpSize)
6351 {
6352 case IEMMODE_16BIT:
6353 IEM_MC_BEGIN(5, 1);
6354 IEM_MC_ARG(uint16_t, uSel, 0);
6355 IEM_MC_ARG(uint16_t, offSeg, 1);
6356 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6357 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6358 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6359 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6362 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6363 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6364 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6365 IEM_MC_END();
6366 return VINF_SUCCESS;
6367
6368 case IEMMODE_32BIT:
6369 IEM_MC_BEGIN(5, 1);
6370 IEM_MC_ARG(uint16_t, uSel, 0);
6371 IEM_MC_ARG(uint32_t, offSeg, 1);
6372 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6373 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6374 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6375 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6378 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6379 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6380 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6381 IEM_MC_END();
6382 return VINF_SUCCESS;
6383
6384 case IEMMODE_64BIT:
6385 IEM_MC_BEGIN(5, 1);
6386 IEM_MC_ARG(uint16_t, uSel, 0);
6387 IEM_MC_ARG(uint64_t, offSeg, 1);
6388 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6389 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6390 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6391 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6394 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6395 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6396 else
6397 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6398 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6399 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6400 IEM_MC_END();
6401 return VINF_SUCCESS;
6402
6403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6404 }
6405}
6406
6407
6408/** Opcode 0x0f 0xb2. */
6409FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6410{
6411 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6412 IEMOP_HLP_MIN_386();
6413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6414 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6415 return IEMOP_RAISE_INVALID_OPCODE();
6416 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6417}
6418
6419
6420/** Opcode 0x0f 0xb3. */
6421FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6422{
6423 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6424 IEMOP_HLP_MIN_386();
6425 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6426}
6427
6428
6429/** Opcode 0x0f 0xb4. */
6430FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6431{
6432 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6433 IEMOP_HLP_MIN_386();
6434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6435 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6436 return IEMOP_RAISE_INVALID_OPCODE();
6437 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6438}
6439
6440
6441/** Opcode 0x0f 0xb5. */
6442FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6443{
6444 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6445 IEMOP_HLP_MIN_386();
6446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6447 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6448 return IEMOP_RAISE_INVALID_OPCODE();
6449 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6450}
6451
6452
6453/** Opcode 0x0f 0xb6. */
6454FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6455{
6456 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6457 IEMOP_HLP_MIN_386();
6458
6459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6460
6461 /*
6462 * If rm is denoting a register, no more instruction bytes.
6463 */
6464 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6465 {
6466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6467 switch (pVCpu->iem.s.enmEffOpSize)
6468 {
6469 case IEMMODE_16BIT:
6470 IEM_MC_BEGIN(0, 1);
6471 IEM_MC_LOCAL(uint16_t, u16Value);
6472 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6473 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6474 IEM_MC_ADVANCE_RIP();
6475 IEM_MC_END();
6476 return VINF_SUCCESS;
6477
6478 case IEMMODE_32BIT:
6479 IEM_MC_BEGIN(0, 1);
6480 IEM_MC_LOCAL(uint32_t, u32Value);
6481 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6482 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6483 IEM_MC_ADVANCE_RIP();
6484 IEM_MC_END();
6485 return VINF_SUCCESS;
6486
6487 case IEMMODE_64BIT:
6488 IEM_MC_BEGIN(0, 1);
6489 IEM_MC_LOCAL(uint64_t, u64Value);
6490 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6491 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6492 IEM_MC_ADVANCE_RIP();
6493 IEM_MC_END();
6494 return VINF_SUCCESS;
6495
6496 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6497 }
6498 }
6499 else
6500 {
6501 /*
6502 * We're loading a register from memory.
6503 */
6504 switch (pVCpu->iem.s.enmEffOpSize)
6505 {
6506 case IEMMODE_16BIT:
6507 IEM_MC_BEGIN(0, 2);
6508 IEM_MC_LOCAL(uint16_t, u16Value);
6509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6512 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6513 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6514 IEM_MC_ADVANCE_RIP();
6515 IEM_MC_END();
6516 return VINF_SUCCESS;
6517
6518 case IEMMODE_32BIT:
6519 IEM_MC_BEGIN(0, 2);
6520 IEM_MC_LOCAL(uint32_t, u32Value);
6521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6524 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6525 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6526 IEM_MC_ADVANCE_RIP();
6527 IEM_MC_END();
6528 return VINF_SUCCESS;
6529
6530 case IEMMODE_64BIT:
6531 IEM_MC_BEGIN(0, 2);
6532 IEM_MC_LOCAL(uint64_t, u64Value);
6533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6536 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6537 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6538 IEM_MC_ADVANCE_RIP();
6539 IEM_MC_END();
6540 return VINF_SUCCESS;
6541
6542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6543 }
6544 }
6545}
6546
6547
6548/** Opcode 0x0f 0xb7. */
6549FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6550{
6551 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6552 IEMOP_HLP_MIN_386();
6553
6554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6555
6556 /** @todo Not entirely sure how the operand size prefix is handled here,
6557 * assuming that it will be ignored. Would be nice to have a few
6558 * test for this. */
6559 /*
6560 * If rm is denoting a register, no more instruction bytes.
6561 */
6562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6563 {
6564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6565 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6566 {
6567 IEM_MC_BEGIN(0, 1);
6568 IEM_MC_LOCAL(uint32_t, u32Value);
6569 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6570 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6571 IEM_MC_ADVANCE_RIP();
6572 IEM_MC_END();
6573 }
6574 else
6575 {
6576 IEM_MC_BEGIN(0, 1);
6577 IEM_MC_LOCAL(uint64_t, u64Value);
6578 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6579 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6580 IEM_MC_ADVANCE_RIP();
6581 IEM_MC_END();
6582 }
6583 }
6584 else
6585 {
6586 /*
6587 * We're loading a register from memory.
6588 */
6589 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6590 {
6591 IEM_MC_BEGIN(0, 2);
6592 IEM_MC_LOCAL(uint32_t, u32Value);
6593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6596 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6597 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6598 IEM_MC_ADVANCE_RIP();
6599 IEM_MC_END();
6600 }
6601 else
6602 {
6603 IEM_MC_BEGIN(0, 2);
6604 IEM_MC_LOCAL(uint64_t, u64Value);
6605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6608 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6609 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6610 IEM_MC_ADVANCE_RIP();
6611 IEM_MC_END();
6612 }
6613 }
6614 return VINF_SUCCESS;
6615}
6616
6617
6618/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6619FNIEMOP_UD_STUB(iemOp_jmpe);
6620/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6621FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6622
6623
6624/**
6625 * @opcode 0xb9
6626 * @opinvalid intel-modrm
6627 * @optest ->
6628 */
6629FNIEMOP_DEF(iemOp_Grp10)
6630{
6631 /*
6632 * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
6633 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6634 */
6635 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6636 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6637 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6638}
6639
6640
6641/** Opcode 0x0f 0xba. */
6642FNIEMOP_DEF(iemOp_Grp8)
6643{
6644 IEMOP_HLP_MIN_386();
6645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6646 PCIEMOPBINSIZES pImpl;
6647 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6648 {
6649 case 0: case 1: case 2: case 3:
6650 /* Both AMD and Intel want full modr/m decoding and imm8. */
6651 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6652 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6653 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6654 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6655 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6657 }
6658 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6659
6660 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6661 {
6662 /* register destination. */
6663 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6665
6666 switch (pVCpu->iem.s.enmEffOpSize)
6667 {
6668 case IEMMODE_16BIT:
6669 IEM_MC_BEGIN(3, 0);
6670 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6671 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6672 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6673
6674 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6675 IEM_MC_REF_EFLAGS(pEFlags);
6676 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6677
6678 IEM_MC_ADVANCE_RIP();
6679 IEM_MC_END();
6680 return VINF_SUCCESS;
6681
6682 case IEMMODE_32BIT:
6683 IEM_MC_BEGIN(3, 0);
6684 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6685 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6686 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6687
6688 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6689 IEM_MC_REF_EFLAGS(pEFlags);
6690 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6691
6692 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6693 IEM_MC_ADVANCE_RIP();
6694 IEM_MC_END();
6695 return VINF_SUCCESS;
6696
6697 case IEMMODE_64BIT:
6698 IEM_MC_BEGIN(3, 0);
6699 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6700 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6701 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6702
6703 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6704 IEM_MC_REF_EFLAGS(pEFlags);
6705 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6706
6707 IEM_MC_ADVANCE_RIP();
6708 IEM_MC_END();
6709 return VINF_SUCCESS;
6710
6711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6712 }
6713 }
6714 else
6715 {
6716 /* memory destination. */
6717
6718 uint32_t fAccess;
6719 if (pImpl->pfnLockedU16)
6720 fAccess = IEM_ACCESS_DATA_RW;
6721 else /* BT */
6722 fAccess = IEM_ACCESS_DATA_R;
6723
6724 /** @todo test negative bit offsets! */
6725 switch (pVCpu->iem.s.enmEffOpSize)
6726 {
6727 case IEMMODE_16BIT:
6728 IEM_MC_BEGIN(3, 1);
6729 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6730 IEM_MC_ARG(uint16_t, u16Src, 1);
6731 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6733
6734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6735 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6736 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6737 if (pImpl->pfnLockedU16)
6738 IEMOP_HLP_DONE_DECODING();
6739 else
6740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6741 IEM_MC_FETCH_EFLAGS(EFlags);
6742 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6743 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6744 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6745 else
6746 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6747 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6748
6749 IEM_MC_COMMIT_EFLAGS(EFlags);
6750 IEM_MC_ADVANCE_RIP();
6751 IEM_MC_END();
6752 return VINF_SUCCESS;
6753
6754 case IEMMODE_32BIT:
6755 IEM_MC_BEGIN(3, 1);
6756 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6757 IEM_MC_ARG(uint32_t, u32Src, 1);
6758 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6760
6761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6762 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6763 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6764 if (pImpl->pfnLockedU16)
6765 IEMOP_HLP_DONE_DECODING();
6766 else
6767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6768 IEM_MC_FETCH_EFLAGS(EFlags);
6769 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6770 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6771 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6772 else
6773 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6774 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6775
6776 IEM_MC_COMMIT_EFLAGS(EFlags);
6777 IEM_MC_ADVANCE_RIP();
6778 IEM_MC_END();
6779 return VINF_SUCCESS;
6780
6781 case IEMMODE_64BIT:
6782 IEM_MC_BEGIN(3, 1);
6783 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6784 IEM_MC_ARG(uint64_t, u64Src, 1);
6785 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6787
6788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6789 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6790 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6791 if (pImpl->pfnLockedU16)
6792 IEMOP_HLP_DONE_DECODING();
6793 else
6794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6795 IEM_MC_FETCH_EFLAGS(EFlags);
6796 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6797 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6798 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6799 else
6800 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6801 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6802
6803 IEM_MC_COMMIT_EFLAGS(EFlags);
6804 IEM_MC_ADVANCE_RIP();
6805 IEM_MC_END();
6806 return VINF_SUCCESS;
6807
6808 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6809 }
6810 }
6811}
6812
6813
6814/** Opcode 0x0f 0xbb. */
6815FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6816{
6817 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6818 IEMOP_HLP_MIN_386();
6819 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6820}
6821
6822
6823/** Opcode 0x0f 0xbc. */
6824FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6825{
6826 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6827 IEMOP_HLP_MIN_386();
6828 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6829 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6830}
6831
6832
6833/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6834FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6835
6836
6837/** Opcode 0x0f 0xbd. */
6838FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6839{
6840 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6841 IEMOP_HLP_MIN_386();
6842 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6843 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6844}
6845
6846
6847/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6848FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6849
6850
6851/** Opcode 0x0f 0xbe. */
6852FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6853{
6854 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6855 IEMOP_HLP_MIN_386();
6856
6857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6858
6859 /*
6860 * If rm is denoting a register, no more instruction bytes.
6861 */
6862 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6863 {
6864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6865 switch (pVCpu->iem.s.enmEffOpSize)
6866 {
6867 case IEMMODE_16BIT:
6868 IEM_MC_BEGIN(0, 1);
6869 IEM_MC_LOCAL(uint16_t, u16Value);
6870 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6871 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6872 IEM_MC_ADVANCE_RIP();
6873 IEM_MC_END();
6874 return VINF_SUCCESS;
6875
6876 case IEMMODE_32BIT:
6877 IEM_MC_BEGIN(0, 1);
6878 IEM_MC_LOCAL(uint32_t, u32Value);
6879 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6880 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6881 IEM_MC_ADVANCE_RIP();
6882 IEM_MC_END();
6883 return VINF_SUCCESS;
6884
6885 case IEMMODE_64BIT:
6886 IEM_MC_BEGIN(0, 1);
6887 IEM_MC_LOCAL(uint64_t, u64Value);
6888 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6889 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6890 IEM_MC_ADVANCE_RIP();
6891 IEM_MC_END();
6892 return VINF_SUCCESS;
6893
6894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6895 }
6896 }
6897 else
6898 {
6899 /*
6900 * We're loading a register from memory.
6901 */
6902 switch (pVCpu->iem.s.enmEffOpSize)
6903 {
6904 case IEMMODE_16BIT:
6905 IEM_MC_BEGIN(0, 2);
6906 IEM_MC_LOCAL(uint16_t, u16Value);
6907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6910 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6911 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6912 IEM_MC_ADVANCE_RIP();
6913 IEM_MC_END();
6914 return VINF_SUCCESS;
6915
6916 case IEMMODE_32BIT:
6917 IEM_MC_BEGIN(0, 2);
6918 IEM_MC_LOCAL(uint32_t, u32Value);
6919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6922 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6923 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6924 IEM_MC_ADVANCE_RIP();
6925 IEM_MC_END();
6926 return VINF_SUCCESS;
6927
6928 case IEMMODE_64BIT:
6929 IEM_MC_BEGIN(0, 2);
6930 IEM_MC_LOCAL(uint64_t, u64Value);
6931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6934 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6935 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6936 IEM_MC_ADVANCE_RIP();
6937 IEM_MC_END();
6938 return VINF_SUCCESS;
6939
6940 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6941 }
6942 }
6943}
6944
6945
6946/** Opcode 0x0f 0xbf. */
6947FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6948{
6949 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6950 IEMOP_HLP_MIN_386();
6951
6952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6953
6954 /** @todo Not entirely sure how the operand size prefix is handled here,
6955 * assuming that it will be ignored. Would be nice to have a few
6956 * test for this. */
6957 /*
6958 * If rm is denoting a register, no more instruction bytes.
6959 */
6960 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6961 {
6962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6963 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6964 {
6965 IEM_MC_BEGIN(0, 1);
6966 IEM_MC_LOCAL(uint32_t, u32Value);
6967 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6968 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6969 IEM_MC_ADVANCE_RIP();
6970 IEM_MC_END();
6971 }
6972 else
6973 {
6974 IEM_MC_BEGIN(0, 1);
6975 IEM_MC_LOCAL(uint64_t, u64Value);
6976 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6977 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6978 IEM_MC_ADVANCE_RIP();
6979 IEM_MC_END();
6980 }
6981 }
6982 else
6983 {
6984 /*
6985 * We're loading a register from memory.
6986 */
6987 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6988 {
6989 IEM_MC_BEGIN(0, 2);
6990 IEM_MC_LOCAL(uint32_t, u32Value);
6991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6994 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6995 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6996 IEM_MC_ADVANCE_RIP();
6997 IEM_MC_END();
6998 }
6999 else
7000 {
7001 IEM_MC_BEGIN(0, 2);
7002 IEM_MC_LOCAL(uint64_t, u64Value);
7003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7006 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7007 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7008 IEM_MC_ADVANCE_RIP();
7009 IEM_MC_END();
7010 }
7011 }
7012 return VINF_SUCCESS;
7013}
7014
7015
7016/** Opcode 0x0f 0xc0. */
7017FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7018{
7019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7020 IEMOP_HLP_MIN_486();
7021 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7022
7023 /*
7024 * If rm is denoting a register, no more instruction bytes.
7025 */
7026 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7027 {
7028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7029
7030 IEM_MC_BEGIN(3, 0);
7031 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7032 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7033 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7034
7035 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7036 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7037 IEM_MC_REF_EFLAGS(pEFlags);
7038 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7039
7040 IEM_MC_ADVANCE_RIP();
7041 IEM_MC_END();
7042 }
7043 else
7044 {
7045 /*
7046 * We're accessing memory.
7047 */
7048 IEM_MC_BEGIN(3, 3);
7049 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7050 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7051 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7052 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7054
7055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7056 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7057 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7058 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7059 IEM_MC_FETCH_EFLAGS(EFlags);
7060 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7061 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7062 else
7063 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7064
7065 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7066 IEM_MC_COMMIT_EFLAGS(EFlags);
7067 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7068 IEM_MC_ADVANCE_RIP();
7069 IEM_MC_END();
7070 return VINF_SUCCESS;
7071 }
7072 return VINF_SUCCESS;
7073}
7074
7075
7076/** Opcode 0x0f 0xc1. */
7077FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7078{
7079 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7080 IEMOP_HLP_MIN_486();
7081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7082
7083 /*
7084 * If rm is denoting a register, no more instruction bytes.
7085 */
7086 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7087 {
7088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7089
7090 switch (pVCpu->iem.s.enmEffOpSize)
7091 {
7092 case IEMMODE_16BIT:
7093 IEM_MC_BEGIN(3, 0);
7094 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7095 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7096 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7097
7098 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7099 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7100 IEM_MC_REF_EFLAGS(pEFlags);
7101 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7102
7103 IEM_MC_ADVANCE_RIP();
7104 IEM_MC_END();
7105 return VINF_SUCCESS;
7106
7107 case IEMMODE_32BIT:
7108 IEM_MC_BEGIN(3, 0);
7109 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7110 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7111 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7112
7113 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7114 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7115 IEM_MC_REF_EFLAGS(pEFlags);
7116 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7117
7118 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7119 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7120 IEM_MC_ADVANCE_RIP();
7121 IEM_MC_END();
7122 return VINF_SUCCESS;
7123
7124 case IEMMODE_64BIT:
7125 IEM_MC_BEGIN(3, 0);
7126 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7127 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7128 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7129
7130 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7131 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7132 IEM_MC_REF_EFLAGS(pEFlags);
7133 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7134
7135 IEM_MC_ADVANCE_RIP();
7136 IEM_MC_END();
7137 return VINF_SUCCESS;
7138
7139 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7140 }
7141 }
7142 else
7143 {
7144 /*
7145 * We're accessing memory.
7146 */
7147 switch (pVCpu->iem.s.enmEffOpSize)
7148 {
7149 case IEMMODE_16BIT:
7150 IEM_MC_BEGIN(3, 3);
7151 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7152 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7153 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7154 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7156
7157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7158 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7159 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7160 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7161 IEM_MC_FETCH_EFLAGS(EFlags);
7162 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7164 else
7165 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7166
7167 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7168 IEM_MC_COMMIT_EFLAGS(EFlags);
7169 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7170 IEM_MC_ADVANCE_RIP();
7171 IEM_MC_END();
7172 return VINF_SUCCESS;
7173
7174 case IEMMODE_32BIT:
7175 IEM_MC_BEGIN(3, 3);
7176 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7177 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7178 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7179 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7181
7182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7183 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7184 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7185 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7186 IEM_MC_FETCH_EFLAGS(EFlags);
7187 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7188 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7189 else
7190 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7191
7192 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7193 IEM_MC_COMMIT_EFLAGS(EFlags);
7194 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7195 IEM_MC_ADVANCE_RIP();
7196 IEM_MC_END();
7197 return VINF_SUCCESS;
7198
7199 case IEMMODE_64BIT:
7200 IEM_MC_BEGIN(3, 3);
7201 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7202 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7203 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7204 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7206
7207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7208 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7209 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7210 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7211 IEM_MC_FETCH_EFLAGS(EFlags);
7212 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7213 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7214 else
7215 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7216
7217 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7218 IEM_MC_COMMIT_EFLAGS(EFlags);
7219 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7220 IEM_MC_ADVANCE_RIP();
7221 IEM_MC_END();
7222 return VINF_SUCCESS;
7223
7224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7225 }
7226 }
7227}
7228
7229
7230/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7231FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7232/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7233FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7234/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7235FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7236/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7237FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7238
7239
7240/** Opcode 0x0f 0xc3. */
7241FNIEMOP_DEF(iemOp_movnti_My_Gy)
7242{
7243 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7244
7245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7246
7247 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7248 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7249 {
7250 switch (pVCpu->iem.s.enmEffOpSize)
7251 {
7252 case IEMMODE_32BIT:
7253 IEM_MC_BEGIN(0, 2);
7254 IEM_MC_LOCAL(uint32_t, u32Value);
7255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7256
7257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7259 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7260 return IEMOP_RAISE_INVALID_OPCODE();
7261
7262 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7263 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7264 IEM_MC_ADVANCE_RIP();
7265 IEM_MC_END();
7266 break;
7267
7268 case IEMMODE_64BIT:
7269 IEM_MC_BEGIN(0, 2);
7270 IEM_MC_LOCAL(uint64_t, u64Value);
7271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7272
7273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7275 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7276 return IEMOP_RAISE_INVALID_OPCODE();
7277
7278 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7279 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7280 IEM_MC_ADVANCE_RIP();
7281 IEM_MC_END();
7282 break;
7283
7284 case IEMMODE_16BIT:
7285 /** @todo check this form. */
7286 return IEMOP_RAISE_INVALID_OPCODE();
7287 }
7288 }
7289 else
7290 return IEMOP_RAISE_INVALID_OPCODE();
7291 return VINF_SUCCESS;
7292}
7293/* Opcode 0x66 0x0f 0xc3 - invalid */
7294/* Opcode 0xf3 0x0f 0xc3 - invalid */
7295/* Opcode 0xf2 0x0f 0xc3 - invalid */
7296
7297/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7298FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7299/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7300FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7301/* Opcode 0xf3 0x0f 0xc4 - invalid */
7302/* Opcode 0xf2 0x0f 0xc4 - invalid */
7303
7304/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7305FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7306/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7307FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7308/* Opcode 0xf3 0x0f 0xc5 - invalid */
7309/* Opcode 0xf2 0x0f 0xc5 - invalid */
7310
7311/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7312FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7313/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7314FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7315/* Opcode 0xf3 0x0f 0xc6 - invalid */
7316/* Opcode 0xf2 0x0f 0xc6 - invalid */
7317
7318
7319/** Opcode 0x0f 0xc7 !11/1. */
7320FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7321{
7322 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7323
7324 IEM_MC_BEGIN(4, 3);
7325 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7326 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7327 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7328 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7329 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7330 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7332
7333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7334 IEMOP_HLP_DONE_DECODING();
7335 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7336
7337 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7338 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7339 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7340
7341 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7342 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7343 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7344
7345 IEM_MC_FETCH_EFLAGS(EFlags);
7346 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7347 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7348 else
7349 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7350
7351 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7352 IEM_MC_COMMIT_EFLAGS(EFlags);
7353 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7354 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7355 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7356 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7357 IEM_MC_ENDIF();
7358 IEM_MC_ADVANCE_RIP();
7359
7360 IEM_MC_END();
7361 return VINF_SUCCESS;
7362}
7363
7364
7365/** Opcode REX.W 0x0f 0xc7 !11/1. */
7366FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7367{
7368 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7369 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7370 {
7371#if 0
7372 RT_NOREF(bRm);
7373 IEMOP_BITCH_ABOUT_STUB();
7374 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7375#else
7376 IEM_MC_BEGIN(4, 3);
7377 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7378 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7379 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7380 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7381 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7382 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7384
7385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7386 IEMOP_HLP_DONE_DECODING();
7387 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7388 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7389
7390 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7391 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7392 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7393
7394 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7395 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7396 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7397
7398 IEM_MC_FETCH_EFLAGS(EFlags);
7399# ifdef RT_ARCH_AMD64
7400 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7401 {
7402 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7403 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7404 else
7405 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7406 }
7407 else
7408# endif
7409 {
7410 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7411 accesses and not all all atomic, which works fine on in UNI CPU guest
7412 configuration (ignoring DMA). If guest SMP is active we have no choice
7413 but to use a rendezvous callback here. Sigh. */
7414 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7415 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7416 else
7417 {
7418 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7419 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7420 }
7421 }
7422
7423 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7424 IEM_MC_COMMIT_EFLAGS(EFlags);
7425 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7426 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7427 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7428 IEM_MC_ENDIF();
7429 IEM_MC_ADVANCE_RIP();
7430
7431 IEM_MC_END();
7432 return VINF_SUCCESS;
7433#endif
7434 }
7435 Log(("cmpxchg16b -> #UD\n"));
7436 return IEMOP_RAISE_INVALID_OPCODE();
7437}
7438
7439FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7440{
7441 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7442 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7443 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7444}
7445
7446/** Opcode 0x0f 0xc7 11/6. */
7447FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7448
7449/** Opcode 0x0f 0xc7 !11/6. */
7450FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7451
7452/** Opcode 0x66 0x0f 0xc7 !11/6. */
7453FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7454
7455/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7456FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7457
7458/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7459FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7460
7461/** Opcode 0x0f 0xc7 11/7. */
7462FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7463
7464
7465/**
7466 * Group 9 jump table for register variant.
7467 */
7468IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7469{ /* pfx: none, 066h, 0f3h, 0f2h */
7470 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7471 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7472 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7473 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7474 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7475 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7476 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7477 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7478};
7479AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7480
7481
7482/**
7483 * Group 9 jump table for memory variant.
7484 */
7485IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7486{ /* pfx: none, 066h, 0f3h, 0f2h */
7487 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7488 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7489 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7490 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7491 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7492 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7493 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7494 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7495};
7496AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7497
7498
7499/** Opcode 0x0f 0xc7. */
7500FNIEMOP_DEF(iemOp_Grp9)
7501{
7502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7503 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7504 /* register, register */
7505 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7506 + pVCpu->iem.s.idxPrefix], bRm);
7507 /* memory, register */
7508 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7509 + pVCpu->iem.s.idxPrefix], bRm);
7510}
7511
7512
7513/**
7514 * Common 'bswap register' helper.
7515 */
7516FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7517{
7518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7519 switch (pVCpu->iem.s.enmEffOpSize)
7520 {
7521 case IEMMODE_16BIT:
7522 IEM_MC_BEGIN(1, 0);
7523 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7524 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7525 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7526 IEM_MC_ADVANCE_RIP();
7527 IEM_MC_END();
7528 return VINF_SUCCESS;
7529
7530 case IEMMODE_32BIT:
7531 IEM_MC_BEGIN(1, 0);
7532 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7533 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7534 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7535 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7536 IEM_MC_ADVANCE_RIP();
7537 IEM_MC_END();
7538 return VINF_SUCCESS;
7539
7540 case IEMMODE_64BIT:
7541 IEM_MC_BEGIN(1, 0);
7542 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7543 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7544 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7545 IEM_MC_ADVANCE_RIP();
7546 IEM_MC_END();
7547 return VINF_SUCCESS;
7548
7549 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7550 }
7551}
7552
7553
7554/** Opcode 0x0f 0xc8. */
7555FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7556{
7557 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7558 /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
7559 prefix. REX.B is the correct prefix it appears. For a parallel
7560 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7561 IEMOP_HLP_MIN_486();
7562 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7563}
7564
7565
7566/** Opcode 0x0f 0xc9. */
7567FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7568{
7569 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7570 IEMOP_HLP_MIN_486();
7571 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7572}
7573
7574
7575/** Opcode 0x0f 0xca. */
7576FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7577{
7578 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
7579 IEMOP_HLP_MIN_486();
7580 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7581}
7582
7583
7584/** Opcode 0x0f 0xcb. */
7585FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7586{
7587 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
7588 IEMOP_HLP_MIN_486();
7589 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7590}
7591
7592
7593/** Opcode 0x0f 0xcc. */
7594FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7595{
7596 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7597 IEMOP_HLP_MIN_486();
7598 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7599}
7600
7601
7602/** Opcode 0x0f 0xcd. */
7603FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7604{
7605 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7606 IEMOP_HLP_MIN_486();
7607 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7608}
7609
7610
7611/** Opcode 0x0f 0xce. */
7612FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7613{
7614 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7615 IEMOP_HLP_MIN_486();
7616 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7617}
7618
7619
7620/** Opcode 0x0f 0xcf. */
7621FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7622{
7623 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7624 IEMOP_HLP_MIN_486();
7625 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7626}
7627
7628
7629/* Opcode 0x0f 0xd0 - invalid */
7630/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7631FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7632/* Opcode 0xf3 0x0f 0xd0 - invalid */
7633/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7634FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7635
7636/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7637FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7638/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
7639FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7640/* Opcode 0xf3 0x0f 0xd1 - invalid */
7641/* Opcode 0xf2 0x0f 0xd1 - invalid */
7642
7643/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7644FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7645/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7646FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7647/* Opcode 0xf3 0x0f 0xd2 - invalid */
7648/* Opcode 0xf2 0x0f 0xd2 - invalid */
7649
7650/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7651FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7652/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
7653FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
7654/* Opcode 0xf3 0x0f 0xd3 - invalid */
7655/* Opcode 0xf2 0x0f 0xd3 - invalid */
7656
7657/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7658FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7659/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
7660FNIEMOP_STUB(iemOp_paddq_Vx_W);
7661/* Opcode 0xf3 0x0f 0xd4 - invalid */
7662/* Opcode 0xf2 0x0f 0xd4 - invalid */
7663
7664/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7665FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7666/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
7667FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
7668/* Opcode 0xf3 0x0f 0xd5 - invalid */
7669/* Opcode 0xf2 0x0f 0xd5 - invalid */
7670
7671/* Opcode 0x0f 0xd6 - invalid */
7672
7673/**
7674 * @opcode 0xd6
7675 * @oppfx 0x66
7676 * @opcpuid sse2
7677 * @opgroup og_sse2_pcksclr_datamove
7678 * @opxcpttype none
7679 * @optest op1=-1 op2=2 -> op1=2
7680 * @optest op1=0 op2=-42 -> op1=-42
7681 */
7682FNIEMOP_DEF(iemOp_movq_Wq_Vq)
7683{
7684 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7685 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7686 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7687 {
7688 /*
7689 * Register, register.
7690 */
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7692 IEM_MC_BEGIN(0, 2);
7693 IEM_MC_LOCAL(uint64_t, uSrc);
7694
7695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7696 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7697
7698 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7699 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7700
7701 IEM_MC_ADVANCE_RIP();
7702 IEM_MC_END();
7703 }
7704 else
7705 {
7706 /*
7707 * Memory, register.
7708 */
7709 IEM_MC_BEGIN(0, 2);
7710 IEM_MC_LOCAL(uint64_t, uSrc);
7711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7712
7713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7715 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7716 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7717
7718 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7719 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7720
7721 IEM_MC_ADVANCE_RIP();
7722 IEM_MC_END();
7723 }
7724 return VINF_SUCCESS;
7725}
7726
7727
7728/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7729FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7730/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7731FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7732#if 0
7733FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7734{
7735 /* Docs says register only. */
7736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7737
7738 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7739 {
7740 case IEM_OP_PRF_SIZE_OP: /* SSE */
7741 I E M O P _ M N E M O N I C(movq_Wq_Vq, "movq Wq,Vq");
7742 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7743 IEM_MC_BEGIN(2, 0);
7744 IEM_MC_ARG(uint64_t *, pDst, 0);
7745 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7746 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7747 IEM_MC_PREPARE_SSE_USAGE();
7748 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7749 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7750 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7751 IEM_MC_ADVANCE_RIP();
7752 IEM_MC_END();
7753 return VINF_SUCCESS;
7754
7755 case 0: /* MMX */
7756 I E M O P _ M N E M O N I C(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7757 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7758 IEM_MC_BEGIN(2, 0);
7759 IEM_MC_ARG(uint64_t *, pDst, 0);
7760 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7761 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7762 IEM_MC_PREPARE_FPU_USAGE();
7763 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7764 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7765 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7766 IEM_MC_ADVANCE_RIP();
7767 IEM_MC_END();
7768 return VINF_SUCCESS;
7769
7770 default:
7771 return IEMOP_RAISE_INVALID_OPCODE();
7772 }
7773}
7774#endif
7775
7776
7777/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7778FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7779{
7780 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
7781 /** @todo testcase: Check that the instruction implicitly clears the high
7782 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7783 * and opcode modifications are made to work with the whole width (not
7784 * just 128). */
7785 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Nq");
7786 /* Docs says register only. */
7787 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7788 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7789 {
7790 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7791 IEM_MC_BEGIN(2, 0);
7792 IEM_MC_ARG(uint64_t *, pDst, 0);
7793 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7794 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7795 IEM_MC_PREPARE_FPU_USAGE();
7796 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7797 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7798 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7799 IEM_MC_ADVANCE_RIP();
7800 IEM_MC_END();
7801 return VINF_SUCCESS;
7802 }
7803 return IEMOP_RAISE_INVALID_OPCODE();
7804}
7805
7806/** Opcode 0x66 0x0f 0xd7 - */
7807FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
7808{
7809 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
7810 /** @todo testcase: Check that the instruction implicitly clears the high
7811 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7812 * and opcode modifications are made to work with the whole width (not
7813 * just 128). */
7814 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "vpmovmskb Gd, Ux");
7815 /* Docs says register only. */
7816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7817 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7818 {
7819 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7820 IEM_MC_BEGIN(2, 0);
7821 IEM_MC_ARG(uint64_t *, pDst, 0);
7822 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7823 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7824 IEM_MC_PREPARE_SSE_USAGE();
7825 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7826 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7827 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7828 IEM_MC_ADVANCE_RIP();
7829 IEM_MC_END();
7830 return VINF_SUCCESS;
7831 }
7832 return IEMOP_RAISE_INVALID_OPCODE();
7833}
7834
7835/* Opcode 0xf3 0x0f 0xd7 - invalid */
7836/* Opcode 0xf2 0x0f 0xd7 - invalid */
7837
7838
7839/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7840FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7841/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
7842FNIEMOP_STUB(iemOp_psubusb_Vx_W);
7843/* Opcode 0xf3 0x0f 0xd8 - invalid */
7844/* Opcode 0xf2 0x0f 0xd8 - invalid */
7845
7846/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7847FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7848/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
7849FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
7850/* Opcode 0xf3 0x0f 0xd9 - invalid */
7851/* Opcode 0xf2 0x0f 0xd9 - invalid */
7852
7853/** Opcode 0x0f 0xda - pminub Pq, Qq */
7854FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7855/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
7856FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
7857/* Opcode 0xf3 0x0f 0xda - invalid */
7858/* Opcode 0xf2 0x0f 0xda - invalid */
7859
7860/** Opcode 0x0f 0xdb - pand Pq, Qq */
7861FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7862/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
7863FNIEMOP_STUB(iemOp_pand_Vx_W);
7864/* Opcode 0xf3 0x0f 0xdb - invalid */
7865/* Opcode 0xf2 0x0f 0xdb - invalid */
7866
7867/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7868FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7869/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
7870FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
7871/* Opcode 0xf3 0x0f 0xdc - invalid */
7872/* Opcode 0xf2 0x0f 0xdc - invalid */
7873
7874/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7875FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7876/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
7877FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
7878/* Opcode 0xf3 0x0f 0xdd - invalid */
7879/* Opcode 0xf2 0x0f 0xdd - invalid */
7880
7881/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7882FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7883/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
7884FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
7885/* Opcode 0xf3 0x0f 0xde - invalid */
7886/* Opcode 0xf2 0x0f 0xde - invalid */
7887
7888/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7889FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7890/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
7891FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
7892/* Opcode 0xf3 0x0f 0xdf - invalid */
7893/* Opcode 0xf2 0x0f 0xdf - invalid */
7894
7895/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7896FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7897/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
7898FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
7899/* Opcode 0xf3 0x0f 0xe0 - invalid */
7900/* Opcode 0xf2 0x0f 0xe0 - invalid */
7901
7902/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7903FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7904/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
7905FNIEMOP_STUB(iemOp_psraw_Vx_W);
7906/* Opcode 0xf3 0x0f 0xe1 - invalid */
7907/* Opcode 0xf2 0x0f 0xe1 - invalid */
7908
7909/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7910FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7911/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
7912FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
7913/* Opcode 0xf3 0x0f 0xe2 - invalid */
7914/* Opcode 0xf2 0x0f 0xe2 - invalid */
7915
7916/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7917FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7918/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
7919FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
7920/* Opcode 0xf3 0x0f 0xe3 - invalid */
7921/* Opcode 0xf2 0x0f 0xe3 - invalid */
7922
7923/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7924FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7925/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
7926FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
7927/* Opcode 0xf3 0x0f 0xe4 - invalid */
7928/* Opcode 0xf2 0x0f 0xe4 - invalid */
7929
7930/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7931FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7932/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
7933FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
7934/* Opcode 0xf3 0x0f 0xe5 - invalid */
7935/* Opcode 0xf2 0x0f 0xe5 - invalid */
7936
7937/* Opcode 0x0f 0xe6 - invalid */
7938/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
7939FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
7940/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
7941FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
7942/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
7943FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
7944
7945
7946/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7947FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7948{
7949 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7951 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7952 {
7953 /* Register, memory. */
7954 IEM_MC_BEGIN(0, 2);
7955 IEM_MC_LOCAL(uint64_t, uSrc);
7956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7957
7958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7960 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7961 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7962
7963 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7964 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7965
7966 IEM_MC_ADVANCE_RIP();
7967 IEM_MC_END();
7968 return VINF_SUCCESS;
7969 }
7970 /* The register, register encoding is invalid. */
7971 return IEMOP_RAISE_INVALID_OPCODE();
7972}
7973
7974/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
7975FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
7976{
7977 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7978 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7979 {
7980 /* Register, memory. */
7981 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
7982 IEM_MC_BEGIN(0, 2);
7983 IEM_MC_LOCAL(RTUINT128U, uSrc);
7984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7985
7986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7988 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7989 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7990
7991 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7992 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7993
7994 IEM_MC_ADVANCE_RIP();
7995 IEM_MC_END();
7996 return VINF_SUCCESS;
7997 }
7998
7999 /* The register, register encoding is invalid. */
8000 return IEMOP_RAISE_INVALID_OPCODE();
8001}
8002
8003/* Opcode 0xf3 0x0f 0xe7 - invalid */
8004/* Opcode 0xf2 0x0f 0xe7 - invalid */
8005
8006
8007/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8008FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8009/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8010FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8011/* Opcode 0xf3 0x0f 0xe8 - invalid */
8012/* Opcode 0xf2 0x0f 0xe8 - invalid */
8013
8014/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8015FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8016/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8017FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8018/* Opcode 0xf3 0x0f 0xe9 - invalid */
8019/* Opcode 0xf2 0x0f 0xe9 - invalid */
8020
8021/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8022FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8023/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8024FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8025/* Opcode 0xf3 0x0f 0xea - invalid */
8026/* Opcode 0xf2 0x0f 0xea - invalid */
8027
8028/** Opcode 0x0f 0xeb - por Pq, Qq */
8029FNIEMOP_STUB(iemOp_por_Pq_Qq);
8030/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8031FNIEMOP_STUB(iemOp_por_Vx_W);
8032/* Opcode 0xf3 0x0f 0xeb - invalid */
8033/* Opcode 0xf2 0x0f 0xeb - invalid */
8034
8035/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8036FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8037/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8038FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8039/* Opcode 0xf3 0x0f 0xec - invalid */
8040/* Opcode 0xf2 0x0f 0xec - invalid */
8041
8042/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8043FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8044/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8045FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8046/* Opcode 0xf3 0x0f 0xed - invalid */
8047/* Opcode 0xf2 0x0f 0xed - invalid */
8048
8049/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8050FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8051/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8052FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8053/* Opcode 0xf3 0x0f 0xee - invalid */
8054/* Opcode 0xf2 0x0f 0xee - invalid */
8055
8056
8057/** Opcode 0x0f 0xef - pxor Pq, Qq */
8058FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8059{
8060 IEMOP_MNEMONIC(pxor, "pxor");
8061 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8062}
8063
8064/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8065FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8066{
8067 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8068 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8069}
8070
8071/* Opcode 0xf3 0x0f 0xef - invalid */
8072/* Opcode 0xf2 0x0f 0xef - invalid */
8073
8074/* Opcode 0x0f 0xf0 - invalid */
8075/* Opcode 0x66 0x0f 0xf0 - invalid */
8076/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8077FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8078
8079/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8080FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8081/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8082FNIEMOP_STUB(iemOp_psllw_Vx_W);
8083/* Opcode 0xf2 0x0f 0xf1 - invalid */
8084
8085/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8086FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8087/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8088FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8089/* Opcode 0xf2 0x0f 0xf2 - invalid */
8090
8091/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8092FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8093/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8094FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8095/* Opcode 0xf2 0x0f 0xf3 - invalid */
8096
8097/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8098FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8099/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8100FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8101/* Opcode 0xf2 0x0f 0xf4 - invalid */
8102
8103/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8104FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8105/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8106FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8107/* Opcode 0xf2 0x0f 0xf5 - invalid */
8108
8109/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8110FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8111/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8112FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8113/* Opcode 0xf2 0x0f 0xf6 - invalid */
8114
8115/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8116FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8117/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8118FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8119/* Opcode 0xf2 0x0f 0xf7 - invalid */
8120
8121/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8122FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8123/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8124FNIEMOP_STUB(iemOp_psubb_Vx_W);
8125/* Opcode 0xf2 0x0f 0xf8 - invalid */
8126
8127/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8128FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8129/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8130FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8131/* Opcode 0xf2 0x0f 0xf9 - invalid */
8132
8133/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8134FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8135/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8136FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8137/* Opcode 0xf2 0x0f 0xfa - invalid */
8138
8139/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8140FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8141/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8142FNIEMOP_STUB(iemOp_psubq_Vx_W);
8143/* Opcode 0xf2 0x0f 0xfb - invalid */
8144
8145/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8146FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8147/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8148FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8149/* Opcode 0xf2 0x0f 0xfc - invalid */
8150
8151/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8152FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8153/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8154FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8155/* Opcode 0xf2 0x0f 0xfd - invalid */
8156
8157/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8158FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8159/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8160FNIEMOP_STUB(iemOp_paddd_Vx_W);
8161/* Opcode 0xf2 0x0f 0xfe - invalid */
8162
8163
8164/** Opcode **** 0x0f 0xff - UD0 */
8165FNIEMOP_DEF(iemOp_ud0)
8166{
8167 IEMOP_MNEMONIC(ud0, "ud0");
8168 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8169 {
8170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8171#ifndef TST_IEM_CHECK_MC
8172 RTGCPTR GCPtrEff;
8173 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8174 if (rcStrict != VINF_SUCCESS)
8175 return rcStrict;
8176#endif
8177 IEMOP_HLP_DONE_DECODING();
8178 }
8179 return IEMOP_RAISE_INVALID_OPCODE();
8180}
8181
8182
8183
8184/**
8185 * Two byte opcode map, first byte 0x0f.
8186 *
8187 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8188 * check if it needs updating as well when making changes.
8189 */
8190IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8191{
8192 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8193 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8194 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8195 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8196 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8197 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8198 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8199 /* 0x06 */ IEMOP_X4(iemOp_clts),
8200 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8201 /* 0x08 */ IEMOP_X4(iemOp_invd),
8202 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8203 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8204 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8205 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8206 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8207 /* 0x0e */ IEMOP_X4(iemOp_femms),
8208 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8209
8210 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vx_Wsd,
8211 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8212 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8213 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8214 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8215 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8216 /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
8217 /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8218 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8219 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8220 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8221 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8222 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8223 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8224 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8225 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8226
8227 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8228 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8229 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8230 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8231 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8232 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8233 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8234 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8235 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8236 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8237 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
8238 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8239 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
8240 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
8241 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8242 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8243
8244 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8245 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8246 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8247 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8248 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8249 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8250 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8251 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8252 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8253 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8254 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8255 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8256 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8257 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8258 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8259 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8260
8261 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8262 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8263 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8264 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8265 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8266 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8267 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8268 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8269 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8270 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8271 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8272 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8273 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8274 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8275 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8276 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8277
8278 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8279 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
8280 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
8281 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
8282 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8283 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8284 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8285 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8286 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
8287 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
8288 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
8289 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8290 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
8291 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
8292 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
8293 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
8294
8295 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8296 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8297 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8298 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8299 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8300 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8301 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8302 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8303 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8304 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8305 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8306 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8307 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8308 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8309 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8310 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,
8311
8312 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
8313 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8314 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8315 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8316 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8317 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8318 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8319 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8320
8321 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8322 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8323 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8324 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8325 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
8326 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
8327 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
8328 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
8329
8330 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8331 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8332 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8333 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8334 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8335 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8336 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8337 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8338 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8339 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8340 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8341 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8342 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8343 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8344 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8345 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8346
8347 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8348 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8349 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8350 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8351 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8352 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8353 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8354 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8355 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8356 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8357 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8358 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8359 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8360 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8361 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8362 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8363
8364 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8365 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8366 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8367 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8368 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8369 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8370 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8371 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8372 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8373 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8374 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8375 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8376 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8377 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8378 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8379 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8380
8381 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8382 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8383 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8384 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8385 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8386 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8387 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8388 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8389 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8390 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8391 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8392 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8393 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8394 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8395 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8396 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8397
8398 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8399 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8400 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
8401 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8402 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8403 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8404 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8405 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8406 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8407 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8408 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8409 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8410 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8411 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8412 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8413 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8414
8415 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
8416 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8417 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8418 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8419 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8420 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8421 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8422 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8423 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8424 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8425 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8426 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8427 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8428 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8429 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8430 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8431
8432 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8433 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8434 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8435 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8436 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8437 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8438 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
8439 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8440 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8441 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8442 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8443 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8444 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8445 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8446 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8447 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8448
8449 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
8450 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8451 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8452 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8453 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8454 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8455 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8456 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8457 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8458 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8459 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8460 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8461 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8462 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8463 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8464 /* 0xff */ IEMOP_X4(iemOp_ud0),
8465};
8466AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8467
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette