VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@72516

Last change on this file since 72516 was 72516, checked in by vboxsync, 7 years ago

IEM: Implemented invd for the purpose of SVM intercepting.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 332.4 KB
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 72516 2018-06-11 14:49:11Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

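/*
 * A note on the decoders that follow: nearly all of them test the ModRM byte
 * the same way, (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT), to
 * pick the register form (mod == 3); any other mod value is a memory form
 * whose effective address is produced by IEM_MC_CALC_RM_EFF_ADDR.  As a
 * worked example, bRm == 0xd9 splits into mod=3, reg=3, rm=1.
 */
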
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Common worker for opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}

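/*
 * Worked example for the dispatcher above: the encoding 0f 00 d8 has
 * bRm == 0xd8, so the reg field is 3 and g_apfnGroup6 routes to
 * iemOp_Grp6_ltr, which then sees mod == 3 and takes its register form.
 */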

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc1 (/0). */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 0xc2 (/0). */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc3 (/0). */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc4 (/0). */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xf8 (/7). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9 (/7). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

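/*
 * Group 7 dispatches on two levels: the memory forms are fully determined by
 * the reg field (see g_apfnGroup7Mem above), while the mod == 3 encodings
 * also use the rm field as an opcode extension.  For example, 0f 01 f8
 * (mod=3, reg=7, rm=0) selects swapgs.
 */
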
/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}

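/*
 * Note that the write-back and cache invalidation itself is not modelled:
 * after the CPL and SVM intercept checks, wbinvd simply advances RIP, hence
 * the "ignore for now" above.
 */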

/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}

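/*
 * 3DNow! instructions are selected by an opcode suffix byte rather than by
 * ModRM bits, and AMD defined only a sparse subset of the 256 possible
 * values, which is why the dispatch is switch-based instead of table-based.
 */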

/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

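/*
 * In the register form above a single IEM_MC_COPY_XREG_U128 suffices, while
 * the memory form bounces the value through the local uSrc; the @todo notes
 * in these movups/movupd decoders mark that extra copy as a candidate for
 * optimization.
 */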

/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

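/*
 * Note how one opcode byte covers two mnemonics above: with mod == 3,
 * 0f 12 is movhlps (high qword of the source register to the low qword of
 * the destination), while any memory form is movlps.  The 0f 16 decoder
 * further down mirrors this with movlhps/movhps.
 */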

/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

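/*
 * Unlike movsldup above, movddup only consumes the low qword of its source,
 * so the assembly helper takes a uint64_t by value rather than a reference
 * to the full 128-bit register.
 */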
1552
1553/**
1554 * @opcode 0x13
1555 * @opcodesub !11 mr/reg
1556 * @oppfx none
1557 * @opcpuid sse
1558 * @opgroup og_sse_simdfp_datamove
1559 * @opxcpttype 5
1560 * @optest op1=1 op2=2 -> op1=2
1561 * @optest op1=0 op2=-42 -> op1=-42
1562 */
1563FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1564{
1565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1566 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1567 {
1568 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1569
1570 IEM_MC_BEGIN(0, 2);
1571 IEM_MC_LOCAL(uint64_t, uSrc);
1572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1573
1574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1576 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1577 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1578
1579 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1580 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1581
1582 IEM_MC_ADVANCE_RIP();
1583 IEM_MC_END();
1584 return VINF_SUCCESS;
1585 }
1586
1587 /**
1588 * @opdone
1589 * @opmnemonic ud0f13m3
1590 * @opcode 0x13
1591 * @opcodesub 11 mr/reg
1592 * @oppfx none
1593 * @opunused immediate
1594 * @opcpuid sse
1595 * @optest ->
1596 */
1597 return IEMOP_RAISE_INVALID_OPCODE();
1598}
1599
1600
1601/**
1602 * @opcode 0x13
1603 * @opcodesub !11 mr/reg
1604 * @oppfx 0x66
1605 * @opcpuid sse2
1606 * @opgroup og_sse2_pcksclr_datamove
1607 * @opxcpttype 5
1608 * @optest op1=1 op2=2 -> op1=2
1609 * @optest op1=0 op2=-42 -> op1=-42
1610 */
1611FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1612{
1613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1614 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1615 {
1616 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1617 IEM_MC_BEGIN(0, 2);
1618 IEM_MC_LOCAL(uint64_t, uSrc);
1619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1620
1621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1623 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1624 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1625
1626 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1627 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1628
1629 IEM_MC_ADVANCE_RIP();
1630 IEM_MC_END();
1631 return VINF_SUCCESS;
1632 }
1633
1634 /**
1635 * @opdone
1636 * @opmnemonic ud660f13m3
1637 * @opcode 0x13
1638 * @opcodesub 11 mr/reg
1639 * @oppfx 0x66
1640 * @opunused immediate
1641 * @opcpuid sse
1642 * @optest ->
1643 */
1644 return IEMOP_RAISE_INVALID_OPCODE();
1645}
1646
1647
1648/**
1649 * @opmnemonic udf30f13
1650 * @opcode 0x13
1651 * @oppfx 0xf3
1652 * @opunused intel-modrm
1653 * @opcpuid sse
1654 * @optest ->
1655 * @opdone
1656 */
1657
1658/**
1659 * @opmnemonic udf20f13
1660 * @opcode 0x13
1661 * @oppfx 0xf2
1662 * @opunused intel-modrm
1663 * @opcpuid sse
1664 * @optest ->
1665 * @opdone
1666 */
1667
1668/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1669FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1670/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1671FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1672
1673/**
1674 * @opdone
1675 * @opmnemonic udf30f14
1676 * @opcode 0x14
1677 * @oppfx 0xf3
1678 * @opunused intel-modrm
1679 * @opcpuid sse
1680 * @optest ->
1681 * @opdone
1682 */
1683
1684/**
1685 * @opmnemonic udf20f14
1686 * @opcode 0x14
1687 * @oppfx 0xf2
1688 * @opunused intel-modrm
1689 * @opcpuid sse
1690 * @optest ->
1691 * @opdone
1692 */
1693
1694/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1695FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1696/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1697FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1698/* Opcode 0xf3 0x0f 0x15 - invalid */
1699/* Opcode 0xf2 0x0f 0x15 - invalid */
1700
1701/**
1702 * @opdone
1703 * @opmnemonic udf30f15
1704 * @opcode 0x15
1705 * @oppfx 0xf3
1706 * @opunused intel-modrm
1707 * @opcpuid sse
1708 * @optest ->
1709 * @opdone
1710 */
1711
1712/**
1713 * @opmnemonic udf20f15
1714 * @opcode 0x15
1715 * @oppfx 0xf2
1716 * @opunused intel-modrm
1717 * @opcpuid sse
1718 * @optest ->
1719 * @opdone
1720 */
1721
1722FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1723{
1724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1726 {
1727 /**
1728 * @opcode 0x16
1729 * @opcodesub 11 mr/reg
1730 * @oppfx none
1731 * @opcpuid sse
1732 * @opgroup og_sse_simdfp_datamove
1733 * @opxcpttype 5
1734 * @optest op1=1 op2=2 -> op1=2
1735 * @optest op1=0 op2=-42 -> op1=-42
1736 */
1737 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1738
1739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1740 IEM_MC_BEGIN(0, 1);
1741 IEM_MC_LOCAL(uint64_t, uSrc);
1742
1743 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1744 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1745 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1746 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1747
1748 IEM_MC_ADVANCE_RIP();
1749 IEM_MC_END();
1750 }
1751 else
1752 {
1753 /**
1754 * @opdone
1755 * @opcode 0x16
1756 * @opcodesub !11 mr/reg
1757 * @oppfx none
1758 * @opcpuid sse
1759 * @opgroup og_sse_simdfp_datamove
1760 * @opxcpttype 5
1761 * @optest op1=1 op2=2 -> op1=2
1762 * @optest op1=0 op2=-42 -> op1=-42
1763 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1764 */
1765 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1766
1767 IEM_MC_BEGIN(0, 2);
1768 IEM_MC_LOCAL(uint64_t, uSrc);
1769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1770
1771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1773 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1774 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1775
1776 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1777 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1778
1779 IEM_MC_ADVANCE_RIP();
1780 IEM_MC_END();
1781 }
1782 return VINF_SUCCESS;
1783}
1784
1785
1786/**
1787 * @opcode 0x16
1788 * @opcodesub !11 mr/reg
1789 * @oppfx 0x66
1790 * @opcpuid sse2
1791 * @opgroup og_sse2_pcksclr_datamove
1792 * @opxcpttype 5
1793 * @optest op1=1 op2=2 -> op1=2
1794 * @optest op1=0 op2=-42 -> op1=-42
1795 */
1796FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1797{
1798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1799 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1800 {
1801 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1802 IEM_MC_BEGIN(0, 2);
1803 IEM_MC_LOCAL(uint64_t, uSrc);
1804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1805
1806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1808 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1809 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1810
1811 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1812 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1813
1814 IEM_MC_ADVANCE_RIP();
1815 IEM_MC_END();
1816 return VINF_SUCCESS;
1817 }
1818
1819 /**
1820 * @opdone
1821 * @opmnemonic ud660f16m3
1822 * @opcode 0x16
1823 * @opcodesub 11 mr/reg
1824 * @oppfx 0x66
1825 * @opunused immediate
1826 * @opcpuid sse
1827 * @optest ->
1828 */
1829 return IEMOP_RAISE_INVALID_OPCODE();
1830}
1831
1832
1833/**
1834 * @opcode 0x16
1835 * @oppfx 0xf3
1836 * @opcpuid sse3
1837 * @opgroup og_sse3_pcksclr_datamove
1838 * @opxcpttype 4
1839 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1840 * op1=0x00000002000000020000000100000001
1841 */
1842FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1843{
1844 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1847 {
1848 /*
1849 * Register, register.
1850 */
1851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1852 IEM_MC_BEGIN(2, 0);
1853 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1854 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1855
1856 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1857 IEM_MC_PREPARE_SSE_USAGE();
1858
1859 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1860 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1861 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1862
1863 IEM_MC_ADVANCE_RIP();
1864 IEM_MC_END();
1865 }
1866 else
1867 {
1868 /*
1869 * Register, memory.
1870 */
1871 IEM_MC_BEGIN(2, 2);
1872 IEM_MC_LOCAL(RTUINT128U, uSrc);
1873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1874 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1875 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1876
1877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1879 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1880 IEM_MC_PREPARE_SSE_USAGE();
1881
1882 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1883 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1884 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1885
1886 IEM_MC_ADVANCE_RIP();
1887 IEM_MC_END();
1888 }
1889 return VINF_SUCCESS;
1890}
1891
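/*
 * For reference, a minimal sketch of the dword duplication movshdup performs,
 * assuming iemAImpl_movshdup follows the architectural definition and using
 * the RTUINT128U dword view; illustrative only, not part of the build.
 */
#if 0
static void iemSketchMovShDup(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    /* The odd (high) dword of each qword half is duplicated into the even slot. */
    puDst->au32[0] = puSrc->au32[1];
    puDst->au32[1] = puSrc->au32[1];
    puDst->au32[2] = puSrc->au32[3];
    puDst->au32[3] = puSrc->au32[3];
}
#endif
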
1892/**
1893 * @opdone
1894 * @opmnemonic udf20f16
1895 * @opcode 0x16
1896 * @oppfx 0xf2
1897 * @opunused intel-modrm
1898 * @opcpuid sse
1899 * @optest ->
1900 * @opdone
1901 */
1902
1903
1904/**
1905 * @opcode 0x17
1906 * @opcodesub !11 mr/reg
1907 * @oppfx none
1908 * @opcpuid sse
1909 * @opgroup og_sse_simdfp_datamove
1910 * @opxcpttype 5
1911 * @optest op1=1 op2=2 -> op1=2
1912 * @optest op1=0 op2=-42 -> op1=-42
1913 */
1914FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1915{
1916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1917 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1918 {
1919 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1920
1921 IEM_MC_BEGIN(0, 2);
1922 IEM_MC_LOCAL(uint64_t, uSrc);
1923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1924
1925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1927 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1928 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1929
1930 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1931 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1932
1933 IEM_MC_ADVANCE_RIP();
1934 IEM_MC_END();
1935 return VINF_SUCCESS;
1936 }
1937
1938 /**
1939 * @opdone
1940 * @opmnemonic ud0f17m3
1941 * @opcode 0x17
1942 * @opcodesub 11 mr/reg
1943 * @oppfx none
1944 * @opunused immediate
1945 * @opcpuid sse
1946 * @optest ->
1947 */
1948 return IEMOP_RAISE_INVALID_OPCODE();
1949}
1950
1951
1952/**
1953 * @opcode 0x17
1954 * @opcodesub !11 mr/reg
1955 * @oppfx 0x66
1956 * @opcpuid sse2
1957 * @opgroup og_sse2_pcksclr_datamove
1958 * @opxcpttype 5
1959 * @optest op1=1 op2=2 -> op1=2
1960 * @optest op1=0 op2=-42 -> op1=-42
1961 */
1962FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1963{
1964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1965 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1966 {
1967 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1968
1969 IEM_MC_BEGIN(0, 2);
1970 IEM_MC_LOCAL(uint64_t, uSrc);
1971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1972
1973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1975 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1976 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1977
1978 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1979 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1980
1981 IEM_MC_ADVANCE_RIP();
1982 IEM_MC_END();
1983 return VINF_SUCCESS;
1984 }
1985
1986 /**
1987 * @opdone
1988 * @opmnemonic ud660f17m3
1989 * @opcode 0x17
1990 * @opcodesub 11 mr/reg
1991 * @oppfx 0x66
1992 * @opunused immediate
1993 * @opcpuid sse
1994 * @optest ->
1995 */
1996 return IEMOP_RAISE_INVALID_OPCODE();
1997}
1998
1999
2000/**
2001 * @opdone
2002 * @opmnemonic udf30f17
2003 * @opcode 0x17
2004 * @oppfx 0xf3
2005 * @opunused intel-modrm
2006 * @opcpuid sse
2007 * @optest ->
2008 * @opdone
2009 */
2010
2011/**
2012 * @opmnemonic udf20f17
2013 * @opcode 0x17
2014 * @oppfx 0xf2
2015 * @opunused intel-modrm
2016 * @opcpuid sse
2017 * @optest ->
2018 * @opdone
2019 */
2020
2021
2022/** Opcode 0x0f 0x18. */
2023FNIEMOP_DEF(iemOp_prefetch_Grp16)
2024{
2025 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2026 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2027 {
2028 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2029 {
2030 case 4: /* Aliased to /0 for the time being according to AMD. */
2031 case 5: /* Aliased to /0 for the time being according to AMD. */
2032 case 6: /* Aliased to /0 for the time being according to AMD. */
2033 case 7: /* Aliased to /0 for the time being according to AMD. */
2034 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2035 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2036 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2037 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2039 }
2040
2041 IEM_MC_BEGIN(0, 1);
2042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2045 /* Currently a NOP. */
2046 NOREF(GCPtrEffSrc);
2047 IEM_MC_ADVANCE_RIP();
2048 IEM_MC_END();
2049 return VINF_SUCCESS;
2050 }
2051
2052 return IEMOP_RAISE_INVALID_OPCODE();
2053}
2054
2055
2056/** Opcode 0x0f 0x19..0x1f. */
2057FNIEMOP_DEF(iemOp_nop_Ev)
2058{
2059 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2062 {
2063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2064 IEM_MC_BEGIN(0, 0);
2065 IEM_MC_ADVANCE_RIP();
2066 IEM_MC_END();
2067 }
2068 else
2069 {
2070 IEM_MC_BEGIN(0, 1);
2071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2074 /* Currently a NOP. */
2075 NOREF(GCPtrEffSrc);
2076 IEM_MC_ADVANCE_RIP();
2077 IEM_MC_END();
2078 }
2079 return VINF_SUCCESS;
2080}
2081
2082
2083/** Opcode 0x0f 0x20. */
2084FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2085{
2086 /* mod is ignored, as are operand size overrides. */
2087 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2088 IEMOP_HLP_MIN_386();
2089 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2090 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2091 else
2092 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2093
2094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2095 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2096 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2097 {
2098 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2099 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2100 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2101 iCrReg |= 8;
2102 }
2103 switch (iCrReg)
2104 {
2105 case 0: case 2: case 3: case 4: case 8:
2106 break;
2107 default:
2108 return IEMOP_RAISE_INVALID_OPCODE();
2109 }
2110 IEMOP_HLP_DONE_DECODING();
2111
2112 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2113}
2114
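/*
 * A minimal sketch of how the control register index is assembled above
 * (the same logic is used by the 0x22 form below); uRexReg is assumed to be
 * 0 or 8 as kept in the decoder state. Illustrative only, not part of the
 * build.
 */
#if 0
static unsigned iemSketchCrIndexFromModRm(uint8_t bRm, uint8_t uRexReg, bool fLock)
{
    unsigned iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | uRexReg;
    if (fLock) /* LOCK encodes CR8 on CPUs with the fMovCr8In32Bit feature, else #UD. */
        iCrReg |= 8;
    return iCrReg;
}
#endif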
2115
2116/** Opcode 0x0f 0x21. */
2117FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2118{
2119 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2120 IEMOP_HLP_MIN_386();
2121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2123 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2124 return IEMOP_RAISE_INVALID_OPCODE();
2125 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2126 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2127 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2128}
2129
2130
2131/** Opcode 0x0f 0x22. */
2132FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2133{
2134 /* mod is ignored, as are operand size overrides. */
2135 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2136 IEMOP_HLP_MIN_386();
2137 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2138 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2139 else
2140 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2141
2142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2143 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2144 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2145 {
2146 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2147 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2148 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2149 iCrReg |= 8;
2150 }
2151 switch (iCrReg)
2152 {
2153 case 0: case 2: case 3: case 4: case 8:
2154 break;
2155 default:
2156 return IEMOP_RAISE_INVALID_OPCODE();
2157 }
2158 IEMOP_HLP_DONE_DECODING();
2159
2160 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2161}
2162
2163
2164/** Opcode 0x0f 0x23. */
2165FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2166{
2167 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2168 IEMOP_HLP_MIN_386();
2169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2171 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2172 return IEMOP_RAISE_INVALID_OPCODE();
2173 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2174 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2175 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2176}
2177
2178
2179/** Opcode 0x0f 0x24. */
2180FNIEMOP_DEF(iemOp_mov_Rd_Td)
2181{
2182 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2183 /** @todo works on 386 and 486. */
2184 /* The RM byte is not considered, see testcase. */
2185 return IEMOP_RAISE_INVALID_OPCODE();
2186}
2187
2188
2189/** Opcode 0x0f 0x26. */
2190FNIEMOP_DEF(iemOp_mov_Td_Rd)
2191{
2192 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2193 /** @todo works on 386 and 486. */
2194 /* The RM byte is not considered, see testcase. */
2195 return IEMOP_RAISE_INVALID_OPCODE();
2196}
2197
2198
2199/**
2200 * @opcode 0x28
2201 * @oppfx none
2202 * @opcpuid sse
2203 * @opgroup og_sse_simdfp_datamove
2204 * @opxcpttype 1
2205 * @optest op1=1 op2=2 -> op1=2
2206 * @optest op1=0 op2=-42 -> op1=-42
2207 */
2208FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2209{
2210 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2212 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2213 {
2214 /*
2215 * Register, register.
2216 */
2217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2218 IEM_MC_BEGIN(0, 0);
2219 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2220 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2221 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2222 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2223 IEM_MC_ADVANCE_RIP();
2224 IEM_MC_END();
2225 }
2226 else
2227 {
2228 /*
2229 * Register, memory.
2230 */
2231 IEM_MC_BEGIN(0, 2);
2232 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2234
2235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2237 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2238 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2239
2240 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2241 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2242
2243 IEM_MC_ADVANCE_RIP();
2244 IEM_MC_END();
2245 }
2246 return VINF_SUCCESS;
2247}
2248
2249/**
2250 * @opcode 0x28
2251 * @oppfx 66
2252 * @opcpuid sse2
2253 * @opgroup og_sse2_pcksclr_datamove
2254 * @opxcpttype 1
2255 * @optest op1=1 op2=2 -> op1=2
2256 * @optest op1=0 op2=-42 -> op1=-42
2257 */
2258FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2259{
2260 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2262 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2263 {
2264 /*
2265 * Register, register.
2266 */
2267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2268 IEM_MC_BEGIN(0, 0);
2269 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2270 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2271 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2272 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2273 IEM_MC_ADVANCE_RIP();
2274 IEM_MC_END();
2275 }
2276 else
2277 {
2278 /*
2279 * Register, memory.
2280 */
2281 IEM_MC_BEGIN(0, 2);
2282 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2284
2285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2287 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2288 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2289
2290 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2291 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2292
2293 IEM_MC_ADVANCE_RIP();
2294 IEM_MC_END();
2295 }
2296 return VINF_SUCCESS;
2297}
2298
2299/* Opcode 0xf3 0x0f 0x28 - invalid */
2300/* Opcode 0xf2 0x0f 0x28 - invalid */
2301
2302/**
2303 * @opcode 0x29
2304 * @oppfx none
2305 * @opcpuid sse
2306 * @opgroup og_sse_simdfp_datamove
2307 * @opxcpttype 1
2308 * @optest op1=1 op2=2 -> op1=2
2309 * @optest op1=0 op2=-42 -> op1=-42
2310 */
2311FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2312{
2313 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2316 {
2317 /*
2318 * Register, register.
2319 */
2320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2321 IEM_MC_BEGIN(0, 0);
2322 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2323 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2324 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2325 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2326 IEM_MC_ADVANCE_RIP();
2327 IEM_MC_END();
2328 }
2329 else
2330 {
2331 /*
2332 * Memory, register.
2333 */
2334 IEM_MC_BEGIN(0, 2);
2335 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2337
2338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2340 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2341 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2342
2343 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2344 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2345
2346 IEM_MC_ADVANCE_RIP();
2347 IEM_MC_END();
2348 }
2349 return VINF_SUCCESS;
2350}
2351
2352/**
2353 * @opcode 0x29
2354 * @oppfx 66
2355 * @opcpuid sse2
2356 * @opgroup og_sse2_pcksclr_datamove
2357 * @opxcpttype 1
2358 * @optest op1=1 op2=2 -> op1=2
2359 * @optest op1=0 op2=-42 -> op1=-42
2360 */
2361FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2362{
2363 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2365 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2366 {
2367 /*
2368 * Register, register.
2369 */
2370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2371 IEM_MC_BEGIN(0, 0);
2372 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2373 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2374 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2375 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2376 IEM_MC_ADVANCE_RIP();
2377 IEM_MC_END();
2378 }
2379 else
2380 {
2381 /*
2382 * Memory, register.
2383 */
2384 IEM_MC_BEGIN(0, 2);
2385 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2387
2388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2390 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2391 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2392
2393 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2394 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2395
2396 IEM_MC_ADVANCE_RIP();
2397 IEM_MC_END();
2398 }
2399 return VINF_SUCCESS;
2400}
2401
2402/* Opcode 0xf3 0x0f 0x29 - invalid */
2403/* Opcode 0xf2 0x0f 0x29 - invalid */
2404
2405
2406/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2407FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2408/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2409FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2410/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2411FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2412/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2413FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2414
2415
2416/**
2417 * @opcode 0x2b
2418 * @opcodesub !11 mr/reg
2419 * @oppfx none
2420 * @opcpuid sse
2421 * @opgroup og_sse1_cachect
2422 * @opxcpttype 1
2423 * @optest op1=1 op2=2 -> op1=2
2424 * @optest op1=0 op2=-42 -> op1=-42
2425 */
2426FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2427{
2428 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2430 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2431 {
2432 /*
2433 * Memory, register.
2434 */
2435 IEM_MC_BEGIN(0, 2);
2436 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2438
2439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2441 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2442 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2443
2444 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2445 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2446
2447 IEM_MC_ADVANCE_RIP();
2448 IEM_MC_END();
2449 }
2450 /* The register, register encoding is invalid. */
2451 else
2452 return IEMOP_RAISE_INVALID_OPCODE();
2453 return VINF_SUCCESS;
2454}
2455
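/*
 * Note: MOVNTPS (and MOVNTPD below) are non-temporal stores, i.e. hints to
 * bypass the caches; architecturally it is fine for IEM to implement them as
 * plain aligned 128-bit stores as done here.
 */
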
2456/**
2457 * @opcode 0x2b
2458 * @opcodesub !11 mr/reg
2459 * @oppfx 0x66
2460 * @opcpuid sse2
2461 * @opgroup og_sse2_cachect
2462 * @opxcpttype 1
2463 * @optest op1=1 op2=2 -> op1=2
2464 * @optest op1=0 op2=-42 -> op1=-42
2465 */
2466FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2467{
2468 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2470 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2471 {
2472 /*
2473 * Memory, register.
2474 */
2475 IEM_MC_BEGIN(0, 2);
2476 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2478
2479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2483
2484 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2485 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP();
2488 IEM_MC_END();
2489 }
2490 /* The register, register encoding is invalid. */
2491 else
2492 return IEMOP_RAISE_INVALID_OPCODE();
2493 return VINF_SUCCESS;
2494}
2495/* Opcode 0xf3 0x0f 0x2b - invalid */
2496/* Opcode 0xf2 0x0f 0x2b - invalid */
2497
2498
2499/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2500FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2501/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2502FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2503/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2504FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2505/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2506FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2507
2508/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2509FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2510/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2511FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2512/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2513FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2514/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2515FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2516
2517/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2518FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2519/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2520FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2521/* Opcode 0xf3 0x0f 0x2e - invalid */
2522/* Opcode 0xf2 0x0f 0x2e - invalid */
2523
2524/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2525FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2526/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2527FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2528/* Opcode 0xf3 0x0f 0x2f - invalid */
2529/* Opcode 0xf2 0x0f 0x2f - invalid */
2530
2531/** Opcode 0x0f 0x30. */
2532FNIEMOP_DEF(iemOp_wrmsr)
2533{
2534 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2536 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2537}
2538
2539
2540/** Opcode 0x0f 0x31. */
2541FNIEMOP_DEF(iemOp_rdtsc)
2542{
2543 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2545 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2546}
2547
2548
2549/** Opcode 0x0f 0x32. */
2550FNIEMOP_DEF(iemOp_rdmsr)
2551{
2552 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2554 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2555}
2556
2557
2558/** Opcode 0x0f 0x33. */
2559FNIEMOP_DEF(iemOp_rdpmc)
2560{
2561 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2563 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2564}
2565
2566
2567/** Opcode 0x0f 0x34. */
2568FNIEMOP_STUB(iemOp_sysenter);
2569/** Opcode 0x0f 0x35. */
2570FNIEMOP_STUB(iemOp_sysexit);
2571/** Opcode 0x0f 0x37. */
2572FNIEMOP_STUB(iemOp_getsec);
2573
2574
2575/** Opcode 0x0f 0x38. */
2576FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2577{
2578#ifdef IEM_WITH_THREE_0F_38
2579 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2580 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2581#else
2582 IEMOP_BITCH_ABOUT_STUB();
2583 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2584#endif
2585}
2586
2587
2588/** Opcode 0x0f 0x3a. */
2589FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2590{
2591#ifdef IEM_WITH_THREE_0F_3A
2592 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2593 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2594#else
2595 IEMOP_BITCH_ABOUT_STUB();
2596 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2597#endif
2598}
2599
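/*
 * A sketch of the table layout the two dispatchers above assume: four handler
 * entries per opcode byte, selected by the mandatory prefix index. The
 * idxPrefix encoding (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2) is an
 * assumption here; illustrative only, not part of the build.
 */
#if 0
static PFNIEMOP iemSketchThreeByteLookup(PFNIEMOP const *papfnTable, uint8_t bOpcode, uint8_t idxPrefix)
{
    return papfnTable[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif
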
2600
2601/**
2602 * Implements a conditional move.
2603 *
2604 * Wish there was an obvious way to do this where we could share and reduce
2605 * code bloat.
2606 *
2607 * @param a_Cnd The conditional "microcode" operation.
2608 */
2609#define CMOV_X(a_Cnd) \
2610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2611 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2612 { \
2613 switch (pVCpu->iem.s.enmEffOpSize) \
2614 { \
2615 case IEMMODE_16BIT: \
2616 IEM_MC_BEGIN(0, 1); \
2617 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2618 a_Cnd { \
2619 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2620 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2621 } IEM_MC_ENDIF(); \
2622 IEM_MC_ADVANCE_RIP(); \
2623 IEM_MC_END(); \
2624 return VINF_SUCCESS; \
2625 \
2626 case IEMMODE_32BIT: \
2627 IEM_MC_BEGIN(0, 1); \
2628 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2629 a_Cnd { \
2630 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2631 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2632 } IEM_MC_ELSE() { \
2633 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2634 } IEM_MC_ENDIF(); \
2635 IEM_MC_ADVANCE_RIP(); \
2636 IEM_MC_END(); \
2637 return VINF_SUCCESS; \
2638 \
2639 case IEMMODE_64BIT: \
2640 IEM_MC_BEGIN(0, 1); \
2641 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2642 a_Cnd { \
2643 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2644 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2645 } IEM_MC_ENDIF(); \
2646 IEM_MC_ADVANCE_RIP(); \
2647 IEM_MC_END(); \
2648 return VINF_SUCCESS; \
2649 \
2650 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2651 } \
2652 } \
2653 else \
2654 { \
2655 switch (pVCpu->iem.s.enmEffOpSize) \
2656 { \
2657 case IEMMODE_16BIT: \
2658 IEM_MC_BEGIN(0, 2); \
2659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2660 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2662 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2663 a_Cnd { \
2664 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2665 } IEM_MC_ENDIF(); \
2666 IEM_MC_ADVANCE_RIP(); \
2667 IEM_MC_END(); \
2668 return VINF_SUCCESS; \
2669 \
2670 case IEMMODE_32BIT: \
2671 IEM_MC_BEGIN(0, 2); \
2672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2673 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2675 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2676 a_Cnd { \
2677 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2678 } IEM_MC_ELSE() { \
2679 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2680 } IEM_MC_ENDIF(); \
2681 IEM_MC_ADVANCE_RIP(); \
2682 IEM_MC_END(); \
2683 return VINF_SUCCESS; \
2684 \
2685 case IEMMODE_64BIT: \
2686 IEM_MC_BEGIN(0, 2); \
2687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2688 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2690 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2691 a_Cnd { \
2692 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2693 } IEM_MC_ENDIF(); \
2694 IEM_MC_ADVANCE_RIP(); \
2695 IEM_MC_END(); \
2696 return VINF_SUCCESS; \
2697 \
2698 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2699 } \
2700 } do {} while (0)
2701
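/*
 * A plain C sketch of the 32-bit CMOV behaviour the macro implements: in
 * 64-bit mode the destination is written even when the condition is false,
 * which zero extends the unchanged low dword; hence the IEM_MC_ELSE()
 * branches above that clear the high half. Illustrative only, not part of
 * the build.
 */
#if 0
static uint64_t iemSketchCmov32(bool fCondition, uint64_t uDst, uint32_t uSrc)
{
    return fCondition ? uSrc : (uint32_t)uDst; /* Both paths zero extend to 64 bits. */
}
#endif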
2702
2703
2704/** Opcode 0x0f 0x40. */
2705FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2706{
2707 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2708 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2709}
2710
2711
2712/** Opcode 0x0f 0x41. */
2713FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2714{
2715 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2716 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2717}
2718
2719
2720/** Opcode 0x0f 0x42. */
2721FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2722{
2723 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2724 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2725}
2726
2727
2728/** Opcode 0x0f 0x43. */
2729FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2730{
2731 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2732 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2733}
2734
2735
2736/** Opcode 0x0f 0x44. */
2737FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2738{
2739 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2740 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2741}
2742
2743
2744/** Opcode 0x0f 0x45. */
2745FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2746{
2747 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2748 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2749}
2750
2751
2752/** Opcode 0x0f 0x46. */
2753FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2754{
2755 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2756 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2757}
2758
2759
2760/** Opcode 0x0f 0x47. */
2761FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2762{
2763 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2764 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2765}
2766
2767
2768/** Opcode 0x0f 0x48. */
2769FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2770{
2771 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2772 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2773}
2774
2775
2776/** Opcode 0x0f 0x49. */
2777FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2778{
2779 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2780 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2781}
2782
2783
2784/** Opcode 0x0f 0x4a. */
2785FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2786{
2787 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2788 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2789}
2790
2791
2792/** Opcode 0x0f 0x4b. */
2793FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2794{
2795 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2796 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2797}
2798
2799
2800/** Opcode 0x0f 0x4c. */
2801FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2802{
2803 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2804 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2805}
2806
2807
2808/** Opcode 0x0f 0x4d. */
2809FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2810{
2811 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2812 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2813}
2814
2815
2816/** Opcode 0x0f 0x4e. */
2817FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2818{
2819 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2820 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2821}
2822
2823
2824/** Opcode 0x0f 0x4f. */
2825FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2826{
2827 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2828 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2829}
2830
2831#undef CMOV_X
2832
2833/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2834FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2835/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2836FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2837/* Opcode 0xf3 0x0f 0x50 - invalid */
2838/* Opcode 0xf2 0x0f 0x50 - invalid */
2839
2840/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2841FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2842/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2843FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2844/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2845FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2846/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2847FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2848
2849/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2850FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2851/* Opcode 0x66 0x0f 0x52 - invalid */
2852/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2853FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2854/* Opcode 0xf2 0x0f 0x52 - invalid */
2855
2856/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2857FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2858/* Opcode 0x66 0x0f 0x53 - invalid */
2859/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2860FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2861/* Opcode 0xf2 0x0f 0x53 - invalid */
2862
2863/** Opcode 0x0f 0x54 - andps Vps, Wps */
2864FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2865/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2866FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2867/* Opcode 0xf3 0x0f 0x54 - invalid */
2868/* Opcode 0xf2 0x0f 0x54 - invalid */
2869
2870/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2871FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2872/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2873FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2874/* Opcode 0xf3 0x0f 0x55 - invalid */
2875/* Opcode 0xf2 0x0f 0x55 - invalid */
2876
2877/** Opcode 0x0f 0x56 - orps Vps, Wps */
2878FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2879/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2880FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2881/* Opcode 0xf3 0x0f 0x56 - invalid */
2882/* Opcode 0xf2 0x0f 0x56 - invalid */
2883
2884/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2885FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2886/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2887FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2888/* Opcode 0xf3 0x0f 0x57 - invalid */
2889/* Opcode 0xf2 0x0f 0x57 - invalid */
2890
2891/** Opcode 0x0f 0x58 - addps Vps, Wps */
2892FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2893/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2894FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2895/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2896FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2897/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2898FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2899
2900/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2901FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2902/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2903FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2904/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2905FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2906/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2907FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2908
2909/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2910FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2911/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2912FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2913/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2914FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2915/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2916FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2917
2918/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2919FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2920/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2921FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2922/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2923FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2924/* Opcode 0xf2 0x0f 0x5b - invalid */
2925
2926/** Opcode 0x0f 0x5c - subps Vps, Wps */
2927FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2928/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2929FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2930/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2931FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2932/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2933FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2934
2935/** Opcode 0x0f 0x5d - minps Vps, Wps */
2936FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2937/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2938FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2939/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2940FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2941/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2942FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2943
2944/** Opcode 0x0f 0x5e - divps Vps, Wps */
2945FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2946/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2947FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2948/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2949FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2950/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2951FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2952
2953/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2954FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2955/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2956FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2957/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2958FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2959/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2960FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2961
2962/**
2963 * Common worker for SSE2 instructions on the forms:
2964 * pxxxx xmm1, xmm2/mem128
2965 *
2966 * The 2nd operand is the first half of a register, which in the memory case
2967 * means a 64-bit memory access that must be 128-bit aligned for SSE; only
2968 * the low quadword of the source is used.
2969 *
2970 * Exceptions type 4.
2971 */
2972FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2973{
2974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2976 {
2977 /*
2978 * Register, register.
2979 */
2980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2981 IEM_MC_BEGIN(2, 0);
2982 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2983 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2984 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2985 IEM_MC_PREPARE_SSE_USAGE();
2986 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2987 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2988 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2989 IEM_MC_ADVANCE_RIP();
2990 IEM_MC_END();
2991 }
2992 else
2993 {
2994 /*
2995 * Register, memory.
2996 */
2997 IEM_MC_BEGIN(2, 2);
2998 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2999 IEM_MC_LOCAL(uint64_t, uSrc);
3000 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3002
3003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3005 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3006 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3007
3008 IEM_MC_PREPARE_SSE_USAGE();
3009 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3010 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3011
3012 IEM_MC_ADVANCE_RIP();
3013 IEM_MC_END();
3014 }
3015 return VINF_SUCCESS;
3016}
3017
3018
3019/**
3020 * Common worker for MMX instructions on the forms:
3021 * pxxxx mm1, mm2/mem32
3022 *
3023 * The 2nd operand is the first half of a register, which in the memory case
3024 * means a 32-bit memory access for MMX; only the low doubleword of the
3025 * source is used.
3026 *
3027 * Exceptions type 4.
3028 */
3029FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3030{
3031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3032 if (!pImpl->pfnU64)
3033 return IEMOP_RAISE_INVALID_OPCODE();
3034 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3035 {
3036 /*
3037 * Register, register.
3038 */
3039 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3040 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3042 IEM_MC_BEGIN(2, 0);
3043 IEM_MC_ARG(uint64_t *, pDst, 0);
3044 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3045 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3046 IEM_MC_PREPARE_FPU_USAGE();
3047 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3048 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3049 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3050 IEM_MC_ADVANCE_RIP();
3051 IEM_MC_END();
3052 }
3053 else
3054 {
3055 /*
3056 * Register, memory.
3057 */
3058 IEM_MC_BEGIN(2, 2);
3059 IEM_MC_ARG(uint64_t *, pDst, 0);
3060 IEM_MC_LOCAL(uint32_t, uSrc);
3061 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3063
3064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3066 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3067 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3068
3069 IEM_MC_PREPARE_FPU_USAGE();
3070 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3071 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3072
3073 IEM_MC_ADVANCE_RIP();
3074 IEM_MC_END();
3075 }
3076 return VINF_SUCCESS;
3077}
3078
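/*
 * A sketch of the low-half byte interleave (punpcklbw) that the two workers
 * above feed through pImpl, shown for the 64-bit MMX form; the wider forms
 * follow the same pattern. Illustrative only, not part of the build.
 */
#if 0
static uint64_t iemSketchPunpcklBw(uint64_t uDst, uint32_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= (uint64_t)((uDst >> (i * 8)) & 0xff) << (i * 16);     /* even bytes from dst */
        uResult |= (uint64_t)((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); /* odd bytes from src */
    }
    return uResult;
}
#endif
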
3079
3080/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3081FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3082{
3083 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3084 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3085}
3086
3087/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3088FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3089{
3090 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3091 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3092}
3093
3094/* Opcode 0xf3 0x0f 0x60 - invalid */
3095
3096
3097/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3098FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3099{
3100 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
3101 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3102}
3103
3104/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3105FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3106{
3107 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3108 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3109}
3110
3111/* Opcode 0xf3 0x0f 0x61 - invalid */
3112
3113
3114/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3115FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3116{
3117 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3118 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3119}
3120
3121/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3122FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3123{
3124 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3125 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3126}
3127
3128/* Opcode 0xf3 0x0f 0x62 - invalid */
3129
3130
3131
3132/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3133FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3134/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3135FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3136/* Opcode 0xf3 0x0f 0x63 - invalid */
3137
3138/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3139FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3140/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3141FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3142/* Opcode 0xf3 0x0f 0x64 - invalid */
3143
3144/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3145FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3146/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3147FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3148/* Opcode 0xf3 0x0f 0x65 - invalid */
3149
3150/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3151FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3152/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3153FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3154/* Opcode 0xf3 0x0f 0x66 - invalid */
3155
3156/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3157FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3158/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3159FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3160/* Opcode 0xf3 0x0f 0x67 - invalid */
3161
3162
3163/**
3164 * Common worker for MMX instructions on the form:
3165 * pxxxx mm1, mm2/mem64
3166 *
3167 * The 2nd operand is the second half of a register, which in the memory case
3168 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3169 * where it may read the full 128 bits or only the upper 64 bits.
3170 *
3171 * Exceptions type 4.
3172 */
3173FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3174{
3175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3176 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3177 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3178 {
3179 /*
3180 * Register, register.
3181 */
3182 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3183 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3185 IEM_MC_BEGIN(2, 0);
3186 IEM_MC_ARG(uint64_t *, pDst, 0);
3187 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3188 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3189 IEM_MC_PREPARE_FPU_USAGE();
3190 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3191 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3192 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3193 IEM_MC_ADVANCE_RIP();
3194 IEM_MC_END();
3195 }
3196 else
3197 {
3198 /*
3199 * Register, memory.
3200 */
3201 IEM_MC_BEGIN(2, 2);
3202 IEM_MC_ARG(uint64_t *, pDst, 0);
3203 IEM_MC_LOCAL(uint64_t, uSrc);
3204 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3206
3207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3209 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3210 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3211
3212 IEM_MC_PREPARE_FPU_USAGE();
3213 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3214 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3215
3216 IEM_MC_ADVANCE_RIP();
3217 IEM_MC_END();
3218 }
3219 return VINF_SUCCESS;
3220}
3221
3222
3223/**
3224 * Common worker for SSE2 instructions on the form:
3225 * pxxxx xmm1, xmm2/mem128
3226 *
3227 * The 2nd operand is the second half of a register, which in the memory case
3228 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3229 * where it may read the full 128 bits or only the upper 64 bits.
3230 *
3231 * Exceptions type 4.
3232 */
3233FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3234{
3235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3236 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3237 {
3238 /*
3239 * Register, register.
3240 */
3241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3242 IEM_MC_BEGIN(2, 0);
3243 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3244 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3245 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3246 IEM_MC_PREPARE_SSE_USAGE();
3247 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3248 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3249 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3250 IEM_MC_ADVANCE_RIP();
3251 IEM_MC_END();
3252 }
3253 else
3254 {
3255 /*
3256 * Register, memory.
3257 */
3258 IEM_MC_BEGIN(2, 2);
3259 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3260 IEM_MC_LOCAL(RTUINT128U, uSrc);
3261 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3263
3264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3266 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3267 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3268
3269 IEM_MC_PREPARE_SSE_USAGE();
3270 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3271 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3272
3273 IEM_MC_ADVANCE_RIP();
3274 IEM_MC_END();
3275 }
3276 return VINF_SUCCESS;
3277}
3278
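/*
 * A sketch of the high-half byte interleave (punpckhbw) the two workers above
 * implement via pImpl, shown for the 64-bit MMX form. Illustrative only, not
 * part of the build.
 */
#if 0
static uint64_t iemSketchPunpckhBw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> ((i + 4) * 8)) & 0xff) << (i * 16);     /* even bytes from dst */
        uResult |= ((uSrc >> ((i + 4) * 8)) & 0xff) << (i * 16 + 8); /* odd bytes from src */
    }
    return uResult;
}
#endif
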
3279
3280/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3281FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3282{
3283 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3284 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3285}
3286
3287/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3288FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3289{
3290 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3291 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3292}
3293/* Opcode 0xf3 0x0f 0x68 - invalid */
3294
3295
3296/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3297FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3298{
3299 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3300 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3301}
3302
3303/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3304FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3305{
3306 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3307 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3309}
3310/* Opcode 0xf3 0x0f 0x69 - invalid */
3311
3312
3313/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3314FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3315{
3316 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3317 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3318}
3319
3320/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3321FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3322{
3323 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3324 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3325}
3326/* Opcode 0xf3 0x0f 0x6a - invalid */
3327
3328
3329/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3330FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3331/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3332FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3333/* Opcode 0xf3 0x0f 0x6b - invalid */
3334
3335
3336/* Opcode 0x0f 0x6c - invalid */
3337
3338/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3339FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3340{
3341 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3342 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3343}
3344
3345/* Opcode 0xf3 0x0f 0x6c - invalid */
3346/* Opcode 0xf2 0x0f 0x6c - invalid */
3347
3348
3349/* Opcode 0x0f 0x6d - invalid */
3350
3351/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3352FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3353{
3354 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, W");
3355 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3356}
3357
3358/* Opcode 0xf3 0x0f 0x6d - invalid */
3359
3360
3361FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3362{
3363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3364 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3365 {
3366 /**
3367 * @opcode 0x6e
3368 * @opcodesub rex.w=1
3369 * @oppfx none
3370 * @opcpuid mmx
3371 * @opgroup og_mmx_datamove
3372 * @opxcpttype 5
3373 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3374 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3375 */
3376 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3377 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3378 {
3379 /* MMX, greg64 */
3380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3381 IEM_MC_BEGIN(0, 1);
3382 IEM_MC_LOCAL(uint64_t, u64Tmp);
3383
3384 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3385 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3386
3387 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3388 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3389 IEM_MC_FPU_TO_MMX_MODE();
3390
3391 IEM_MC_ADVANCE_RIP();
3392 IEM_MC_END();
3393 }
3394 else
3395 {
3396 /* MMX, [mem64] */
3397 IEM_MC_BEGIN(0, 2);
3398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3399 IEM_MC_LOCAL(uint64_t, u64Tmp);
3400
3401 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3403 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3404 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3405
3406 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3407 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3408 IEM_MC_FPU_TO_MMX_MODE();
3409
3410 IEM_MC_ADVANCE_RIP();
3411 IEM_MC_END();
3412 }
3413 }
3414 else
3415 {
3416 /**
3417 * @opdone
3418 * @opcode 0x6e
3419 * @opcodesub rex.w=0
3420 * @oppfx none
3421 * @opcpuid mmx
3422 * @opgroup og_mmx_datamove
3423 * @opxcpttype 5
3424 * @opfunction iemOp_movd_q_Pd_Ey
3425 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3426 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3427 */
3428 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3429 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3430 {
3431 /* MMX, greg */
3432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3433 IEM_MC_BEGIN(0, 1);
3434 IEM_MC_LOCAL(uint64_t, u64Tmp);
3435
3436 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3437 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3438
3439 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3440 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3441 IEM_MC_FPU_TO_MMX_MODE();
3442
3443 IEM_MC_ADVANCE_RIP();
3444 IEM_MC_END();
3445 }
3446 else
3447 {
3448 /* MMX, [mem] */
3449 IEM_MC_BEGIN(0, 2);
3450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3451 IEM_MC_LOCAL(uint32_t, u32Tmp);
3452
3453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3455 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3456 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3457
3458 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3459 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3460 IEM_MC_FPU_TO_MMX_MODE();
3461
3462 IEM_MC_ADVANCE_RIP();
3463 IEM_MC_END();
3464 }
3465 }
3466 return VINF_SUCCESS;
3467}
3468
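/*
 * Note: REX.W is what selects between the MOVD and MOVQ forms decoded above;
 * e.g. 0F 6E C8 is movd mm1, eax, while 48 0F 6E C8 is movq mm1, rax
 * (64-bit mode only).
 */
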
3469FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3470{
3471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3472 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3473 {
3474 /**
3475 * @opcode 0x6e
3476 * @opcodesub rex.w=1
3477 * @oppfx 0x66
3478 * @opcpuid sse2
3479 * @opgroup og_sse2_simdint_datamove
3480 * @opxcpttype 5
3481 * @optest 64-bit / op1=1 op2=2 -> op1=2
3482 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3483 */
3484 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3485 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3486 {
3487 /* XMM, greg64 */
3488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3489 IEM_MC_BEGIN(0, 1);
3490 IEM_MC_LOCAL(uint64_t, u64Tmp);
3491
3492 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3493 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3494
3495 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3496 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3497
3498 IEM_MC_ADVANCE_RIP();
3499 IEM_MC_END();
3500 }
3501 else
3502 {
3503 /* XMM, [mem64] */
3504 IEM_MC_BEGIN(0, 2);
3505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3506 IEM_MC_LOCAL(uint64_t, u64Tmp);
3507
3508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3510 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3511 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3512
3513 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3514 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3515
3516 IEM_MC_ADVANCE_RIP();
3517 IEM_MC_END();
3518 }
3519 }
3520 else
3521 {
3522 /**
3523 * @opdone
3524 * @opcode 0x6e
3525 * @opcodesub rex.w=0
3526 * @oppfx 0x66
3527 * @opcpuid sse2
3528 * @opgroup og_sse2_simdint_datamove
3529 * @opxcpttype 5
3530 * @opfunction iemOp_movd_q_Vy_Ey
3531 * @optest op1=1 op2=2 -> op1=2
3532 * @optest op1=0 op2=-42 -> op1=-42
3533 */
3534 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3535 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3536 {
3537 /* XMM, greg32 */
3538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3539 IEM_MC_BEGIN(0, 1);
3540 IEM_MC_LOCAL(uint32_t, u32Tmp);
3541
3542 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3543 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3544
3545 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3546 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3547
3548 IEM_MC_ADVANCE_RIP();
3549 IEM_MC_END();
3550 }
3551 else
3552 {
3553 /* XMM, [mem32] */
3554 IEM_MC_BEGIN(0, 2);
3555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3556 IEM_MC_LOCAL(uint32_t, u32Tmp);
3557
3558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3560 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3561 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3562
3563 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3564 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3565
3566 IEM_MC_ADVANCE_RIP();
3567 IEM_MC_END();
3568 }
3569 }
3570 return VINF_SUCCESS;
3571}
3572
3573/* Opcode 0xf3 0x0f 0x6e - invalid */
3574
3575
3576/**
3577 * @opcode 0x6f
3578 * @oppfx none
3579 * @opcpuid mmx
3580 * @opgroup og_mmx_datamove
3581 * @opxcpttype 5
3582 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3583 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3584 */
3585FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3586{
3587 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3589 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3590 {
3591 /*
3592 * Register, register.
3593 */
3594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3595 IEM_MC_BEGIN(0, 1);
3596 IEM_MC_LOCAL(uint64_t, u64Tmp);
3597
3598 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3599 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3600
3601 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3602 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3603 IEM_MC_FPU_TO_MMX_MODE();
3604
3605 IEM_MC_ADVANCE_RIP();
3606 IEM_MC_END();
3607 }
3608 else
3609 {
3610 /*
3611 * Register, memory.
3612 */
3613 IEM_MC_BEGIN(0, 2);
3614 IEM_MC_LOCAL(uint64_t, u64Tmp);
3615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3616
3617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3619 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3620 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3621
3622 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3623 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3624 IEM_MC_FPU_TO_MMX_MODE();
3625
3626 IEM_MC_ADVANCE_RIP();
3627 IEM_MC_END();
3628 }
3629 return VINF_SUCCESS;
3630}
3631
3632/**
3633 * @opcode 0x6f
3634 * @oppfx 0x66
3635 * @opcpuid sse2
3636 * @opgroup og_sse2_simdint_datamove
3637 * @opxcpttype 1
3638 * @optest op1=1 op2=2 -> op1=2
3639 * @optest op1=0 op2=-42 -> op1=-42
3640 */
3641FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3642{
3643 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3645 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3646 {
3647 /*
3648 * Register, register.
3649 */
3650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3651 IEM_MC_BEGIN(0, 0);
3652
3653 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3654 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3655
3656 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3657 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3658 IEM_MC_ADVANCE_RIP();
3659 IEM_MC_END();
3660 }
3661 else
3662 {
3663 /*
3664 * Register, memory.
3665 */
3666 IEM_MC_BEGIN(0, 2);
3667 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3669
3670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3672 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3673 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3674
3675 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3676 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3677
3678 IEM_MC_ADVANCE_RIP();
3679 IEM_MC_END();
3680 }
3681 return VINF_SUCCESS;
3682}
3683
3684/**
3685 * @opcode 0x6f
3686 * @oppfx 0xf3
3687 * @opcpuid sse2
3688 * @opgroup og_sse2_simdint_datamove
3689 * @opxcpttype 4UA
3690 * @optest op1=1 op2=2 -> op1=2
3691 * @optest op1=0 op2=-42 -> op1=-42
3692 */
3693FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3694{
3695 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3697 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3698 {
3699 /*
3700 * Register, register.
3701 */
3702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3703 IEM_MC_BEGIN(0, 0);
3704 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3705 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3706 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3707 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3708 IEM_MC_ADVANCE_RIP();
3709 IEM_MC_END();
3710 }
3711 else
3712 {
3713 /*
3714 * Register, memory.
3715 */
3716 IEM_MC_BEGIN(0, 2);
3717 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3719
3720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3722 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3723 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
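     /* Unlike movdqa above, movdqu has no 16-byte alignment requirement, so the
        plain fetch is used and no #GP(0) is raised for misaligned operands. */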
3724 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3725 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3726
3727 IEM_MC_ADVANCE_RIP();
3728 IEM_MC_END();
3729 }
3730 return VINF_SUCCESS;
3731}
3732
3733
3734/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3735FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3736{
3737 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3739 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3740 {
3741 /*
3742 * Register, register.
3743 */
3744 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3746
3747 IEM_MC_BEGIN(3, 0);
3748 IEM_MC_ARG(uint64_t *, pDst, 0);
3749 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3750 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3751 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3752 IEM_MC_PREPARE_FPU_USAGE();
3753 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3754 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3755 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3756 IEM_MC_ADVANCE_RIP();
3757 IEM_MC_END();
3758 }
3759 else
3760 {
3761 /*
3762 * Register, memory.
3763 */
3764 IEM_MC_BEGIN(3, 2);
3765 IEM_MC_ARG(uint64_t *, pDst, 0);
3766 IEM_MC_LOCAL(uint64_t, uSrc);
3767 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3769
3770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
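     /* The imm8 follows the ModR/M displacement, so it can only be fetched once
        the effective address calculation has consumed those bytes. */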
3771 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3772 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3774 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3775
3776 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3777 IEM_MC_PREPARE_FPU_USAGE();
3778 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3779 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3780
3781 IEM_MC_ADVANCE_RIP();
3782 IEM_MC_END();
3783 }
3784 return VINF_SUCCESS;
3785}
3786
3787/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3788FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3789{
3790 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3793 {
3794 /*
3795 * Register, register.
3796 */
3797 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3799
3800 IEM_MC_BEGIN(3, 0);
3801 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3802 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3803 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3804 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3805 IEM_MC_PREPARE_SSE_USAGE();
3806 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3807 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3808 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3809 IEM_MC_ADVANCE_RIP();
3810 IEM_MC_END();
3811 }
3812 else
3813 {
3814 /*
3815 * Register, memory.
3816 */
3817 IEM_MC_BEGIN(3, 2);
3818 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3819 IEM_MC_LOCAL(RTUINT128U, uSrc);
3820 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3822
3823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3824 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3825 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3827 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3828
3829 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3830 IEM_MC_PREPARE_SSE_USAGE();
3831 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3832 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3833
3834 IEM_MC_ADVANCE_RIP();
3835 IEM_MC_END();
3836 }
3837 return VINF_SUCCESS;
3838}
3839
3840/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3841FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3842{
3843 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3845 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3846 {
3847 /*
3848 * Register, register.
3849 */
3850 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3852
3853 IEM_MC_BEGIN(3, 0);
3854 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3855 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3856 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3857 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3858 IEM_MC_PREPARE_SSE_USAGE();
3859 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3860 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3861 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3862 IEM_MC_ADVANCE_RIP();
3863 IEM_MC_END();
3864 }
3865 else
3866 {
3867 /*
3868 * Register, memory.
3869 */
3870 IEM_MC_BEGIN(3, 2);
3871 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3872 IEM_MC_LOCAL(RTUINT128U, uSrc);
3873 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3875
3876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3877 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3878 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3880 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3881
3882 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3883 IEM_MC_PREPARE_SSE_USAGE();
3884 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3885 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3886
3887 IEM_MC_ADVANCE_RIP();
3888 IEM_MC_END();
3889 }
3890 return VINF_SUCCESS;
3891}
3892
3893/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3894FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3895{
3896 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3898 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3899 {
3900 /*
3901 * Register, register.
3902 */
3903 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3905
3906 IEM_MC_BEGIN(3, 0);
3907 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3908 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3909 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3910 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3911 IEM_MC_PREPARE_SSE_USAGE();
3912 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3913 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3914 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3915 IEM_MC_ADVANCE_RIP();
3916 IEM_MC_END();
3917 }
3918 else
3919 {
3920 /*
3921 * Register, memory.
3922 */
3923 IEM_MC_BEGIN(3, 2);
3924 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3925 IEM_MC_LOCAL(RTUINT128U, uSrc);
3926 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3928
3929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3930 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3931 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3933 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3934
3935 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3936 IEM_MC_PREPARE_SSE_USAGE();
3937 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3938 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3939
3940 IEM_MC_ADVANCE_RIP();
3941 IEM_MC_END();
3942 }
3943 return VINF_SUCCESS;
3944}
3945
3946
3947/** Opcode 0x0f 0x71 11/2. */
3948FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3949
3950/** Opcode 0x66 0x0f 0x71 11/2. */
3951FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3952
3953/** Opcode 0x0f 0x71 11/4. */
3954FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3955
3956/** Opcode 0x66 0x0f 0x71 11/4. */
3957FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3958
3959/** Opcode 0x0f 0x71 11/6. */
3960FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3961
3962/** Opcode 0x66 0x0f 0x71 11/6. */
3963FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3964
3965
3966/**
3967 * Group 12 jump table for register variant.
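 *
 * Indexed by the dispatcher below as reg * 4 + idxPrefix, i.e. four columns
 * per /reg value: no prefix (MMX), 0x66 (SSE), 0xf3 and 0xf2, the latter two
 * being invalid throughout this group.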
3968 */
3969IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3970{
3971 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3972 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3973 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3974 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3975 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3976 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3977 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3978 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3979};
3980AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3981
3982
3983/** Opcode 0x0f 0x71. */
3984FNIEMOP_DEF(iemOp_Grp12)
3985{
3986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3987 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3988 /* register, register */
3989 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3990 + pVCpu->iem.s.idxPrefix], bRm);
3991 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3992}
3993
3994
3995/** Opcode 0x0f 0x72 11/2. */
3996FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3997
3998/** Opcode 0x66 0x0f 0x72 11/2. */
3999FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4000
4001/** Opcode 0x0f 0x72 11/4. */
4002FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4003
4004/** Opcode 0x66 0x0f 0x72 11/4. */
4005FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4006
4007/** Opcode 0x0f 0x72 11/6. */
4008FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4009
4010/** Opcode 0x66 0x0f 0x72 11/6. */
4011FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4012
4013
4014/**
4015 * Group 13 jump table for register variant.
4016 */
4017IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4018{
4019 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4020 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4021 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4022 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4023 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4024 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4025 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4026 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4027};
4028AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4029
4030/** Opcode 0x0f 0x72. */
4031FNIEMOP_DEF(iemOp_Grp13)
4032{
4033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4034 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4035 /* register, register */
4036 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4037 + pVCpu->iem.s.idxPrefix], bRm);
4038 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4039}
4040
4041
4042/** Opcode 0x0f 0x73 11/2. */
4043FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4044
4045/** Opcode 0x66 0x0f 0x73 11/2. */
4046FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4047
4048/** Opcode 0x66 0x0f 0x73 11/3. */
4049FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4050
4051/** Opcode 0x0f 0x73 11/6. */
4052FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4053
4054/** Opcode 0x66 0x0f 0x73 11/6. */
4055FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4056
4057/** Opcode 0x66 0x0f 0x73 11/7. */
4058FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4059
4060/**
4061 * Group 14 jump table for register variant.
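 *
 * Unlike groups 12 and 13, the /3 (psrldq) and /7 (pslldq) rows are only
 * valid with the 0x66 prefix; those byte shifts have no MMX form.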
4062 */
4063IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4064{
4065 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4066 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4067 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4068 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4069 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4070 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4071 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4072 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4073};
4074AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4075
4076
4077/** Opcode 0x0f 0x73. */
4078FNIEMOP_DEF(iemOp_Grp14)
4079{
4080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4081 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4082 /* register, register */
4083 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4084 + pVCpu->iem.s.idxPrefix], bRm);
4085 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4086}
4087
4088
4089/**
4090 * Common worker for MMX instructions of the form:
4091 * pxxx mm1, mm2/mem64
4092 */
4093FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4094{
4095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4096 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4097 {
4098 /*
4099 * Register, register.
4100 */
4101 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4102 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4104 IEM_MC_BEGIN(2, 0);
4105 IEM_MC_ARG(uint64_t *, pDst, 0);
4106 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4107 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4108 IEM_MC_PREPARE_FPU_USAGE();
4109 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4110 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4111 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4112 IEM_MC_ADVANCE_RIP();
4113 IEM_MC_END();
4114 }
4115 else
4116 {
4117 /*
4118 * Register, memory.
4119 */
4120 IEM_MC_BEGIN(2, 2);
4121 IEM_MC_ARG(uint64_t *, pDst, 0);
4122 IEM_MC_LOCAL(uint64_t, uSrc);
4123 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4125
4126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4128 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
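     /* The operand is fetched before IEM_MC_PREPARE_FPU_USAGE so any #PF/#GP on
        the memory access is raised before the FPU state is touched. */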
4129 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4130
4131 IEM_MC_PREPARE_FPU_USAGE();
4132 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4133 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4134
4135 IEM_MC_ADVANCE_RIP();
4136 IEM_MC_END();
4137 }
4138 return VINF_SUCCESS;
4139}
4140
4141
4142/**
4143 * Common worker for SSE2 instructions of the form:
4144 * pxxx xmm1, xmm2/mem128
4145 *
4146 * Proper alignment of the 128-bit operand is enforced.
4147 * Exceptions type 4. SSE2 cpuid checks.
4148 */
4149FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4150{
4151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4153 {
4154 /*
4155 * Register, register.
4156 */
4157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4158 IEM_MC_BEGIN(2, 0);
4159 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4160 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4161 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4162 IEM_MC_PREPARE_SSE_USAGE();
4163 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4164 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4165 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4166 IEM_MC_ADVANCE_RIP();
4167 IEM_MC_END();
4168 }
4169 else
4170 {
4171 /*
4172 * Register, memory.
4173 */
4174 IEM_MC_BEGIN(2, 2);
4175 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4176 IEM_MC_LOCAL(RTUINT128U, uSrc);
4177 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4179
4180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4182 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
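     /* Aligned fetch: a 128-bit SSE memory operand that is not 16-byte aligned
        raises #GP(0), as noted in the worker description above. */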
4183 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4184
4185 IEM_MC_PREPARE_SSE_USAGE();
4186 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4187 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4188
4189 IEM_MC_ADVANCE_RIP();
4190 IEM_MC_END();
4191 }
4192 return VINF_SUCCESS;
4193}
4194
4195
4196/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4197FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4198{
4199 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4200 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4201}
4202
4203/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4204FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4205{
4206 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4207 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4208}
4209
4210/* Opcode 0xf3 0x0f 0x74 - invalid */
4211/* Opcode 0xf2 0x0f 0x74 - invalid */
4212
4213
4214/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4215FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4216{
4217 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4218 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4219}
4220
4221/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4222FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4223{
4224 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4225 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4226}
4227
4228/* Opcode 0xf3 0x0f 0x75 - invalid */
4229/* Opcode 0xf2 0x0f 0x75 - invalid */
4230
4231
4232/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4233FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4234{
4235 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4236 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4237}
4238
4239/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4240FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4241{
4242 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4243 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4244}
4245
4246/* Opcode 0xf3 0x0f 0x76 - invalid */
4247/* Opcode 0xf2 0x0f 0x76 - invalid */
4248
4249
4250/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4251FNIEMOP_DEF(iemOp_emms)
4252{
4253 IEMOP_MNEMONIC(emms, "emms");
4254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4255
4256 IEM_MC_BEGIN(0, 0);
4257 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4258 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4259 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
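     /* Leaving MMX mode marks all eight x87 registers as empty again. */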
4260 IEM_MC_FPU_FROM_MMX_MODE();
4261 IEM_MC_ADVANCE_RIP();
4262 IEM_MC_END();
4263 return VINF_SUCCESS;
4264}
4265
4266/* Opcode 0x66 0x0f 0x77 - invalid */
4267/* Opcode 0xf3 0x0f 0x77 - invalid */
4268/* Opcode 0xf2 0x0f 0x77 - invalid */
4269
4270/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4271FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4272/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4273FNIEMOP_STUB(iemOp_AmdGrp17);
4274/* Opcode 0xf3 0x0f 0x78 - invalid */
4275/* Opcode 0xf2 0x0f 0x78 - invalid */
4276
4277/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4278FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4279/* Opcode 0x66 0x0f 0x79 - invalid */
4280/* Opcode 0xf3 0x0f 0x79 - invalid */
4281/* Opcode 0xf2 0x0f 0x79 - invalid */
4282
4283/* Opcode 0x0f 0x7a - invalid */
4284/* Opcode 0x66 0x0f 0x7a - invalid */
4285/* Opcode 0xf3 0x0f 0x7a - invalid */
4286/* Opcode 0xf2 0x0f 0x7a - invalid */
4287
4288/* Opcode 0x0f 0x7b - invalid */
4289/* Opcode 0x66 0x0f 0x7b - invalid */
4290/* Opcode 0xf3 0x0f 0x7b - invalid */
4291/* Opcode 0xf2 0x0f 0x7b - invalid */
4292
4293/* Opcode 0x0f 0x7c - invalid */
4294/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4295FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4296/* Opcode 0xf3 0x0f 0x7c - invalid */
4297/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4298FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4299
4300/* Opcode 0x0f 0x7d - invalid */
4301/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4302FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4303/* Opcode 0xf3 0x0f 0x7d - invalid */
4304/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4305FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4306
4307
4308/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4309FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4310{
4311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4312 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4313 {
4314 /**
4315 * @opcode 0x7e
4316 * @opcodesub rex.w=1
4317 * @oppfx none
4318 * @opcpuid mmx
4319 * @opgroup og_mmx_datamove
4320 * @opxcpttype 5
4321 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4322 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4323 */
4324 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4325 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4326 {
4327 /* greg64, MMX */
4328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4329 IEM_MC_BEGIN(0, 1);
4330 IEM_MC_LOCAL(uint64_t, u64Tmp);
4331
4332 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4333 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4334
4335 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4336 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4337 IEM_MC_FPU_TO_MMX_MODE();
4338
4339 IEM_MC_ADVANCE_RIP();
4340 IEM_MC_END();
4341 }
4342 else
4343 {
4344 /* [mem64], MMX */
4345 IEM_MC_BEGIN(0, 2);
4346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4347 IEM_MC_LOCAL(uint64_t, u64Tmp);
4348
4349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4351 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4352 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4353
4354 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4355 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4356 IEM_MC_FPU_TO_MMX_MODE();
4357
4358 IEM_MC_ADVANCE_RIP();
4359 IEM_MC_END();
4360 }
4361 }
4362 else
4363 {
4364 /**
4365 * @opdone
4366 * @opcode 0x7e
4367 * @opcodesub rex.w=0
4368 * @oppfx none
4369 * @opcpuid mmx
4370 * @opgroup og_mmx_datamove
4371 * @opxcpttype 5
4372 * @opfunction iemOp_movd_q_Ey_Pd
4373 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4374 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4375 */
4376 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4377 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4378 {
4379 /* greg32, MMX */
4380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4381 IEM_MC_BEGIN(0, 1);
4382 IEM_MC_LOCAL(uint32_t, u32Tmp);
4383
4384 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4385 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4386
4387 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4388 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4389 IEM_MC_FPU_TO_MMX_MODE();
4390
4391 IEM_MC_ADVANCE_RIP();
4392 IEM_MC_END();
4393 }
4394 else
4395 {
4396 /* [mem32], MMX */
4397 IEM_MC_BEGIN(0, 2);
4398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4399 IEM_MC_LOCAL(uint32_t, u32Tmp);
4400
4401 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4403 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4404 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4405
4406 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4407 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4408 IEM_MC_FPU_TO_MMX_MODE();
4409
4410 IEM_MC_ADVANCE_RIP();
4411 IEM_MC_END();
4412 }
4413 }
4414 return VINF_SUCCESS;
4415
4416}
4417
4418
4419FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4420{
4421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4422 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4423 {
4424 /**
4425 * @opcode 0x7e
4426 * @opcodesub rex.w=1
4427 * @oppfx 0x66
4428 * @opcpuid sse2
4429 * @opgroup og_sse2_simdint_datamove
4430 * @opxcpttype 5
4431 * @optest 64-bit / op1=1 op2=2 -> op1=2
4432 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4433 */
4434 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4435 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4436 {
4437 /* greg64, XMM */
4438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4439 IEM_MC_BEGIN(0, 1);
4440 IEM_MC_LOCAL(uint64_t, u64Tmp);
4441
4442 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4443 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4444
4445 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4446 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4447
4448 IEM_MC_ADVANCE_RIP();
4449 IEM_MC_END();
4450 }
4451 else
4452 {
4453 /* [mem64], XMM */
4454 IEM_MC_BEGIN(0, 2);
4455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4456 IEM_MC_LOCAL(uint64_t, u64Tmp);
4457
4458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4460 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4461 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4462
4463 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4464 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4465
4466 IEM_MC_ADVANCE_RIP();
4467 IEM_MC_END();
4468 }
4469 }
4470 else
4471 {
4472 /**
4473 * @opdone
4474 * @opcode 0x7e
4475 * @opcodesub rex.w=0
4476 * @oppfx 0x66
4477 * @opcpuid sse2
4478 * @opgroup og_sse2_simdint_datamove
4479 * @opxcpttype 5
4480 * @opfunction iemOp_movd_q_Ey_Vy
4481 * @optest op1=1 op2=2 -> op1=2
4482 * @optest op1=0 op2=-42 -> op1=-42
4483 */
4484 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4485 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4486 {
4487 /* greg32, XMM */
4488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4489 IEM_MC_BEGIN(0, 1);
4490 IEM_MC_LOCAL(uint32_t, u32Tmp);
4491
4492 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4493 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4494
4495 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4496 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4497
4498 IEM_MC_ADVANCE_RIP();
4499 IEM_MC_END();
4500 }
4501 else
4502 {
4503 /* [mem32], XMM */
4504 IEM_MC_BEGIN(0, 2);
4505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4506 IEM_MC_LOCAL(uint32_t, u32Tmp);
4507
4508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4510 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4511 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4512
4513 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4514 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4515
4516 IEM_MC_ADVANCE_RIP();
4517 IEM_MC_END();
4518 }
4519 }
4520 return VINF_SUCCESS;
4521
4522}
4523
4524/**
4525 * @opcode 0x7e
4526 * @oppfx 0xf3
4527 * @opcpuid sse2
4528 * @opgroup og_sse2_pcksclr_datamove
4529 * @opxcpttype none
4530 * @optest op1=1 op2=2 -> op1=2
4531 * @optest op1=0 op2=-42 -> op1=-42
4532 */
4533FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4534{
4535 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4537 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4538 {
4539 /*
4540 * Register, register.
4541 */
4542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4543 IEM_MC_BEGIN(0, 2);
4544 IEM_MC_LOCAL(uint64_t, uSrc);
4545
4546 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4547 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4548
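     /* movq writes the low quadword and zero-extends into the high quadword,
        hence the ZX_U128 store. */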
4549 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4550 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4551
4552 IEM_MC_ADVANCE_RIP();
4553 IEM_MC_END();
4554 }
4555 else
4556 {
4557 /*
4558 * Register, memory.
4559 */
4560 IEM_MC_BEGIN(0, 2);
4561 IEM_MC_LOCAL(uint64_t, uSrc);
4562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4563
4564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4566 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4567 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4568
4569 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4570 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4571
4572 IEM_MC_ADVANCE_RIP();
4573 IEM_MC_END();
4574 }
4575 return VINF_SUCCESS;
4576}
4577
4578/* Opcode 0xf2 0x0f 0x7e - invalid */
4579
4580
4581/** Opcode 0x0f 0x7f - movq Qq, Pq */
4582FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4583{
4584 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4587 {
4588 /*
4589 * Register, register.
4590 */
4591 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4592 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4594 IEM_MC_BEGIN(0, 1);
4595 IEM_MC_LOCAL(uint64_t, u64Tmp);
4596 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4597 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4598 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4599 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
     IEM_MC_FPU_TO_MMX_MODE();
4600 IEM_MC_ADVANCE_RIP();
4601 IEM_MC_END();
4602 }
4603 else
4604 {
4605 /*
4606 * Memory, register.
4607 */
4608 IEM_MC_BEGIN(0, 2);
4609 IEM_MC_LOCAL(uint64_t, u64Tmp);
4610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4611
4612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4614 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4615 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4616
4617 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4618 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
     IEM_MC_FPU_TO_MMX_MODE();
4619
4620 IEM_MC_ADVANCE_RIP();
4621 IEM_MC_END();
4622 }
4623 return VINF_SUCCESS;
4624}
4625
4626/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4627FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4628{
4629 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4631 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4632 {
4633 /*
4634 * Register, register.
4635 */
4636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4637 IEM_MC_BEGIN(0, 0);
4638 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4639 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4640 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4641 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4642 IEM_MC_ADVANCE_RIP();
4643 IEM_MC_END();
4644 }
4645 else
4646 {
4647 /*
4648 * Memory, register.
4649 */
4650 IEM_MC_BEGIN(0, 2);
4651 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4653
4654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4656 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4657 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4658
4659 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4660 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4661
4662 IEM_MC_ADVANCE_RIP();
4663 IEM_MC_END();
4664 }
4665 return VINF_SUCCESS;
4666}
4667
4668/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4669FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4670{
4671 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4673 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4674 {
4675 /*
4676 * Register, register.
4677 */
4678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4679 IEM_MC_BEGIN(0, 0);
4680 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4681 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4682 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4683 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4684 IEM_MC_ADVANCE_RIP();
4685 IEM_MC_END();
4686 }
4687 else
4688 {
4689 /*
4690 * Memory, register.
4691 */
4692 IEM_MC_BEGIN(0, 2);
4693 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4695
4696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4699 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4700
4701 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4702 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4703
4704 IEM_MC_ADVANCE_RIP();
4705 IEM_MC_END();
4706 }
4707 return VINF_SUCCESS;
4708}
4709
4710/* Opcode 0xf2 0x0f 0x7f - invalid */
4711
4712
4713
4714/** Opcode 0x0f 0x80. */
4715FNIEMOP_DEF(iemOp_jo_Jv)
4716{
4717 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4718 IEMOP_HLP_MIN_386();
4719 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
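     /* In 64-bit mode near branches default to 64-bit operand size, so everything
        but the 16-bit case is handled below with a sign-extended 32-bit displacement. */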
4720 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4721 {
4722 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4724
4725 IEM_MC_BEGIN(0, 0);
4726 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4727 IEM_MC_REL_JMP_S16(i16Imm);
4728 } IEM_MC_ELSE() {
4729 IEM_MC_ADVANCE_RIP();
4730 } IEM_MC_ENDIF();
4731 IEM_MC_END();
4732 }
4733 else
4734 {
4735 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4737
4738 IEM_MC_BEGIN(0, 0);
4739 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4740 IEM_MC_REL_JMP_S32(i32Imm);
4741 } IEM_MC_ELSE() {
4742 IEM_MC_ADVANCE_RIP();
4743 } IEM_MC_ENDIF();
4744 IEM_MC_END();
4745 }
4746 return VINF_SUCCESS;
4747}
4748
4749
4750/** Opcode 0x0f 0x81. */
4751FNIEMOP_DEF(iemOp_jno_Jv)
4752{
4753 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4754 IEMOP_HLP_MIN_386();
4755 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4756 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4757 {
4758 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4760
4761 IEM_MC_BEGIN(0, 0);
4762 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4763 IEM_MC_ADVANCE_RIP();
4764 } IEM_MC_ELSE() {
4765 IEM_MC_REL_JMP_S16(i16Imm);
4766 } IEM_MC_ENDIF();
4767 IEM_MC_END();
4768 }
4769 else
4770 {
4771 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4773
4774 IEM_MC_BEGIN(0, 0);
4775 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4776 IEM_MC_ADVANCE_RIP();
4777 } IEM_MC_ELSE() {
4778 IEM_MC_REL_JMP_S32(i32Imm);
4779 } IEM_MC_ENDIF();
4780 IEM_MC_END();
4781 }
4782 return VINF_SUCCESS;
4783}
4784
4785
4786/** Opcode 0x0f 0x82. */
4787FNIEMOP_DEF(iemOp_jc_Jv)
4788{
4789 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4790 IEMOP_HLP_MIN_386();
4791 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4792 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4793 {
4794 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4796
4797 IEM_MC_BEGIN(0, 0);
4798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4799 IEM_MC_REL_JMP_S16(i16Imm);
4800 } IEM_MC_ELSE() {
4801 IEM_MC_ADVANCE_RIP();
4802 } IEM_MC_ENDIF();
4803 IEM_MC_END();
4804 }
4805 else
4806 {
4807 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4809
4810 IEM_MC_BEGIN(0, 0);
4811 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4812 IEM_MC_REL_JMP_S32(i32Imm);
4813 } IEM_MC_ELSE() {
4814 IEM_MC_ADVANCE_RIP();
4815 } IEM_MC_ENDIF();
4816 IEM_MC_END();
4817 }
4818 return VINF_SUCCESS;
4819}
4820
4821
4822/** Opcode 0x0f 0x83. */
4823FNIEMOP_DEF(iemOp_jnc_Jv)
4824{
4825 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4826 IEMOP_HLP_MIN_386();
4827 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4828 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4829 {
4830 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4832
4833 IEM_MC_BEGIN(0, 0);
4834 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4835 IEM_MC_ADVANCE_RIP();
4836 } IEM_MC_ELSE() {
4837 IEM_MC_REL_JMP_S16(i16Imm);
4838 } IEM_MC_ENDIF();
4839 IEM_MC_END();
4840 }
4841 else
4842 {
4843 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4845
4846 IEM_MC_BEGIN(0, 0);
4847 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4848 IEM_MC_ADVANCE_RIP();
4849 } IEM_MC_ELSE() {
4850 IEM_MC_REL_JMP_S32(i32Imm);
4851 } IEM_MC_ENDIF();
4852 IEM_MC_END();
4853 }
4854 return VINF_SUCCESS;
4855}
4856
4857
4858/** Opcode 0x0f 0x84. */
4859FNIEMOP_DEF(iemOp_je_Jv)
4860{
4861 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4862 IEMOP_HLP_MIN_386();
4863 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4864 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4865 {
4866 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4868
4869 IEM_MC_BEGIN(0, 0);
4870 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4871 IEM_MC_REL_JMP_S16(i16Imm);
4872 } IEM_MC_ELSE() {
4873 IEM_MC_ADVANCE_RIP();
4874 } IEM_MC_ENDIF();
4875 IEM_MC_END();
4876 }
4877 else
4878 {
4879 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4881
4882 IEM_MC_BEGIN(0, 0);
4883 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4884 IEM_MC_REL_JMP_S32(i32Imm);
4885 } IEM_MC_ELSE() {
4886 IEM_MC_ADVANCE_RIP();
4887 } IEM_MC_ENDIF();
4888 IEM_MC_END();
4889 }
4890 return VINF_SUCCESS;
4891}
4892
4893
4894/** Opcode 0x0f 0x85. */
4895FNIEMOP_DEF(iemOp_jne_Jv)
4896{
4897 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4898 IEMOP_HLP_MIN_386();
4899 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4900 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4901 {
4902 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4904
4905 IEM_MC_BEGIN(0, 0);
4906 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4907 IEM_MC_ADVANCE_RIP();
4908 } IEM_MC_ELSE() {
4909 IEM_MC_REL_JMP_S16(i16Imm);
4910 } IEM_MC_ENDIF();
4911 IEM_MC_END();
4912 }
4913 else
4914 {
4915 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4917
4918 IEM_MC_BEGIN(0, 0);
4919 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4920 IEM_MC_ADVANCE_RIP();
4921 } IEM_MC_ELSE() {
4922 IEM_MC_REL_JMP_S32(i32Imm);
4923 } IEM_MC_ENDIF();
4924 IEM_MC_END();
4925 }
4926 return VINF_SUCCESS;
4927}
4928
4929
4930/** Opcode 0x0f 0x86. */
4931FNIEMOP_DEF(iemOp_jbe_Jv)
4932{
4933 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4934 IEMOP_HLP_MIN_386();
4935 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4936 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4937 {
4938 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4940
4941 IEM_MC_BEGIN(0, 0);
4942 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4943 IEM_MC_REL_JMP_S16(i16Imm);
4944 } IEM_MC_ELSE() {
4945 IEM_MC_ADVANCE_RIP();
4946 } IEM_MC_ENDIF();
4947 IEM_MC_END();
4948 }
4949 else
4950 {
4951 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4953
4954 IEM_MC_BEGIN(0, 0);
4955 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4956 IEM_MC_REL_JMP_S32(i32Imm);
4957 } IEM_MC_ELSE() {
4958 IEM_MC_ADVANCE_RIP();
4959 } IEM_MC_ENDIF();
4960 IEM_MC_END();
4961 }
4962 return VINF_SUCCESS;
4963}
4964
4965
4966/** Opcode 0x0f 0x87. */
4967FNIEMOP_DEF(iemOp_jnbe_Jv)
4968{
4969 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4970 IEMOP_HLP_MIN_386();
4971 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4972 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4973 {
4974 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4976
4977 IEM_MC_BEGIN(0, 0);
4978 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4979 IEM_MC_ADVANCE_RIP();
4980 } IEM_MC_ELSE() {
4981 IEM_MC_REL_JMP_S16(i16Imm);
4982 } IEM_MC_ENDIF();
4983 IEM_MC_END();
4984 }
4985 else
4986 {
4987 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4989
4990 IEM_MC_BEGIN(0, 0);
4991 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4992 IEM_MC_ADVANCE_RIP();
4993 } IEM_MC_ELSE() {
4994 IEM_MC_REL_JMP_S32(i32Imm);
4995 } IEM_MC_ENDIF();
4996 IEM_MC_END();
4997 }
4998 return VINF_SUCCESS;
4999}
5000
5001
5002/** Opcode 0x0f 0x88. */
5003FNIEMOP_DEF(iemOp_js_Jv)
5004{
5005 IEMOP_MNEMONIC(js_Jv, "js Jv");
5006 IEMOP_HLP_MIN_386();
5007 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5008 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5009 {
5010 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5012
5013 IEM_MC_BEGIN(0, 0);
5014 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5015 IEM_MC_REL_JMP_S16(i16Imm);
5016 } IEM_MC_ELSE() {
5017 IEM_MC_ADVANCE_RIP();
5018 } IEM_MC_ENDIF();
5019 IEM_MC_END();
5020 }
5021 else
5022 {
5023 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5025
5026 IEM_MC_BEGIN(0, 0);
5027 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5028 IEM_MC_REL_JMP_S32(i32Imm);
5029 } IEM_MC_ELSE() {
5030 IEM_MC_ADVANCE_RIP();
5031 } IEM_MC_ENDIF();
5032 IEM_MC_END();
5033 }
5034 return VINF_SUCCESS;
5035}
5036
5037
5038/** Opcode 0x0f 0x89. */
5039FNIEMOP_DEF(iemOp_jns_Jv)
5040{
5041 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5042 IEMOP_HLP_MIN_386();
5043 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5044 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5045 {
5046 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5048
5049 IEM_MC_BEGIN(0, 0);
5050 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5051 IEM_MC_ADVANCE_RIP();
5052 } IEM_MC_ELSE() {
5053 IEM_MC_REL_JMP_S16(i16Imm);
5054 } IEM_MC_ENDIF();
5055 IEM_MC_END();
5056 }
5057 else
5058 {
5059 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5061
5062 IEM_MC_BEGIN(0, 0);
5063 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5064 IEM_MC_ADVANCE_RIP();
5065 } IEM_MC_ELSE() {
5066 IEM_MC_REL_JMP_S32(i32Imm);
5067 } IEM_MC_ENDIF();
5068 IEM_MC_END();
5069 }
5070 return VINF_SUCCESS;
5071}
5072
5073
5074/** Opcode 0x0f 0x8a. */
5075FNIEMOP_DEF(iemOp_jp_Jv)
5076{
5077 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5078 IEMOP_HLP_MIN_386();
5079 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5080 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5081 {
5082 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5084
5085 IEM_MC_BEGIN(0, 0);
5086 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5087 IEM_MC_REL_JMP_S16(i16Imm);
5088 } IEM_MC_ELSE() {
5089 IEM_MC_ADVANCE_RIP();
5090 } IEM_MC_ENDIF();
5091 IEM_MC_END();
5092 }
5093 else
5094 {
5095 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5097
5098 IEM_MC_BEGIN(0, 0);
5099 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5100 IEM_MC_REL_JMP_S32(i32Imm);
5101 } IEM_MC_ELSE() {
5102 IEM_MC_ADVANCE_RIP();
5103 } IEM_MC_ENDIF();
5104 IEM_MC_END();
5105 }
5106 return VINF_SUCCESS;
5107}
5108
5109
5110/** Opcode 0x0f 0x8b. */
5111FNIEMOP_DEF(iemOp_jnp_Jv)
5112{
5113 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5114 IEMOP_HLP_MIN_386();
5115 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5116 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5117 {
5118 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5120
5121 IEM_MC_BEGIN(0, 0);
5122 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5123 IEM_MC_ADVANCE_RIP();
5124 } IEM_MC_ELSE() {
5125 IEM_MC_REL_JMP_S16(i16Imm);
5126 } IEM_MC_ENDIF();
5127 IEM_MC_END();
5128 }
5129 else
5130 {
5131 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5133
5134 IEM_MC_BEGIN(0, 0);
5135 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5136 IEM_MC_ADVANCE_RIP();
5137 } IEM_MC_ELSE() {
5138 IEM_MC_REL_JMP_S32(i32Imm);
5139 } IEM_MC_ENDIF();
5140 IEM_MC_END();
5141 }
5142 return VINF_SUCCESS;
5143}
5144
5145
5146/** Opcode 0x0f 0x8c. */
5147FNIEMOP_DEF(iemOp_jl_Jv)
5148{
5149 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5150 IEMOP_HLP_MIN_386();
5151 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5152 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5153 {
5154 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5156
5157 IEM_MC_BEGIN(0, 0);
5158 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5159 IEM_MC_REL_JMP_S16(i16Imm);
5160 } IEM_MC_ELSE() {
5161 IEM_MC_ADVANCE_RIP();
5162 } IEM_MC_ENDIF();
5163 IEM_MC_END();
5164 }
5165 else
5166 {
5167 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5169
5170 IEM_MC_BEGIN(0, 0);
5171 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5172 IEM_MC_REL_JMP_S32(i32Imm);
5173 } IEM_MC_ELSE() {
5174 IEM_MC_ADVANCE_RIP();
5175 } IEM_MC_ENDIF();
5176 IEM_MC_END();
5177 }
5178 return VINF_SUCCESS;
5179}
5180
5181
5182/** Opcode 0x0f 0x8d. */
5183FNIEMOP_DEF(iemOp_jnl_Jv)
5184{
5185 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5186 IEMOP_HLP_MIN_386();
5187 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5188 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5189 {
5190 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5192
5193 IEM_MC_BEGIN(0, 0);
5194 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5195 IEM_MC_ADVANCE_RIP();
5196 } IEM_MC_ELSE() {
5197 IEM_MC_REL_JMP_S16(i16Imm);
5198 } IEM_MC_ENDIF();
5199 IEM_MC_END();
5200 }
5201 else
5202 {
5203 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5205
5206 IEM_MC_BEGIN(0, 0);
5207 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5208 IEM_MC_ADVANCE_RIP();
5209 } IEM_MC_ELSE() {
5210 IEM_MC_REL_JMP_S32(i32Imm);
5211 } IEM_MC_ENDIF();
5212 IEM_MC_END();
5213 }
5214 return VINF_SUCCESS;
5215}
5216
5217
5218/** Opcode 0x0f 0x8e. */
5219FNIEMOP_DEF(iemOp_jle_Jv)
5220{
5221 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5222 IEMOP_HLP_MIN_386();
5223 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5224 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5225 {
5226 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5228
5229 IEM_MC_BEGIN(0, 0);
5230 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5231 IEM_MC_REL_JMP_S16(i16Imm);
5232 } IEM_MC_ELSE() {
5233 IEM_MC_ADVANCE_RIP();
5234 } IEM_MC_ENDIF();
5235 IEM_MC_END();
5236 }
5237 else
5238 {
5239 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5241
5242 IEM_MC_BEGIN(0, 0);
5243 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5244 IEM_MC_REL_JMP_S32(i32Imm);
5245 } IEM_MC_ELSE() {
5246 IEM_MC_ADVANCE_RIP();
5247 } IEM_MC_ENDIF();
5248 IEM_MC_END();
5249 }
5250 return VINF_SUCCESS;
5251}
5252
5253
5254/** Opcode 0x0f 0x8f. */
5255FNIEMOP_DEF(iemOp_jnle_Jv)
5256{
5257 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5258 IEMOP_HLP_MIN_386();
5259 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5260 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5261 {
5262 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5264
5265 IEM_MC_BEGIN(0, 0);
5266 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5267 IEM_MC_ADVANCE_RIP();
5268 } IEM_MC_ELSE() {
5269 IEM_MC_REL_JMP_S16(i16Imm);
5270 } IEM_MC_ENDIF();
5271 IEM_MC_END();
5272 }
5273 else
5274 {
5275 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5277
5278 IEM_MC_BEGIN(0, 0);
5279 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5280 IEM_MC_ADVANCE_RIP();
5281 } IEM_MC_ELSE() {
5282 IEM_MC_REL_JMP_S32(i32Imm);
5283 } IEM_MC_ENDIF();
5284 IEM_MC_END();
5285 }
5286 return VINF_SUCCESS;
5287}
5288
5289
5290/** Opcode 0x0f 0x90. */
5291FNIEMOP_DEF(iemOp_seto_Eb)
5292{
5293 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5294 IEMOP_HLP_MIN_386();
5295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5296
5297 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5298 * any way. AMD says it's "unused", whatever that means. We're
5299 * ignoring for now. */
5300 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5301 {
5302 /* register target */
5303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5304 IEM_MC_BEGIN(0, 0);
5305 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5306 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5307 } IEM_MC_ELSE() {
5308 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5309 } IEM_MC_ENDIF();
5310 IEM_MC_ADVANCE_RIP();
5311 IEM_MC_END();
5312 }
5313 else
5314 {
5315 /* memory target */
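     /* SETcc always stores the byte; only the value (1 or 0) depends on the
        condition, never whether the write happens. */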
5316 IEM_MC_BEGIN(0, 1);
5317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5320 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5321 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5322 } IEM_MC_ELSE() {
5323 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5324 } IEM_MC_ENDIF();
5325 IEM_MC_ADVANCE_RIP();
5326 IEM_MC_END();
5327 }
5328 return VINF_SUCCESS;
5329}
5330
5331
5332/** Opcode 0x0f 0x91. */
5333FNIEMOP_DEF(iemOp_setno_Eb)
5334{
5335 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5336 IEMOP_HLP_MIN_386();
5337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5338
5339 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5340 * any way. AMD says it's "unused", whatever that means. We're
5341 * ignoring for now. */
5342 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5343 {
5344 /* register target */
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5346 IEM_MC_BEGIN(0, 0);
5347 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5348 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5349 } IEM_MC_ELSE() {
5350 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5351 } IEM_MC_ENDIF();
5352 IEM_MC_ADVANCE_RIP();
5353 IEM_MC_END();
5354 }
5355 else
5356 {
5357 /* memory target */
5358 IEM_MC_BEGIN(0, 1);
5359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5362 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5363 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5364 } IEM_MC_ELSE() {
5365 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5366 } IEM_MC_ENDIF();
5367 IEM_MC_ADVANCE_RIP();
5368 IEM_MC_END();
5369 }
5370 return VINF_SUCCESS;
5371}
5372
5373
5374/** Opcode 0x0f 0x92. */
5375FNIEMOP_DEF(iemOp_setc_Eb)
5376{
5377 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5378 IEMOP_HLP_MIN_386();
5379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5380
5381 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5382 * any way. AMD says it's "unused", whatever that means. We're
5383 * ignoring for now. */
5384 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5385 {
5386 /* register target */
5387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5388 IEM_MC_BEGIN(0, 0);
5389 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5390 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5391 } IEM_MC_ELSE() {
5392 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5393 } IEM_MC_ENDIF();
5394 IEM_MC_ADVANCE_RIP();
5395 IEM_MC_END();
5396 }
5397 else
5398 {
5399 /* memory target */
5400 IEM_MC_BEGIN(0, 1);
5401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5404 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5405 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5406 } IEM_MC_ELSE() {
5407 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5408 } IEM_MC_ENDIF();
5409 IEM_MC_ADVANCE_RIP();
5410 IEM_MC_END();
5411 }
5412 return VINF_SUCCESS;
5413}
5414
5415
5416/** Opcode 0x0f 0x93. */
5417FNIEMOP_DEF(iemOp_setnc_Eb)
5418{
5419 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5420 IEMOP_HLP_MIN_386();
5421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5422
5423 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5424 * any way. AMD says it's "unused", whatever that means. We're
5425 * ignoring for now. */
5426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5427 {
5428 /* register target */
5429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5430 IEM_MC_BEGIN(0, 0);
5431 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5432 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5433 } IEM_MC_ELSE() {
5434 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5435 } IEM_MC_ENDIF();
5436 IEM_MC_ADVANCE_RIP();
5437 IEM_MC_END();
5438 }
5439 else
5440 {
5441 /* memory target */
5442 IEM_MC_BEGIN(0, 1);
5443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5446 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5447 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5448 } IEM_MC_ELSE() {
5449 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5450 } IEM_MC_ENDIF();
5451 IEM_MC_ADVANCE_RIP();
5452 IEM_MC_END();
5453 }
5454 return VINF_SUCCESS;
5455}
5456
5457
5458/** Opcode 0x0f 0x94. */
5459FNIEMOP_DEF(iemOp_sete_Eb)
5460{
5461 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5462 IEMOP_HLP_MIN_386();
5463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5464
5465 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5466 * any way. AMD says it's "unused", whatever that means. We're
5467 * ignoring for now. */
5468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5469 {
5470 /* register target */
5471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5472 IEM_MC_BEGIN(0, 0);
5473 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5474 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5475 } IEM_MC_ELSE() {
5476 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5477 } IEM_MC_ENDIF();
5478 IEM_MC_ADVANCE_RIP();
5479 IEM_MC_END();
5480 }
5481 else
5482 {
5483 /* memory target */
5484 IEM_MC_BEGIN(0, 1);
5485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5488 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5489 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5490 } IEM_MC_ELSE() {
5491 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5492 } IEM_MC_ENDIF();
5493 IEM_MC_ADVANCE_RIP();
5494 IEM_MC_END();
5495 }
5496 return VINF_SUCCESS;
5497}
5498
5499
5500/** Opcode 0x0f 0x95. */
5501FNIEMOP_DEF(iemOp_setne_Eb)
5502{
5503 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5504 IEMOP_HLP_MIN_386();
5505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5506
5507 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5508 * any way. AMD says it's "unused", whatever that means. We're
5509 * ignoring for now. */
5510 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5511 {
5512 /* register target */
5513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5514 IEM_MC_BEGIN(0, 0);
5515 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5516 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5517 } IEM_MC_ELSE() {
5518 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5519 } IEM_MC_ENDIF();
5520 IEM_MC_ADVANCE_RIP();
5521 IEM_MC_END();
5522 }
5523 else
5524 {
5525 /* memory target */
5526 IEM_MC_BEGIN(0, 1);
5527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5530 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5531 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5532 } IEM_MC_ELSE() {
5533 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5534 } IEM_MC_ENDIF();
5535 IEM_MC_ADVANCE_RIP();
5536 IEM_MC_END();
5537 }
5538 return VINF_SUCCESS;
5539}
5540
5541
5542/** Opcode 0x0f 0x96. */
5543FNIEMOP_DEF(iemOp_setbe_Eb)
5544{
5545 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5546 IEMOP_HLP_MIN_386();
5547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5548
5549 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5550 * any way. AMD says it's "unused", whatever that means. We're
5551  * ignoring it for now. */
5552 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5553 {
5554 /* register target */
5555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5556 IEM_MC_BEGIN(0, 0);
5557 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5558 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5559 } IEM_MC_ELSE() {
5560 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5561 } IEM_MC_ENDIF();
5562 IEM_MC_ADVANCE_RIP();
5563 IEM_MC_END();
5564 }
5565 else
5566 {
5567 /* memory target */
5568 IEM_MC_BEGIN(0, 1);
5569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5572 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5573 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5574 } IEM_MC_ELSE() {
5575 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5576 } IEM_MC_ENDIF();
5577 IEM_MC_ADVANCE_RIP();
5578 IEM_MC_END();
5579 }
5580 return VINF_SUCCESS;
5581}
5582
5583
5584/** Opcode 0x0f 0x97. */
5585FNIEMOP_DEF(iemOp_setnbe_Eb)
5586{
5587 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5588 IEMOP_HLP_MIN_386();
5589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5590
5591 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5592 * any way. AMD says it's "unused", whatever that means. We're
5593  * ignoring it for now. */
5594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5595 {
5596 /* register target */
5597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5598 IEM_MC_BEGIN(0, 0);
5599 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5600 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5601 } IEM_MC_ELSE() {
5602 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5603 } IEM_MC_ENDIF();
5604 IEM_MC_ADVANCE_RIP();
5605 IEM_MC_END();
5606 }
5607 else
5608 {
5609 /* memory target */
5610 IEM_MC_BEGIN(0, 1);
5611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5614 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5615 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5616 } IEM_MC_ELSE() {
5617 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5618 } IEM_MC_ENDIF();
5619 IEM_MC_ADVANCE_RIP();
5620 IEM_MC_END();
5621 }
5622 return VINF_SUCCESS;
5623}
5624
5625
5626/** Opcode 0x0f 0x98. */
5627FNIEMOP_DEF(iemOp_sets_Eb)
5628{
5629 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5630 IEMOP_HLP_MIN_386();
5631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5632
5633 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5634 * any way. AMD says it's "unused", whatever that means. We're
5635  * ignoring it for now. */
5636 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5637 {
5638 /* register target */
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEM_MC_BEGIN(0, 0);
5641 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5642 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5643 } IEM_MC_ELSE() {
5644 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5645 } IEM_MC_ENDIF();
5646 IEM_MC_ADVANCE_RIP();
5647 IEM_MC_END();
5648 }
5649 else
5650 {
5651 /* memory target */
5652 IEM_MC_BEGIN(0, 1);
5653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5656 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5657 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5658 } IEM_MC_ELSE() {
5659 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5660 } IEM_MC_ENDIF();
5661 IEM_MC_ADVANCE_RIP();
5662 IEM_MC_END();
5663 }
5664 return VINF_SUCCESS;
5665}
5666
5667
5668/** Opcode 0x0f 0x99. */
5669FNIEMOP_DEF(iemOp_setns_Eb)
5670{
5671 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5672 IEMOP_HLP_MIN_386();
5673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5674
5675 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5676 * any way. AMD says it's "unused", whatever that means. We're
5677  * ignoring it for now. */
5678 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5679 {
5680 /* register target */
5681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5682 IEM_MC_BEGIN(0, 0);
5683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5684 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5685 } IEM_MC_ELSE() {
5686 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5687 } IEM_MC_ENDIF();
5688 IEM_MC_ADVANCE_RIP();
5689 IEM_MC_END();
5690 }
5691 else
5692 {
5693 /* memory target */
5694 IEM_MC_BEGIN(0, 1);
5695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5698 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5699 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5700 } IEM_MC_ELSE() {
5701 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5702 } IEM_MC_ENDIF();
5703 IEM_MC_ADVANCE_RIP();
5704 IEM_MC_END();
5705 }
5706 return VINF_SUCCESS;
5707}
5708
5709
5710/** Opcode 0x0f 0x9a. */
5711FNIEMOP_DEF(iemOp_setp_Eb)
5712{
5713 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5714 IEMOP_HLP_MIN_386();
5715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5716
5717 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5718 * any way. AMD says it's "unused", whatever that means. We're
5719  * ignoring it for now. */
5720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5721 {
5722 /* register target */
5723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5724 IEM_MC_BEGIN(0, 0);
5725 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5726 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5727 } IEM_MC_ELSE() {
5728 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5729 } IEM_MC_ENDIF();
5730 IEM_MC_ADVANCE_RIP();
5731 IEM_MC_END();
5732 }
5733 else
5734 {
5735 /* memory target */
5736 IEM_MC_BEGIN(0, 1);
5737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5741 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5742 } IEM_MC_ELSE() {
5743 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5744 } IEM_MC_ENDIF();
5745 IEM_MC_ADVANCE_RIP();
5746 IEM_MC_END();
5747 }
5748 return VINF_SUCCESS;
5749}
5750
5751
5752/** Opcode 0x0f 0x9b. */
5753FNIEMOP_DEF(iemOp_setnp_Eb)
5754{
5755 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5756 IEMOP_HLP_MIN_386();
5757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5758
5759 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5760 * any way. AMD says it's "unused", whatever that means. We're
5761  * ignoring it for now. */
5762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5763 {
5764 /* register target */
5765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5766 IEM_MC_BEGIN(0, 0);
5767 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5768 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5769 } IEM_MC_ELSE() {
5770 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5771 } IEM_MC_ENDIF();
5772 IEM_MC_ADVANCE_RIP();
5773 IEM_MC_END();
5774 }
5775 else
5776 {
5777 /* memory target */
5778 IEM_MC_BEGIN(0, 1);
5779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5782 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5783 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5784 } IEM_MC_ELSE() {
5785 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5786 } IEM_MC_ENDIF();
5787 IEM_MC_ADVANCE_RIP();
5788 IEM_MC_END();
5789 }
5790 return VINF_SUCCESS;
5791}
5792
5793
5794/** Opcode 0x0f 0x9c. */
5795FNIEMOP_DEF(iemOp_setl_Eb)
5796{
5797 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5798 IEMOP_HLP_MIN_386();
5799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5800
5801 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5802 * any way. AMD says it's "unused", whatever that means. We're
5803  * ignoring it for now. */
5804 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5805 {
5806 /* register target */
5807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5808 IEM_MC_BEGIN(0, 0);
5809 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5810 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5811 } IEM_MC_ELSE() {
5812 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5813 } IEM_MC_ENDIF();
5814 IEM_MC_ADVANCE_RIP();
5815 IEM_MC_END();
5816 }
5817 else
5818 {
5819 /* memory target */
5820 IEM_MC_BEGIN(0, 1);
5821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5824 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5825 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5826 } IEM_MC_ELSE() {
5827 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5828 } IEM_MC_ENDIF();
5829 IEM_MC_ADVANCE_RIP();
5830 IEM_MC_END();
5831 }
5832 return VINF_SUCCESS;
5833}
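
/**
 * Why "SF != OF" means signed less-than: after a signed subtraction SF holds
 * the result's sign and OF says whether that sign was flipped by overflow.
 * Illustrative sketch, not emulator code:
 * @code
 *  // cmp eax,ebx with eax=-1, ebx=1:        result -2, SF=1 OF=0 -> setl = 1
 *  // cmp eax,ebx with eax=INT32_MIN, ebx=1: overflows, SF=0 OF=1 -> setl = 1
 *  static uint8_t ExampleSetl(uint32_t fEFlags)
 *  {
 *      return !!(fEFlags & X86_EFL_SF) != !!(fEFlags & X86_EFL_OF);
 *  }
 * @endcode
 */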
5834
5835
5836/** Opcode 0x0f 0x9d. */
5837FNIEMOP_DEF(iemOp_setnl_Eb)
5838{
5839 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5840 IEMOP_HLP_MIN_386();
5841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5842
5843 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5844 * any way. AMD says it's "unused", whatever that means. We're
5845  * ignoring it for now. */
5846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5847 {
5848 /* register target */
5849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5850 IEM_MC_BEGIN(0, 0);
5851 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5852 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5853 } IEM_MC_ELSE() {
5854 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5855 } IEM_MC_ENDIF();
5856 IEM_MC_ADVANCE_RIP();
5857 IEM_MC_END();
5858 }
5859 else
5860 {
5861 /* memory target */
5862 IEM_MC_BEGIN(0, 1);
5863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5866 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5867 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5868 } IEM_MC_ELSE() {
5869 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5870 } IEM_MC_ENDIF();
5871 IEM_MC_ADVANCE_RIP();
5872 IEM_MC_END();
5873 }
5874 return VINF_SUCCESS;
5875}
5876
5877
5878/** Opcode 0x0f 0x9e. */
5879FNIEMOP_DEF(iemOp_setle_Eb)
5880{
5881 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5882 IEMOP_HLP_MIN_386();
5883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5884
5885 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5886 * any way. AMD says it's "unused", whatever that means. We're
5887  * ignoring it for now. */
5888 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5889 {
5890 /* register target */
5891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5892 IEM_MC_BEGIN(0, 0);
5893 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5894 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5895 } IEM_MC_ELSE() {
5896 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5897 } IEM_MC_ENDIF();
5898 IEM_MC_ADVANCE_RIP();
5899 IEM_MC_END();
5900 }
5901 else
5902 {
5903 /* memory target */
5904 IEM_MC_BEGIN(0, 1);
5905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5908 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5909 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5910 } IEM_MC_ELSE() {
5911 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5912 } IEM_MC_ENDIF();
5913 IEM_MC_ADVANCE_RIP();
5914 IEM_MC_END();
5915 }
5916 return VINF_SUCCESS;
5917}
5918
5919
5920/** Opcode 0x0f 0x9f. */
5921FNIEMOP_DEF(iemOp_setnle_Eb)
5922{
5923 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5924 IEMOP_HLP_MIN_386();
5925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5926
5927 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5928 * any way. AMD says it's "unused", whatever that means. We're
5929  * ignoring it for now. */
5930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5931 {
5932 /* register target */
5933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5934 IEM_MC_BEGIN(0, 0);
5935 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5936 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5937 } IEM_MC_ELSE() {
5938 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5939 } IEM_MC_ENDIF();
5940 IEM_MC_ADVANCE_RIP();
5941 IEM_MC_END();
5942 }
5943 else
5944 {
5945 /* memory target */
5946 IEM_MC_BEGIN(0, 1);
5947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5950 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5951 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5952 } IEM_MC_ELSE() {
5953 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5954 } IEM_MC_ENDIF();
5955 IEM_MC_ADVANCE_RIP();
5956 IEM_MC_END();
5957 }
5958 return VINF_SUCCESS;
5959}
5960
5961
5962/**
5963 * Common 'push segment-register' helper.
5964 */
5965FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5966{
5967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5968 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5969 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5970
5971 switch (pVCpu->iem.s.enmEffOpSize)
5972 {
5973 case IEMMODE_16BIT:
5974 IEM_MC_BEGIN(0, 1);
5975 IEM_MC_LOCAL(uint16_t, u16Value);
5976 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5977 IEM_MC_PUSH_U16(u16Value);
5978 IEM_MC_ADVANCE_RIP();
5979 IEM_MC_END();
5980 break;
5981
5982 case IEMMODE_32BIT:
5983 IEM_MC_BEGIN(0, 1);
5984 IEM_MC_LOCAL(uint32_t, u32Value);
5985 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5986 IEM_MC_PUSH_U32_SREG(u32Value);
5987 IEM_MC_ADVANCE_RIP();
5988 IEM_MC_END();
5989 break;
5990
5991 case IEMMODE_64BIT:
5992 IEM_MC_BEGIN(0, 1);
5993 IEM_MC_LOCAL(uint64_t, u64Value);
5994 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5995 IEM_MC_PUSH_U64(u64Value);
5996 IEM_MC_ADVANCE_RIP();
5997 IEM_MC_END();
5998 break;
5999 }
6000
6001 return VINF_SUCCESS;
6002}
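
/**
 * Stack arithmetic behind the helper above, sketched for the 64-bit case
 * (simplified: the real code goes through the stack-push machinery, which
 * honours SS attributes and canonical checks; names here are illustrative):
 * @code
 *  static void ExamplePushSReg64(uint64_t *puRsp, uint8_t *pbStack, uint16_t uSel)
 *  {
 *      uint64_t const uValue = uSel; // selector zero-extended to 64 bits
 *      *puRsp -= 8;                  // 64-bit default operand size in long mode
 *      memcpy(&pbStack[*puRsp], &uValue, sizeof(uValue));
 *  }
 * @endcode
 */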
6003
6004
6005/** Opcode 0x0f 0xa0. */
6006FNIEMOP_DEF(iemOp_push_fs)
6007{
6008 IEMOP_MNEMONIC(push_fs, "push fs");
6009 IEMOP_HLP_MIN_386();
6010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6011 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6012}
6013
6014
6015/** Opcode 0x0f 0xa1. */
6016FNIEMOP_DEF(iemOp_pop_fs)
6017{
6018 IEMOP_MNEMONIC(pop_fs, "pop fs");
6019 IEMOP_HLP_MIN_386();
6020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6021 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6022}
6023
6024
6025/** Opcode 0x0f 0xa2. */
6026FNIEMOP_DEF(iemOp_cpuid)
6027{
6028 IEMOP_MNEMONIC(cpuid, "cpuid");
6029 IEMOP_HLP_MIN_486(); /* not all 486es. */
6030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6031 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6032}
6033
6034
6035/**
6036 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6037 * iemOp_bts_Ev_Gv.
6038 */
6039FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6040{
6041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6042 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6043
6044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6045 {
6046 /* register destination. */
6047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6048 switch (pVCpu->iem.s.enmEffOpSize)
6049 {
6050 case IEMMODE_16BIT:
6051 IEM_MC_BEGIN(3, 0);
6052 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6053 IEM_MC_ARG(uint16_t, u16Src, 1);
6054 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6055
6056 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6057 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6058 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6059 IEM_MC_REF_EFLAGS(pEFlags);
6060 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6061
6062 IEM_MC_ADVANCE_RIP();
6063 IEM_MC_END();
6064 return VINF_SUCCESS;
6065
6066 case IEMMODE_32BIT:
6067 IEM_MC_BEGIN(3, 0);
6068 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6069 IEM_MC_ARG(uint32_t, u32Src, 1);
6070 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6071
6072 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6073 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6074 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6075 IEM_MC_REF_EFLAGS(pEFlags);
6076 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6077
6078 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6079 IEM_MC_ADVANCE_RIP();
6080 IEM_MC_END();
6081 return VINF_SUCCESS;
6082
6083 case IEMMODE_64BIT:
6084 IEM_MC_BEGIN(3, 0);
6085 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6086 IEM_MC_ARG(uint64_t, u64Src, 1);
6087 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6088
6089 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6090 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6091 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6092 IEM_MC_REF_EFLAGS(pEFlags);
6093 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6094
6095 IEM_MC_ADVANCE_RIP();
6096 IEM_MC_END();
6097 return VINF_SUCCESS;
6098
6099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6100 }
6101 }
6102 else
6103 {
6104 /* memory destination. */
6105
6106 uint32_t fAccess;
6107 if (pImpl->pfnLockedU16)
6108 fAccess = IEM_ACCESS_DATA_RW;
6109 else /* BT */
6110 fAccess = IEM_ACCESS_DATA_R;
6111
6112 /** @todo test negative bit offsets! */
6113 switch (pVCpu->iem.s.enmEffOpSize)
6114 {
6115 case IEMMODE_16BIT:
6116 IEM_MC_BEGIN(3, 2);
6117 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6118 IEM_MC_ARG(uint16_t, u16Src, 1);
6119 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6121 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6122
6123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6124 if (pImpl->pfnLockedU16)
6125 IEMOP_HLP_DONE_DECODING();
6126 else
6127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6128 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6129 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6130 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6131 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6132 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6133 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6134 IEM_MC_FETCH_EFLAGS(EFlags);
6135
6136 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6137 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6138 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6139 else
6140 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6142
6143 IEM_MC_COMMIT_EFLAGS(EFlags);
6144 IEM_MC_ADVANCE_RIP();
6145 IEM_MC_END();
6146 return VINF_SUCCESS;
6147
6148 case IEMMODE_32BIT:
6149 IEM_MC_BEGIN(3, 2);
6150 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6151 IEM_MC_ARG(uint32_t, u32Src, 1);
6152 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6154 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6155
6156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6157 if (pImpl->pfnLockedU16)
6158 IEMOP_HLP_DONE_DECODING();
6159 else
6160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6161 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6162 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6163 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6164 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6165 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6166 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6167 IEM_MC_FETCH_EFLAGS(EFlags);
6168
6169 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6170 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6171 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6172 else
6173 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6174 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6175
6176 IEM_MC_COMMIT_EFLAGS(EFlags);
6177 IEM_MC_ADVANCE_RIP();
6178 IEM_MC_END();
6179 return VINF_SUCCESS;
6180
6181 case IEMMODE_64BIT:
6182 IEM_MC_BEGIN(3, 2);
6183 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6184 IEM_MC_ARG(uint64_t, u64Src, 1);
6185 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6187 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6188
6189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6190 if (pImpl->pfnLockedU16)
6191 IEMOP_HLP_DONE_DECODING();
6192 else
6193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6194 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6195 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6196 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6197 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6198 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6199 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6200 IEM_MC_FETCH_EFLAGS(EFlags);
6201
6202 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6203 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6204 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6205 else
6206 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6207 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6208
6209 IEM_MC_COMMIT_EFLAGS(EFlags);
6210 IEM_MC_ADVANCE_RIP();
6211 IEM_MC_END();
6212 return VINF_SUCCESS;
6213
6214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6215 }
6216 }
6217}
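
/**
 * Worked example of the memory-operand address adjustment above: the bit
 * offset in the source register is a signed, unbounded index, so the
 * effective byte address moves and only the low 4/5/6 bits select a bit
 * within the 16/32/64-bit operand.  Sketch for the 32-bit case, assuming an
 * arithmetic right shift as IEM_MC_SAR_LOCAL_S32 guarantees (names are
 * illustrative):
 * @code
 *  // 'bt [mem],eax' with eax = -1: byte adjust = (-1 >> 5) * 4 = -4, so the
 *  // dword at mem-4 is accessed and its bit 31 (-1 & 0x1f) is tested.
 *  static void ExampleBtMemAddr(int32_t iBitOfs, intptr_t *piByteAdj, uint32_t *puBitNo)
 *  {
 *      *piByteAdj = (intptr_t)(iBitOfs >> 5) * 4; // dword granularity
 *      *puBitNo   = (uint32_t)iBitOfs & 0x1f;     // bit within that dword
 *  }
 * @endcode
 */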
6218
6219
6220/** Opcode 0x0f 0xa3. */
6221FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6222{
6223 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6224 IEMOP_HLP_MIN_386();
6225 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6226}
6227
6228
6229/**
6230 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6231 */
6232FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6233{
6234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6235 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6236
6237 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6238 {
6239 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6241
6242 switch (pVCpu->iem.s.enmEffOpSize)
6243 {
6244 case IEMMODE_16BIT:
6245 IEM_MC_BEGIN(4, 0);
6246 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6247 IEM_MC_ARG(uint16_t, u16Src, 1);
6248 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6249 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6250
6251 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6252 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6253 IEM_MC_REF_EFLAGS(pEFlags);
6254 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6255
6256 IEM_MC_ADVANCE_RIP();
6257 IEM_MC_END();
6258 return VINF_SUCCESS;
6259
6260 case IEMMODE_32BIT:
6261 IEM_MC_BEGIN(4, 0);
6262 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6263 IEM_MC_ARG(uint32_t, u32Src, 1);
6264 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6265 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6266
6267 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6268 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6269 IEM_MC_REF_EFLAGS(pEFlags);
6270 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6271
6272 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6273 IEM_MC_ADVANCE_RIP();
6274 IEM_MC_END();
6275 return VINF_SUCCESS;
6276
6277 case IEMMODE_64BIT:
6278 IEM_MC_BEGIN(4, 0);
6279 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6280 IEM_MC_ARG(uint64_t, u64Src, 1);
6281 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6282 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6283
6284 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6285 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6286 IEM_MC_REF_EFLAGS(pEFlags);
6287 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6288
6289 IEM_MC_ADVANCE_RIP();
6290 IEM_MC_END();
6291 return VINF_SUCCESS;
6292
6293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6294 }
6295 }
6296 else
6297 {
6298 switch (pVCpu->iem.s.enmEffOpSize)
6299 {
6300 case IEMMODE_16BIT:
6301 IEM_MC_BEGIN(4, 2);
6302 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6303 IEM_MC_ARG(uint16_t, u16Src, 1);
6304 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6305 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6307
6308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6309 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6310 IEM_MC_ASSIGN(cShiftArg, cShift);
6311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6312 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6313 IEM_MC_FETCH_EFLAGS(EFlags);
6314 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6315 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6316
6317 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6318 IEM_MC_COMMIT_EFLAGS(EFlags);
6319 IEM_MC_ADVANCE_RIP();
6320 IEM_MC_END();
6321 return VINF_SUCCESS;
6322
6323 case IEMMODE_32BIT:
6324 IEM_MC_BEGIN(4, 2);
6325 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6326 IEM_MC_ARG(uint32_t, u32Src, 1);
6327 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6328 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6330
6331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6332 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6333 IEM_MC_ASSIGN(cShiftArg, cShift);
6334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6335 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6336 IEM_MC_FETCH_EFLAGS(EFlags);
6337 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6338 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6339
6340 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6341 IEM_MC_COMMIT_EFLAGS(EFlags);
6342 IEM_MC_ADVANCE_RIP();
6343 IEM_MC_END();
6344 return VINF_SUCCESS;
6345
6346 case IEMMODE_64BIT:
6347 IEM_MC_BEGIN(4, 2);
6348 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6349 IEM_MC_ARG(uint64_t, u64Src, 1);
6350 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6351 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6353
6354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6355 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6356 IEM_MC_ASSIGN(cShiftArg, cShift);
6357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6358 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6359 IEM_MC_FETCH_EFLAGS(EFlags);
6360 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6361 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6362
6363 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6364 IEM_MC_COMMIT_EFLAGS(EFlags);
6365 IEM_MC_ADVANCE_RIP();
6366 IEM_MC_END();
6367 return VINF_SUCCESS;
6368
6369 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6370 }
6371 }
6372}
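
/**
 * What the double-precision shift computes, as a plain C sketch (the
 * assembly helpers additionally mask the count and update EFLAGS; both are
 * omitted here):
 * @code
 *  // shld dst,src,cnt: dst is shifted left, vacated low bits are filled
 *  // from the high bits of src.
 *  static uint32_t ExampleShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *  {
 *      cShift &= 31;
 *      if (!cShift)
 *          return uDst;
 *      return (uDst << cShift) | (uSrc >> (32 - cShift));
 *  }
 * @endcode
 */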
6373
6374
6375/**
6376 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6377 */
6378FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6379{
6380 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6381 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6382
6383 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6384 {
6385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6386
6387 switch (pVCpu->iem.s.enmEffOpSize)
6388 {
6389 case IEMMODE_16BIT:
6390 IEM_MC_BEGIN(4, 0);
6391 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6392 IEM_MC_ARG(uint16_t, u16Src, 1);
6393 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6394 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6395
6396 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6397 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6398 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6399 IEM_MC_REF_EFLAGS(pEFlags);
6400 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6401
6402 IEM_MC_ADVANCE_RIP();
6403 IEM_MC_END();
6404 return VINF_SUCCESS;
6405
6406 case IEMMODE_32BIT:
6407 IEM_MC_BEGIN(4, 0);
6408 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6409 IEM_MC_ARG(uint32_t, u32Src, 1);
6410 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6411 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6412
6413 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6414 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6415 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6416 IEM_MC_REF_EFLAGS(pEFlags);
6417 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6418
6419 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6420 IEM_MC_ADVANCE_RIP();
6421 IEM_MC_END();
6422 return VINF_SUCCESS;
6423
6424 case IEMMODE_64BIT:
6425 IEM_MC_BEGIN(4, 0);
6426 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6427 IEM_MC_ARG(uint64_t, u64Src, 1);
6428 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6429 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6430
6431 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6432 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6433 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6434 IEM_MC_REF_EFLAGS(pEFlags);
6435 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6436
6437 IEM_MC_ADVANCE_RIP();
6438 IEM_MC_END();
6439 return VINF_SUCCESS;
6440
6441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6442 }
6443 }
6444 else
6445 {
6446 switch (pVCpu->iem.s.enmEffOpSize)
6447 {
6448 case IEMMODE_16BIT:
6449 IEM_MC_BEGIN(4, 2);
6450 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6451 IEM_MC_ARG(uint16_t, u16Src, 1);
6452 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6453 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6455
6456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6458 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6459 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6460 IEM_MC_FETCH_EFLAGS(EFlags);
6461 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6462 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6463
6464 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6465 IEM_MC_COMMIT_EFLAGS(EFlags);
6466 IEM_MC_ADVANCE_RIP();
6467 IEM_MC_END();
6468 return VINF_SUCCESS;
6469
6470 case IEMMODE_32BIT:
6471 IEM_MC_BEGIN(4, 2);
6472 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6473 IEM_MC_ARG(uint32_t, u32Src, 1);
6474 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6475 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6477
6478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6480 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6481 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6482 IEM_MC_FETCH_EFLAGS(EFlags);
6483 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6484 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6485
6486 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6487 IEM_MC_COMMIT_EFLAGS(EFlags);
6488 IEM_MC_ADVANCE_RIP();
6489 IEM_MC_END();
6490 return VINF_SUCCESS;
6491
6492 case IEMMODE_64BIT:
6493 IEM_MC_BEGIN(4, 2);
6494 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6495 IEM_MC_ARG(uint64_t, u64Src, 1);
6496 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6497 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6499
6500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6502 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6503 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6504 IEM_MC_FETCH_EFLAGS(EFlags);
6505 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6506 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6507
6508 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6509 IEM_MC_COMMIT_EFLAGS(EFlags);
6510 IEM_MC_ADVANCE_RIP();
6511 IEM_MC_END();
6512 return VINF_SUCCESS;
6513
6514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6515 }
6516 }
6517}
6518
6519
6520
6521/** Opcode 0x0f 0xa4. */
6522FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6523{
6524 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6525 IEMOP_HLP_MIN_386();
6526 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6527}
6528
6529
6530/** Opcode 0x0f 0xa5. */
6531FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6532{
6533 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6534 IEMOP_HLP_MIN_386();
6535 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6536}
6537
6538
6539/** Opcode 0x0f 0xa8. */
6540FNIEMOP_DEF(iemOp_push_gs)
6541{
6542 IEMOP_MNEMONIC(push_gs, "push gs");
6543 IEMOP_HLP_MIN_386();
6544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6545 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6546}
6547
6548
6549/** Opcode 0x0f 0xa9. */
6550FNIEMOP_DEF(iemOp_pop_gs)
6551{
6552 IEMOP_MNEMONIC(pop_gs, "pop gs");
6553 IEMOP_HLP_MIN_386();
6554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6555 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6556}
6557
6558
6559/** Opcode 0x0f 0xaa. */
6560FNIEMOP_DEF(iemOp_rsm)
6561{
6562 IEMOP_MNEMONIC(rsm, "rsm");
6563 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6564 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6565 * intercept). */
6566 IEMOP_BITCH_ABOUT_STUB();
6567 return IEMOP_RAISE_INVALID_OPCODE();
6568}
6569
6572
6573/** Opcode 0x0f 0xab. */
6574FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6575{
6576 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6577 IEMOP_HLP_MIN_386();
6578 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6579}
6580
6581
6582/** Opcode 0x0f 0xac. */
6583FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6584{
6585 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6586 IEMOP_HLP_MIN_386();
6587 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6588}
6589
6590
6591/** Opcode 0x0f 0xad. */
6592FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6593{
6594 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6595 IEMOP_HLP_MIN_386();
6596 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6597}
6598
6599
6600/** Opcode 0x0f 0xae mem/0. */
6601FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6602{
6603 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6604 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6605 return IEMOP_RAISE_INVALID_OPCODE();
6606
6607 IEM_MC_BEGIN(3, 1);
6608 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6609 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6610 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6613 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6614 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6615 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6616 IEM_MC_END();
6617 return VINF_SUCCESS;
6618}
6619
6620
6621/** Opcode 0x0f 0xae mem/1. */
6622FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6623{
6624 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6625 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6626 return IEMOP_RAISE_INVALID_OPCODE();
6627
6628 IEM_MC_BEGIN(3, 1);
6629 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6630 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6631 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6634 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6635 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6636 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6637 IEM_MC_END();
6638 return VINF_SUCCESS;
6639}
6640
6641
6642/**
6643 * @opmaps grp15
6644 * @opcode !11/2
6645 * @oppfx none
6646 * @opcpuid sse
6647 * @opgroup og_sse_mxcsrsm
6648 * @opxcpttype 5
6649 * @optest op1=0 -> mxcsr=0
6650 * @optest op1=0x2083 -> mxcsr=0x2083
6651 * @optest op1=0xfffffffe -> value.xcpt=0xd
6652 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6653 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6654 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6655 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6656 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6657 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6658 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6659 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6660 */
6661FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6662{
6663 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6664 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6665 return IEMOP_RAISE_INVALID_OPCODE();
6666
6667 IEM_MC_BEGIN(2, 0);
6668 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6669 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6672 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6673 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6674 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6675 IEM_MC_END();
6676 return VINF_SUCCESS;
6677}
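
/**
 * The CIMPL worker called above fetches the 32-bit value and must reject
 * reserved bits with \#GP(0).  Sketch of that validity check only (the
 * writable-bit mask comes from the guest's MXCSR_MASK, 0xffff being merely
 * the common default; the name below is illustrative):
 * @code
 *  static bool ExampleIsValidMxcsr(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
 *  {
 *      return !(uNewMxCsr & ~fMxCsrMask); // any reserved bit set -> #GP(0)
 *  }
 * @endcode
 */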
6678
6679
6680/**
6681 * @opmaps grp15
6682 * @opcode !11/3
6683 * @oppfx none
6684 * @opcpuid sse
6685 * @opgroup og_sse_mxcsrsm
6686 * @opxcpttype 5
6687 * @optest mxcsr=0 -> op1=0
6688 * @optest mxcsr=0x2083 -> op1=0x2083
6689 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6690 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6691 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6692 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6693 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6694 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6695 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6696 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6697 */
6698FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6699{
6700 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6701 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6702 return IEMOP_RAISE_INVALID_OPCODE();
6703
6704 IEM_MC_BEGIN(2, 0);
6705 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6706 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6709 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6710 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6711 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6712 IEM_MC_END();
6713 return VINF_SUCCESS;
6714}
6715
6716
6717/**
6718 * @opmaps grp15
6719 * @opcode !11/4
6720 * @oppfx none
6721 * @opcpuid xsave
6722 * @opgroup og_system
6723 * @opxcpttype none
6724 */
6725FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6726{
6727 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6728 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6729 return IEMOP_RAISE_INVALID_OPCODE();
6730
6731 IEM_MC_BEGIN(3, 0);
6732 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6733 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6734 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6737 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6738 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6739 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6740 IEM_MC_END();
6741 return VINF_SUCCESS;
6742}
6743
6744
6745/**
6746 * @opmaps grp15
6747 * @opcode !11/5
6748 * @oppfx none
6749 * @opcpuid xsave
6750 * @opgroup og_system
6751 * @opxcpttype none
6752 */
6753FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6754{
6755 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6756 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6757 return IEMOP_RAISE_INVALID_OPCODE();
6758
6759 IEM_MC_BEGIN(3, 0);
6760 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6761 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6762 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6765 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6766 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6767 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6768 IEM_MC_END();
6769 return VINF_SUCCESS;
6770}
6771
6772/** Opcode 0x0f 0xae mem/6. */
6773FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6774
6775/**
6776 * @opmaps grp15
6777 * @opcode !11/7
6778 * @oppfx none
6779 * @opcpuid clfsh
6780 * @opgroup og_cachectl
6781 * @optest op1=1 ->
6782 */
6783FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6784{
6785 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6786 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6787 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6788
6789 IEM_MC_BEGIN(2, 0);
6790 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6791 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6794 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6795 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6796 IEM_MC_END();
6797 return VINF_SUCCESS;
6798}
6799
6800/**
6801 * @opmaps grp15
6802 * @opcode !11/7
6803 * @oppfx 0x66
6804 * @opcpuid clflushopt
6805 * @opgroup og_cachectl
6806 * @optest op1=1 ->
6807 */
6808FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6809{
6810 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6811 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6812 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6813
6814 IEM_MC_BEGIN(2, 0);
6815 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6816 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6819 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6820 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6821 IEM_MC_END();
6822 return VINF_SUCCESS;
6823}
6824
6825
6826/** Opcode 0x0f 0xae 11b/5. */
6827FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6828{
6829 RT_NOREF_PV(bRm);
6830 IEMOP_MNEMONIC(lfence, "lfence");
6831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6832 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6833 return IEMOP_RAISE_INVALID_OPCODE();
6834
6835 IEM_MC_BEGIN(0, 0);
6836 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6837 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6838 else
6839 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6840 IEM_MC_ADVANCE_RIP();
6841 IEM_MC_END();
6842 return VINF_SUCCESS;
6843}
6844
6845
6846/** Opcode 0x0f 0xae 11b/6. */
6847FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6848{
6849 RT_NOREF_PV(bRm);
6850 IEMOP_MNEMONIC(mfence, "mfence");
6851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6852 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6853 return IEMOP_RAISE_INVALID_OPCODE();
6854
6855 IEM_MC_BEGIN(0, 0);
6856 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6857 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6858 else
6859 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6860 IEM_MC_ADVANCE_RIP();
6861 IEM_MC_END();
6862 return VINF_SUCCESS;
6863}
6864
6865
6866/** Opcode 0x0f 0xae 11b/7. */
6867FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6868{
6869 RT_NOREF_PV(bRm);
6870 IEMOP_MNEMONIC(sfence, "sfence");
6871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6872 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6873 return IEMOP_RAISE_INVALID_OPCODE();
6874
6875 IEM_MC_BEGIN(0, 0);
6876 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6877 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6878 else
6879 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6880 IEM_MC_ADVANCE_RIP();
6881 IEM_MC_END();
6882 return VINF_SUCCESS;
6883}
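
/**
 * All three fence encodings above call tiny assembly helpers; on hosts
 * without SSE2 the fence instructions themselves are unavailable, so
 * iemAImpl_alt_mem_fence falls back to an implicitly locked operation.
 * A sketch of that fallback idea (GCC-style inline assembly, x86 host
 * assumed; not the actual helper, which is implemented in assembly):
 * @code
 *  static void ExampleAltMemFence(void)
 *  {
 *      uint32_t u32Tmp = 0;
 *      uint32_t u32Val = 0;
 *      __asm__ __volatile__("xchgl %0, %1" // xchg with memory is locked
 *                           : "+r" (u32Val), "+m" (u32Tmp) : : "memory");
 *  }
 * @endcode
 */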
6884
6885
6886/** Opcode 0xf3 0x0f 0xae 11b/0. */
6887FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
6888{
6889 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
6890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6891 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6892 {
6893 IEM_MC_BEGIN(1, 0);
6894 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6895 IEM_MC_ARG(uint64_t, u64Dst, 0);
6896 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
6897 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6898 IEM_MC_ADVANCE_RIP();
6899 IEM_MC_END();
6900 }
6901 else
6902 {
6903 IEM_MC_BEGIN(1, 0);
6904 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6905 IEM_MC_ARG(uint32_t, u32Dst, 0);
6906 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
6907 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6908 IEM_MC_ADVANCE_RIP();
6909 IEM_MC_END();
6910 }
6911 return VINF_SUCCESS;
6912}
6913
6914/** Opcode 0xf3 0x0f 0xae 11b/1. */
6915FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
6916{
6917 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
6918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6919 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6920 {
6921 IEM_MC_BEGIN(1, 0);
6922 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6923 IEM_MC_ARG(uint64_t, u64Dst, 0);
6924 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
6925 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6926 IEM_MC_ADVANCE_RIP();
6927 IEM_MC_END();
6928 }
6929 else
6930 {
6931 IEM_MC_BEGIN(1, 0);
6932 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6933 IEM_MC_ARG(uint32_t, u32Dst, 0);
6934 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
6935 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6936 IEM_MC_ADVANCE_RIP();
6937 IEM_MC_END();
6938 }
6939 return VINF_SUCCESS;
6940}
6941
6942/** Opcode 0xf3 0x0f 0xae 11b/2. */
6943FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
6944{
6945 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
6946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6947 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6948 {
6949 IEM_MC_BEGIN(1, 0);
6950 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6951 IEM_MC_ARG(uint64_t, u64Dst, 0);
6952 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6953 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6954 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
6955 IEM_MC_ADVANCE_RIP();
6956 IEM_MC_END();
6957 }
6958 else
6959 {
6960 IEM_MC_BEGIN(1, 0);
6961 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6962 IEM_MC_ARG(uint32_t, u32Dst, 0);
6963 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6964 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
6965 IEM_MC_ADVANCE_RIP();
6966 IEM_MC_END();
6967 }
6968 return VINF_SUCCESS;
6969}
6970
6971/** Opcode 0xf3 0x0f 0xae 11b/3. */
6972FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
6973{
6974 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
6975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6976 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6977 {
6978 IEM_MC_BEGIN(1, 0);
6979 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6980 IEM_MC_ARG(uint64_t, u64Dst, 0);
6981 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6982 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6983 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
6984 IEM_MC_ADVANCE_RIP();
6985 IEM_MC_END();
6986 }
6987 else
6988 {
6989 IEM_MC_BEGIN(1, 0);
6990 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6991 IEM_MC_ARG(uint32_t, u32Dst, 0);
6992 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6993 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
6994 IEM_MC_ADVANCE_RIP();
6995 IEM_MC_END();
6996 }
6997 return VINF_SUCCESS;
6998}
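
/**
 * The IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 step above enforces that the
 * new base address is canonical, i.e. that bits 63:48 replicate bit 47 for a
 * 48-bit virtual address width.  The 32-bit forms skip it, since a
 * zero-extended value is always canonical.  Equivalent check as a sketch:
 * @code
 *  static bool ExampleIsCanonical(uint64_t uAddr)
 *  {
 *      return ((uAddr + UINT64_C(0x0000800000000000)) >> 48) == 0;
 *  }
 * @endcode
 */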
6999
7000
7001/**
7002 * Group 15 jump table for register variant.
7003 */
7004IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7005{ /* pfx: none, 066h, 0f3h, 0f2h */
7006 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7007 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7008 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7009 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7010 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7011 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7012 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7013 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7014};
7015AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7016
7017
7018/**
7019 * Group 15 jump table for memory variant.
7020 */
7021IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7022{ /* pfx: none, 066h, 0f3h, 0f2h */
7023 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7024 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7025 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7026 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7027 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7028 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7029 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7030 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7031};
7032AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7033
7034
7035/** Opcode 0x0f 0xae. */
7036FNIEMOP_DEF(iemOp_Grp15)
7037{
7038 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7040 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7041 /* register, register */
7042 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7043 + pVCpu->iem.s.idxPrefix], bRm);
7044 /* memory, register */
7045 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7046 + pVCpu->iem.s.idxPrefix], bRm);
7047}
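
/**
 * Index math for the two tables above, spelt out: each ModR/M 'reg' value
 * owns four consecutive entries, one per mandatory-prefix group (none, 66h,
 * F3h, F2h), so the entry is reg * 4 + idxPrefix.  Illustrative only:
 * @code
 *  // bRm = 0xe8: mod=3, reg=5, rm=0.  With no mandatory prefix the index is
 *  // 5*4 + 0 = 20, which selects iemOp_Grp15_lfence in the register table.
 *  unsigned const iReg = (0xe8 >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // 5
 *  unsigned const idx  = iReg * 4 + 0;  // idxPrefix 0 = no prefix -> 20
 * @endcode
 */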
7048
7049
7050/** Opcode 0x0f 0xaf. */
7051FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7052{
7053 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7054 IEMOP_HLP_MIN_386();
7055 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7056 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7057}
7058
7059
7060/** Opcode 0x0f 0xb0. */
7061FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7062{
7063 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7064 IEMOP_HLP_MIN_486();
7065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7066
7067 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7068 {
7069 IEMOP_HLP_DONE_DECODING();
7070 IEM_MC_BEGIN(4, 0);
7071 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7072 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7073 IEM_MC_ARG(uint8_t, u8Src, 2);
7074 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7075
7076 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7077 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7078 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7079 IEM_MC_REF_EFLAGS(pEFlags);
7080 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7081 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7082 else
7083 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7084
7085 IEM_MC_ADVANCE_RIP();
7086 IEM_MC_END();
7087 }
7088 else
7089 {
7090 IEM_MC_BEGIN(4, 3);
7091 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7092 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7093 IEM_MC_ARG(uint8_t, u8Src, 2);
7094 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7096 IEM_MC_LOCAL(uint8_t, u8Al);
7097
7098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7099 IEMOP_HLP_DONE_DECODING();
7100 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7101 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7102 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7103 IEM_MC_FETCH_EFLAGS(EFlags);
7104 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7105 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7106 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7107 else
7108 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7109
7110 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7111 IEM_MC_COMMIT_EFLAGS(EFlags);
7112 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7113 IEM_MC_ADVANCE_RIP();
7114 IEM_MC_END();
7115 }
7116 return VINF_SUCCESS;
7117}
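
/**
 * cmpxchg semantics in plain C for reference (the real helpers additionally
 * compute the arithmetic flags from the implicit compare; sketch only, with
 * illustrative names):
 * @code
 *  static void ExampleCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, bool *pfZF)
 *  {
 *      if (*pu8Dst == *pu8Al)
 *      {
 *          *pu8Dst = u8Src;    // equal: destination gets the source, ZF=1
 *          *pfZF   = true;
 *      }
 *      else
 *      {
 *          *pu8Al  = *pu8Dst;  // not equal: AL gets the destination, ZF=0
 *          *pfZF   = false;
 *      }
 *  }
 * @endcode
 */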
7118
7119/** Opcode 0x0f 0xb1. */
7120FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7121{
7122 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7123 IEMOP_HLP_MIN_486();
7124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7125
7126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7127 {
7128 IEMOP_HLP_DONE_DECODING();
7129 switch (pVCpu->iem.s.enmEffOpSize)
7130 {
7131 case IEMMODE_16BIT:
7132 IEM_MC_BEGIN(4, 0);
7133 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7134 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7135 IEM_MC_ARG(uint16_t, u16Src, 2);
7136 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7137
7138 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7139 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7140 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7141 IEM_MC_REF_EFLAGS(pEFlags);
7142 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7143 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7144 else
7145 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7146
7147 IEM_MC_ADVANCE_RIP();
7148 IEM_MC_END();
7149 return VINF_SUCCESS;
7150
7151 case IEMMODE_32BIT:
7152 IEM_MC_BEGIN(4, 0);
7153 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7154 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7155 IEM_MC_ARG(uint32_t, u32Src, 2);
7156 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7157
7158 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7159 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7160 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7161 IEM_MC_REF_EFLAGS(pEFlags);
7162 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7163 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7164 else
7165 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7166
7167 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7168 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7169 IEM_MC_ADVANCE_RIP();
7170 IEM_MC_END();
7171 return VINF_SUCCESS;
7172
7173 case IEMMODE_64BIT:
7174 IEM_MC_BEGIN(4, 0);
7175 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7176 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7177#ifdef RT_ARCH_X86
7178 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7179#else
7180 IEM_MC_ARG(uint64_t, u64Src, 2);
7181#endif
7182 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7183
7184 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7185 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7186 IEM_MC_REF_EFLAGS(pEFlags);
7187#ifdef RT_ARCH_X86
7188 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7189 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7190 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7191 else
7192 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7193#else
7194 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7195 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7196 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7197 else
7198 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7199#endif
7200
7201 IEM_MC_ADVANCE_RIP();
7202 IEM_MC_END();
7203 return VINF_SUCCESS;
7204
7205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7206 }
7207 }
7208 else
7209 {
7210 switch (pVCpu->iem.s.enmEffOpSize)
7211 {
7212 case IEMMODE_16BIT:
7213 IEM_MC_BEGIN(4, 3);
7214 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7215 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7216 IEM_MC_ARG(uint16_t, u16Src, 2);
7217 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7219 IEM_MC_LOCAL(uint16_t, u16Ax);
7220
7221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7222 IEMOP_HLP_DONE_DECODING();
7223 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7224 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7225 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7226 IEM_MC_FETCH_EFLAGS(EFlags);
7227 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7228 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7229 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7230 else
7231 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7232
7233 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7234 IEM_MC_COMMIT_EFLAGS(EFlags);
7235 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7236 IEM_MC_ADVANCE_RIP();
7237 IEM_MC_END();
7238 return VINF_SUCCESS;
7239
7240 case IEMMODE_32BIT:
7241 IEM_MC_BEGIN(4, 3);
7242 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7243 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7244 IEM_MC_ARG(uint32_t, u32Src, 2);
7245 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7247 IEM_MC_LOCAL(uint32_t, u32Eax);
7248
7249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7250 IEMOP_HLP_DONE_DECODING();
7251 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7252 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7253 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7254 IEM_MC_FETCH_EFLAGS(EFlags);
7255 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7256 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7257 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7258 else
7259 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7260
7261 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7262 IEM_MC_COMMIT_EFLAGS(EFlags);
7263 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7264 IEM_MC_ADVANCE_RIP();
7265 IEM_MC_END();
7266 return VINF_SUCCESS;
7267
7268 case IEMMODE_64BIT:
7269 IEM_MC_BEGIN(4, 3);
7270 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7271 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7272#ifdef RT_ARCH_X86
7273 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7274#else
7275 IEM_MC_ARG(uint64_t, u64Src, 2);
7276#endif
7277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7279 IEM_MC_LOCAL(uint64_t, u64Rax);
7280
7281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7282 IEMOP_HLP_DONE_DECODING();
7283 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7284 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7285 IEM_MC_FETCH_EFLAGS(EFlags);
7286 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7287#ifdef RT_ARCH_X86
7288 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7289 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7290 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7291 else
7292 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7293#else
7294 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7295 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7296 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7297 else
7298 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7299#endif
7300
7301 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7302 IEM_MC_COMMIT_EFLAGS(EFlags);
7303 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7304 IEM_MC_ADVANCE_RIP();
7305 IEM_MC_END();
7306 return VINF_SUCCESS;
7307
7308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7309 }
7310 }
7311}
7312
7313
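/**
 * Common worker for lss, lfs and lgs (0x0f 0xb2, 0xb4 and 0xb5): fetches the
 * offset and the 16-bit selector of the far pointer from memory and defers
 * the actual segment and general register loads to iemCImpl_load_SReg_Greg.
 */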
7314FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7315{
7316 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7317 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7318
7319 switch (pVCpu->iem.s.enmEffOpSize)
7320 {
7321 case IEMMODE_16BIT:
7322 IEM_MC_BEGIN(5, 1);
7323 IEM_MC_ARG(uint16_t, uSel, 0);
7324 IEM_MC_ARG(uint16_t, offSeg, 1);
7325 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7326 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7327 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7328 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7331 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7332 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7333 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7334 IEM_MC_END();
7335 return VINF_SUCCESS;
7336
7337 case IEMMODE_32BIT:
7338 IEM_MC_BEGIN(5, 1);
7339 IEM_MC_ARG(uint16_t, uSel, 0);
7340 IEM_MC_ARG(uint32_t, offSeg, 1);
7341 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7342 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7343 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7344 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7347 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7348 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7349 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7350 IEM_MC_END();
7351 return VINF_SUCCESS;
7352
7353 case IEMMODE_64BIT:
7354 IEM_MC_BEGIN(5, 1);
7355 IEM_MC_ARG(uint16_t, uSel, 0);
7356 IEM_MC_ARG(uint64_t, offSeg, 1);
7357 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7358 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7359 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7360 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7363 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7364 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7365 else
7366 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7367 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7368 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7369 IEM_MC_END();
7370 return VINF_SUCCESS;
7371
7372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7373 }
7374}
7375
7376
7377/** Opcode 0x0f 0xb2. */
7378FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7379{
7380 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7381 IEMOP_HLP_MIN_386();
7382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7383 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7384 return IEMOP_RAISE_INVALID_OPCODE();
7385 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7386}
7387
7388
7389/** Opcode 0x0f 0xb3. */
7390FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7391{
7392 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7393 IEMOP_HLP_MIN_386();
7394 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7395}
7396
7397
7398/** Opcode 0x0f 0xb4. */
7399FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7400{
7401 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7402 IEMOP_HLP_MIN_386();
7403 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7404 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7405 return IEMOP_RAISE_INVALID_OPCODE();
7406 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7407}
7408
7409
7410/** Opcode 0x0f 0xb5. */
7411FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7412{
7413 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7414 IEMOP_HLP_MIN_386();
7415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7416 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7417 return IEMOP_RAISE_INVALID_OPCODE();
7418 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7419}
7420
7421
7422/** Opcode 0x0f 0xb6. */
7423FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7424{
7425 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7426 IEMOP_HLP_MIN_386();
7427
7428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7429
7430 /*
7431 * If rm is denoting a register, no more instruction bytes.
7432 */
7433 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7434 {
7435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7436 switch (pVCpu->iem.s.enmEffOpSize)
7437 {
7438 case IEMMODE_16BIT:
7439 IEM_MC_BEGIN(0, 1);
7440 IEM_MC_LOCAL(uint16_t, u16Value);
7441 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7442 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7443 IEM_MC_ADVANCE_RIP();
7444 IEM_MC_END();
7445 return VINF_SUCCESS;
7446
7447 case IEMMODE_32BIT:
7448 IEM_MC_BEGIN(0, 1);
7449 IEM_MC_LOCAL(uint32_t, u32Value);
7450 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7451 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7452 IEM_MC_ADVANCE_RIP();
7453 IEM_MC_END();
7454 return VINF_SUCCESS;
7455
7456 case IEMMODE_64BIT:
7457 IEM_MC_BEGIN(0, 1);
7458 IEM_MC_LOCAL(uint64_t, u64Value);
7459 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7460 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7461 IEM_MC_ADVANCE_RIP();
7462 IEM_MC_END();
7463 return VINF_SUCCESS;
7464
7465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7466 }
7467 }
7468 else
7469 {
7470 /*
7471 * We're loading a register from memory.
7472 */
7473 switch (pVCpu->iem.s.enmEffOpSize)
7474 {
7475 case IEMMODE_16BIT:
7476 IEM_MC_BEGIN(0, 2);
7477 IEM_MC_LOCAL(uint16_t, u16Value);
7478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7481 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7482 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7483 IEM_MC_ADVANCE_RIP();
7484 IEM_MC_END();
7485 return VINF_SUCCESS;
7486
7487 case IEMMODE_32BIT:
7488 IEM_MC_BEGIN(0, 2);
7489 IEM_MC_LOCAL(uint32_t, u32Value);
7490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7493 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7494 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7495 IEM_MC_ADVANCE_RIP();
7496 IEM_MC_END();
7497 return VINF_SUCCESS;
7498
7499 case IEMMODE_64BIT:
7500 IEM_MC_BEGIN(0, 2);
7501 IEM_MC_LOCAL(uint64_t, u64Value);
7502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7505 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7506 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7507 IEM_MC_ADVANCE_RIP();
7508 IEM_MC_END();
7509 return VINF_SUCCESS;
7510
7511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7512 }
7513 }
7514}
7515
7516
7517/** Opcode 0x0f 0xb7. */
7518FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7519{
7520 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7521 IEMOP_HLP_MIN_386();
7522
7523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7524
7525 /** @todo Not entirely sure how the operand size prefix is handled here,
7526 * assuming that it will be ignored. Would be nice to have a few
7527 * tests for this. */
7528 /*
7529 * If rm is denoting a register, no more instruction bytes.
7530 */
7531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7532 {
7533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7534 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7535 {
7536 IEM_MC_BEGIN(0, 1);
7537 IEM_MC_LOCAL(uint32_t, u32Value);
7538 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7539 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7540 IEM_MC_ADVANCE_RIP();
7541 IEM_MC_END();
7542 }
7543 else
7544 {
7545 IEM_MC_BEGIN(0, 1);
7546 IEM_MC_LOCAL(uint64_t, u64Value);
7547 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7548 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7549 IEM_MC_ADVANCE_RIP();
7550 IEM_MC_END();
7551 }
7552 }
7553 else
7554 {
7555 /*
7556 * We're loading a register from memory.
7557 */
7558 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7559 {
7560 IEM_MC_BEGIN(0, 2);
7561 IEM_MC_LOCAL(uint32_t, u32Value);
7562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7565 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7566 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7567 IEM_MC_ADVANCE_RIP();
7568 IEM_MC_END();
7569 }
7570 else
7571 {
7572 IEM_MC_BEGIN(0, 2);
7573 IEM_MC_LOCAL(uint64_t, u64Value);
7574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7577 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7578 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7579 IEM_MC_ADVANCE_RIP();
7580 IEM_MC_END();
7581 }
7582 }
7583 return VINF_SUCCESS;
7584}
7585
7586
7587/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7588FNIEMOP_UD_STUB(iemOp_jmpe);
7589/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7590FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7591
7592
7593/**
7594 * @opcode 0xb9
7595 * @opinvalid intel-modrm
7596 * @optest ->
7597 */
7598FNIEMOP_DEF(iemOp_Grp10)
7599{
7600 /*
7601 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
7602 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7603 */
7604 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7605 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7606 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7607}
7608
7609
7610/** Opcode 0x0f 0xba. */
7611FNIEMOP_DEF(iemOp_Grp8)
7612{
7613 IEMOP_HLP_MIN_386();
7614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7615 PCIEMOPBINSIZES pImpl;
7616 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7617 {
7618 case 0: case 1: case 2: case 3:
7619 /* Both AMD and Intel want full modr/m decoding and imm8. */
7620 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7621 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7622 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7623 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7624 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7626 }
7627 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7628
7629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7630 {
7631 /* register destination. */
7632 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7634
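        /* For the register forms the immediate bit offset simply wraps modulo
           the operand width, hence the 0x0f/0x1f/0x3f masking below. */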
7635 switch (pVCpu->iem.s.enmEffOpSize)
7636 {
7637 case IEMMODE_16BIT:
7638 IEM_MC_BEGIN(3, 0);
7639 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7640 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7641 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7642
7643 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7644 IEM_MC_REF_EFLAGS(pEFlags);
7645 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7646
7647 IEM_MC_ADVANCE_RIP();
7648 IEM_MC_END();
7649 return VINF_SUCCESS;
7650
7651 case IEMMODE_32BIT:
7652 IEM_MC_BEGIN(3, 0);
7653 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7654 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7655 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7656
7657 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7658 IEM_MC_REF_EFLAGS(pEFlags);
7659 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7660
7661 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7662 IEM_MC_ADVANCE_RIP();
7663 IEM_MC_END();
7664 return VINF_SUCCESS;
7665
7666 case IEMMODE_64BIT:
7667 IEM_MC_BEGIN(3, 0);
7668 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7669 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7670 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7671
7672 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7673 IEM_MC_REF_EFLAGS(pEFlags);
7674 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7675
7676 IEM_MC_ADVANCE_RIP();
7677 IEM_MC_END();
7678 return VINF_SUCCESS;
7679
7680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7681 }
7682 }
7683 else
7684 {
7685 /* memory destination. */
7686
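        /* BT only reads the destination and has no locked worker, so the
           presence of pfnLockedU16 doubles as the read-only vs read-write
           access discriminator. */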
7687 uint32_t fAccess;
7688 if (pImpl->pfnLockedU16)
7689 fAccess = IEM_ACCESS_DATA_RW;
7690 else /* BT */
7691 fAccess = IEM_ACCESS_DATA_R;
7692
7693 /** @todo test negative bit offsets! */
7694 switch (pVCpu->iem.s.enmEffOpSize)
7695 {
7696 case IEMMODE_16BIT:
7697 IEM_MC_BEGIN(3, 1);
7698 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7699 IEM_MC_ARG(uint16_t, u16Src, 1);
7700 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7702
7703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7704 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7705 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7706 if (pImpl->pfnLockedU16)
7707 IEMOP_HLP_DONE_DECODING();
7708 else
7709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7710 IEM_MC_FETCH_EFLAGS(EFlags);
7711 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7712 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7713 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7714 else
7715 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7716 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7717
7718 IEM_MC_COMMIT_EFLAGS(EFlags);
7719 IEM_MC_ADVANCE_RIP();
7720 IEM_MC_END();
7721 return VINF_SUCCESS;
7722
7723 case IEMMODE_32BIT:
7724 IEM_MC_BEGIN(3, 1);
7725 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7726 IEM_MC_ARG(uint32_t, u32Src, 1);
7727 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7729
7730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7731 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7732 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7733 if (pImpl->pfnLockedU16)
7734 IEMOP_HLP_DONE_DECODING();
7735 else
7736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7737 IEM_MC_FETCH_EFLAGS(EFlags);
7738 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7739 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7740 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7741 else
7742 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7743 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7744
7745 IEM_MC_COMMIT_EFLAGS(EFlags);
7746 IEM_MC_ADVANCE_RIP();
7747 IEM_MC_END();
7748 return VINF_SUCCESS;
7749
7750 case IEMMODE_64BIT:
7751 IEM_MC_BEGIN(3, 1);
7752 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7753 IEM_MC_ARG(uint64_t, u64Src, 1);
7754 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7756
7757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7758 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7759 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7760 if (pImpl->pfnLockedU16)
7761 IEMOP_HLP_DONE_DECODING();
7762 else
7763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7764 IEM_MC_FETCH_EFLAGS(EFlags);
7765 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7766 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7767 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7768 else
7769 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7770 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7771
7772 IEM_MC_COMMIT_EFLAGS(EFlags);
7773 IEM_MC_ADVANCE_RIP();
7774 IEM_MC_END();
7775 return VINF_SUCCESS;
7776
7777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7778 }
7779 }
7780}
7781
7782
7783/** Opcode 0x0f 0xbb. */
7784FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7785{
7786 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7787 IEMOP_HLP_MIN_386();
7788 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7789}
7790
7791
7792/** Opcode 0x0f 0xbc. */
7793FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7794{
7795 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7796 IEMOP_HLP_MIN_386();
7797 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7798 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7799}
7800
7801
7802/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7803FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7804
7805
7806/** Opcode 0x0f 0xbd. */
7807FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7808{
7809 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7810 IEMOP_HLP_MIN_386();
7811 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7812 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7813}
7814
7815
7816/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7817FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7818
7819
7820/** Opcode 0x0f 0xbe. */
7821FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7822{
7823 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7824 IEMOP_HLP_MIN_386();
7825
7826 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7827
7828 /*
7829 * If rm is denoting a register, no more instruction bytes.
7830 */
7831 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7832 {
7833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7834 switch (pVCpu->iem.s.enmEffOpSize)
7835 {
7836 case IEMMODE_16BIT:
7837 IEM_MC_BEGIN(0, 1);
7838 IEM_MC_LOCAL(uint16_t, u16Value);
7839 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7840 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7841 IEM_MC_ADVANCE_RIP();
7842 IEM_MC_END();
7843 return VINF_SUCCESS;
7844
7845 case IEMMODE_32BIT:
7846 IEM_MC_BEGIN(0, 1);
7847 IEM_MC_LOCAL(uint32_t, u32Value);
7848 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7849 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7850 IEM_MC_ADVANCE_RIP();
7851 IEM_MC_END();
7852 return VINF_SUCCESS;
7853
7854 case IEMMODE_64BIT:
7855 IEM_MC_BEGIN(0, 1);
7856 IEM_MC_LOCAL(uint64_t, u64Value);
7857 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7858 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7859 IEM_MC_ADVANCE_RIP();
7860 IEM_MC_END();
7861 return VINF_SUCCESS;
7862
7863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7864 }
7865 }
7866 else
7867 {
7868 /*
7869 * We're loading a register from memory.
7870 */
7871 switch (pVCpu->iem.s.enmEffOpSize)
7872 {
7873 case IEMMODE_16BIT:
7874 IEM_MC_BEGIN(0, 2);
7875 IEM_MC_LOCAL(uint16_t, u16Value);
7876 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7879 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7880 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7881 IEM_MC_ADVANCE_RIP();
7882 IEM_MC_END();
7883 return VINF_SUCCESS;
7884
7885 case IEMMODE_32BIT:
7886 IEM_MC_BEGIN(0, 2);
7887 IEM_MC_LOCAL(uint32_t, u32Value);
7888 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7891 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7892 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7893 IEM_MC_ADVANCE_RIP();
7894 IEM_MC_END();
7895 return VINF_SUCCESS;
7896
7897 case IEMMODE_64BIT:
7898 IEM_MC_BEGIN(0, 2);
7899 IEM_MC_LOCAL(uint64_t, u64Value);
7900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7903 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7904 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7905 IEM_MC_ADVANCE_RIP();
7906 IEM_MC_END();
7907 return VINF_SUCCESS;
7908
7909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7910 }
7911 }
7912}
7913
7914
7915/** Opcode 0x0f 0xbf. */
7916FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7917{
7918 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7919 IEMOP_HLP_MIN_386();
7920
7921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7922
7923 /** @todo Not entirely sure how the operand size prefix is handled here,
7924 * assuming that it will be ignored. Would be nice to have a few
7925 * tests for this. */
7926 /*
7927 * If rm is denoting a register, no more instruction bytes.
7928 */
7929 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7930 {
7931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7932 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7933 {
7934 IEM_MC_BEGIN(0, 1);
7935 IEM_MC_LOCAL(uint32_t, u32Value);
7936 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7937 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7938 IEM_MC_ADVANCE_RIP();
7939 IEM_MC_END();
7940 }
7941 else
7942 {
7943 IEM_MC_BEGIN(0, 1);
7944 IEM_MC_LOCAL(uint64_t, u64Value);
7945 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7946 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7947 IEM_MC_ADVANCE_RIP();
7948 IEM_MC_END();
7949 }
7950 }
7951 else
7952 {
7953 /*
7954 * We're loading a register from memory.
7955 */
7956 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7957 {
7958 IEM_MC_BEGIN(0, 2);
7959 IEM_MC_LOCAL(uint32_t, u32Value);
7960 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7963 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7964 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7965 IEM_MC_ADVANCE_RIP();
7966 IEM_MC_END();
7967 }
7968 else
7969 {
7970 IEM_MC_BEGIN(0, 2);
7971 IEM_MC_LOCAL(uint64_t, u64Value);
7972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7975 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7976 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7977 IEM_MC_ADVANCE_RIP();
7978 IEM_MC_END();
7979 }
7980 }
7981 return VINF_SUCCESS;
7982}
7983
7984
7985/** Opcode 0x0f 0xc0. */
7986FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7987{
7988 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7989 IEMOP_HLP_MIN_486();
7990 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7991
7992 /*
7993 * If rm is denoting a register, no more instruction bytes.
7994 */
7995 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7996 {
7997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7998
7999 IEM_MC_BEGIN(3, 0);
8000 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8001 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8002 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8003
8004 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8005 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8006 IEM_MC_REF_EFLAGS(pEFlags);
8007 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8008
8009 IEM_MC_ADVANCE_RIP();
8010 IEM_MC_END();
8011 }
8012 else
8013 {
8014 /*
8015 * We're accessing memory.
8016 */
8017 IEM_MC_BEGIN(3, 3);
8018 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8019 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8020 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8021 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8023
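        /* The register operand is worked on via a local copy (u8RegCopy) and
           only written back once the memory operand has been committed. */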
8024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8025 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8026 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8027 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8028 IEM_MC_FETCH_EFLAGS(EFlags);
8029 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8030 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8031 else
8032 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8033
8034 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8035 IEM_MC_COMMIT_EFLAGS(EFlags);
8036 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8037 IEM_MC_ADVANCE_RIP();
8038 IEM_MC_END();
8039 return VINF_SUCCESS;
8040 }
8041 return VINF_SUCCESS;
8042}
8043
8044
8045/** Opcode 0x0f 0xc1. */
8046FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8047{
8048 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8049 IEMOP_HLP_MIN_486();
8050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8051
8052 /*
8053 * If rm is denoting a register, no more instruction bytes.
8054 */
8055 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8056 {
8057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8058
8059 switch (pVCpu->iem.s.enmEffOpSize)
8060 {
8061 case IEMMODE_16BIT:
8062 IEM_MC_BEGIN(3, 0);
8063 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8064 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8065 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8066
8067 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8068 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8069 IEM_MC_REF_EFLAGS(pEFlags);
8070 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8071
8072 IEM_MC_ADVANCE_RIP();
8073 IEM_MC_END();
8074 return VINF_SUCCESS;
8075
8076 case IEMMODE_32BIT:
8077 IEM_MC_BEGIN(3, 0);
8078 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8079 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8080 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8081
8082 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8083 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8084 IEM_MC_REF_EFLAGS(pEFlags);
8085 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8086
8087 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8088 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8089 IEM_MC_ADVANCE_RIP();
8090 IEM_MC_END();
8091 return VINF_SUCCESS;
8092
8093 case IEMMODE_64BIT:
8094 IEM_MC_BEGIN(3, 0);
8095 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8096 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8097 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8098
8099 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8100 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8101 IEM_MC_REF_EFLAGS(pEFlags);
8102 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8103
8104 IEM_MC_ADVANCE_RIP();
8105 IEM_MC_END();
8106 return VINF_SUCCESS;
8107
8108 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8109 }
8110 }
8111 else
8112 {
8113 /*
8114 * We're accessing memory.
8115 */
8116 switch (pVCpu->iem.s.enmEffOpSize)
8117 {
8118 case IEMMODE_16BIT:
8119 IEM_MC_BEGIN(3, 3);
8120 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8121 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8122 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8123 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8125
8126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8127 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8128 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8129 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8130 IEM_MC_FETCH_EFLAGS(EFlags);
8131 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8132 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8133 else
8134 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8135
8136 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8137 IEM_MC_COMMIT_EFLAGS(EFlags);
8138 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8139 IEM_MC_ADVANCE_RIP();
8140 IEM_MC_END();
8141 return VINF_SUCCESS;
8142
8143 case IEMMODE_32BIT:
8144 IEM_MC_BEGIN(3, 3);
8145 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8146 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8147 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8148 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8150
8151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8152 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8153 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8154 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8155 IEM_MC_FETCH_EFLAGS(EFlags);
8156 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8157 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8158 else
8159 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8160
8161 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8162 IEM_MC_COMMIT_EFLAGS(EFlags);
8163 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8164 IEM_MC_ADVANCE_RIP();
8165 IEM_MC_END();
8166 return VINF_SUCCESS;
8167
8168 case IEMMODE_64BIT:
8169 IEM_MC_BEGIN(3, 3);
8170 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8171 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8172 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8173 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8175
8176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8177 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8178 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8179 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8180 IEM_MC_FETCH_EFLAGS(EFlags);
8181 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8182 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8183 else
8184 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8185
8186 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8187 IEM_MC_COMMIT_EFLAGS(EFlags);
8188 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8189 IEM_MC_ADVANCE_RIP();
8190 IEM_MC_END();
8191 return VINF_SUCCESS;
8192
8193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8194 }
8195 }
8196}
8197
8198
8199/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8200FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8201/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8202FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8203/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8204FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8205/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8206FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8207
8208
8209/** Opcode 0x0f 0xc3. */
8210FNIEMOP_DEF(iemOp_movnti_My_Gy)
8211{
8212 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8213
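    /* Note! Since IEM has no cache to bypass, the non-temporal hint is
             irrelevant here and the store is performed as an ordinary one. */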
8214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8215
8216 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8217 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8218 {
8219 switch (pVCpu->iem.s.enmEffOpSize)
8220 {
8221 case IEMMODE_32BIT:
8222 IEM_MC_BEGIN(0, 2);
8223 IEM_MC_LOCAL(uint32_t, u32Value);
8224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8225
8226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8228 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8229 return IEMOP_RAISE_INVALID_OPCODE();
8230
8231 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8232 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8233 IEM_MC_ADVANCE_RIP();
8234 IEM_MC_END();
8235 break;
8236
8237 case IEMMODE_64BIT:
8238 IEM_MC_BEGIN(0, 2);
8239 IEM_MC_LOCAL(uint64_t, u64Value);
8240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8241
8242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8244 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8245 return IEMOP_RAISE_INVALID_OPCODE();
8246
8247 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8248 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8249 IEM_MC_ADVANCE_RIP();
8250 IEM_MC_END();
8251 break;
8252
8253 case IEMMODE_16BIT:
8254 /** @todo check this form. */
8255 return IEMOP_RAISE_INVALID_OPCODE();
8256 }
8257 }
8258 else
8259 return IEMOP_RAISE_INVALID_OPCODE();
8260 return VINF_SUCCESS;
8261}
8262/* Opcode 0x66 0x0f 0xc3 - invalid */
8263/* Opcode 0xf3 0x0f 0xc3 - invalid */
8264/* Opcode 0xf2 0x0f 0xc3 - invalid */
8265
8266/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8267FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8268/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8269FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8270/* Opcode 0xf3 0x0f 0xc4 - invalid */
8271/* Opcode 0xf2 0x0f 0xc4 - invalid */
8272
8273/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8274FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8275/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8276FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8277/* Opcode 0xf3 0x0f 0xc5 - invalid */
8278/* Opcode 0xf2 0x0f 0xc5 - invalid */
8279
8280/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8281FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8282/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8283FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8284/* Opcode 0xf3 0x0f 0xc6 - invalid */
8285/* Opcode 0xf2 0x0f 0xc6 - invalid */
8286
8287
8288/** Opcode 0x0f 0xc7 !11/1. */
8289FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8290{
8291 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8292
8293 IEM_MC_BEGIN(4, 3);
8294 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8295 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8296 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8297 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8298 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8299 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8301
8302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8303 IEMOP_HLP_DONE_DECODING();
8304 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8305
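    /* Gather the expected value (EDX:EAX) and the replacement (ECX:EBX) into
       64-bit locals so the worker can treat them as single qwords. */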
8306 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8307 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8308 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8309
8310 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8311 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8312 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8313
8314 IEM_MC_FETCH_EFLAGS(EFlags);
8315 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8316 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8317 else
8318 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8319
8320 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8321 IEM_MC_COMMIT_EFLAGS(EFlags);
8322 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8323 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8324 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8325 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8326 IEM_MC_ENDIF();
8327 IEM_MC_ADVANCE_RIP();
8328
8329 IEM_MC_END();
8330 return VINF_SUCCESS;
8331}
8332
8333
8334/** Opcode REX.W 0x0f 0xc7 !11/1. */
8335FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8336{
8337 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8338 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8339 {
8340#if 0
8341 RT_NOREF(bRm);
8342 IEMOP_BITCH_ABOUT_STUB();
8343 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8344#else
8345 IEM_MC_BEGIN(4, 3);
8346 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8347 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8348 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8349 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8350 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8351 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8353
8354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8355 IEMOP_HLP_DONE_DECODING();
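        /* cmpxchg16b requires a 16-byte aligned memory operand; misalignment
           raises #GP(0). */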
8356 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8357 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8358
8359 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8360 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8361 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8362
8363 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8364 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8365 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8366
8367 IEM_MC_FETCH_EFLAGS(EFlags);
8368# ifdef RT_ARCH_AMD64
8369 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8370 {
8371 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8372 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8373 else
8374 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8375 }
8376 else
8377# endif
8378 {
8379 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8380 accesses and not all atomic, which works fine in a uni-CPU guest
8381 configuration (ignoring DMA). If guest SMP is active, we have no choice
8382 but to use a rendezvous callback here. Sigh. */
8383 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8384 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8385 else
8386 {
8387 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8388 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8389 }
8390 }
8391
8392 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8393 IEM_MC_COMMIT_EFLAGS(EFlags);
8394 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8395 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8396 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8397 IEM_MC_ENDIF();
8398 IEM_MC_ADVANCE_RIP();
8399
8400 IEM_MC_END();
8401 return VINF_SUCCESS;
8402#endif
8403 }
8404 Log(("cmpxchg16b -> #UD\n"));
8405 return IEMOP_RAISE_INVALID_OPCODE();
8406}
8407
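/** Opcode 0x0f 0xc7 !11/1: cmpxchg16b when REX.W is present, cmpxchg8b otherwise. */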
8408FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8409{
8410 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8411 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8412 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8413}
8414
8415/** Opcode 0x0f 0xc7 11/6. */
8416FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8417
8418/** Opcode 0x0f 0xc7 !11/6. */
8419FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8420
8421/** Opcode 0x66 0x0f 0xc7 !11/6. */
8422FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8423
8424/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8425FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8426
8427/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8428FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8429
8430/** Opcode 0x0f 0xc7 11/7. */
8431FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8432
8433
8434/**
8435 * Group 9 jump table for register variant.
8436 */
8437IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8438{ /* pfx: none, 066h, 0f3h, 0f2h */
8439 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8440 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8441 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8442 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8443 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8444 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8445 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8446 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8447};
8448AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8449
8450
8451/**
8452 * Group 9 jump table for memory variant.
8453 */
8454IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8455{ /* pfx: none, 066h, 0f3h, 0f2h */
8456 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8457 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8458 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8459 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8460 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8461 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8462 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8463 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8464};
8465AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8466
8467
8468/** Opcode 0x0f 0xc7. */
8469FNIEMOP_DEF(iemOp_Grp9)
8470{
8471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
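    /* The group 9 tables are indexed by modrm.reg times the four prefix
       variants (none, 066h, 0f3h, 0f2h), using idxPrefix for the latter. */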
8472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8473 /* register, register */
8474 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8475 + pVCpu->iem.s.idxPrefix], bRm);
8476 /* memory, register */
8477 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8478 + pVCpu->iem.s.idxPrefix], bRm);
8479}
8480
8481
8482/**
8483 * Common 'bswap register' helper.
8484 */
8485FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8486{
8487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8488 switch (pVCpu->iem.s.enmEffOpSize)
8489 {
8490 case IEMMODE_16BIT:
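            /* Note! BSWAP with a 16-bit operand is documented as undefined;
                     whatever iemAImpl_bswap_u16 does is what the guest gets. */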
8491 IEM_MC_BEGIN(1, 0);
8492 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8493 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8494 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8495 IEM_MC_ADVANCE_RIP();
8496 IEM_MC_END();
8497 return VINF_SUCCESS;
8498
8499 case IEMMODE_32BIT:
8500 IEM_MC_BEGIN(1, 0);
8501 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8502 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8503 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8504 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8505 IEM_MC_ADVANCE_RIP();
8506 IEM_MC_END();
8507 return VINF_SUCCESS;
8508
8509 case IEMMODE_64BIT:
8510 IEM_MC_BEGIN(1, 0);
8511 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8512 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8513 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8514 IEM_MC_ADVANCE_RIP();
8515 IEM_MC_END();
8516 return VINF_SUCCESS;
8517
8518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8519 }
8520}
8521
8522
8523/** Opcode 0x0f 0xc8. */
8524FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8525{
8526 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8527 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8528 prefix. It appears REX.B is the correct prefix. For a parallel
8529 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8530 IEMOP_HLP_MIN_486();
8531 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8532}
8533
8534
8535/** Opcode 0x0f 0xc9. */
8536FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8537{
8538 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8539 IEMOP_HLP_MIN_486();
8540 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8541}
8542
8543
8544/** Opcode 0x0f 0xca. */
8545FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8546{
8547 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8548 IEMOP_HLP_MIN_486();
8549 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8550}
8551
8552
8553/** Opcode 0x0f 0xcb. */
8554FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8555{
8556 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8557 IEMOP_HLP_MIN_486();
8558 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8559}
8560
8561
8562/** Opcode 0x0f 0xcc. */
8563FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8564{
8565 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8566 IEMOP_HLP_MIN_486();
8567 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8568}
8569
8570
8571/** Opcode 0x0f 0xcd. */
8572FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8573{
8574 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8575 IEMOP_HLP_MIN_486();
8576 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8577}
8578
8579
8580/** Opcode 0x0f 0xce. */
8581FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8582{
8583 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8584 IEMOP_HLP_MIN_486();
8585 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8586}
8587
8588
8589/** Opcode 0x0f 0xcf. */
8590FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8591{
8592 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8593 IEMOP_HLP_MIN_486();
8594 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8595}
8596
8597
8598/* Opcode 0x0f 0xd0 - invalid */
8599/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8600FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8601/* Opcode 0xf3 0x0f 0xd0 - invalid */
8602/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8603FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8604
8605/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8606FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8607/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8608FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8609/* Opcode 0xf3 0x0f 0xd1 - invalid */
8610/* Opcode 0xf2 0x0f 0xd1 - invalid */
8611
8612/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8613FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8614/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8615FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8616/* Opcode 0xf3 0x0f 0xd2 - invalid */
8617/* Opcode 0xf2 0x0f 0xd2 - invalid */
8618
8619/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8620FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8621/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8622FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8623/* Opcode 0xf3 0x0f 0xd3 - invalid */
8624/* Opcode 0xf2 0x0f 0xd3 - invalid */
8625
8626/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8627FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8628/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8629FNIEMOP_STUB(iemOp_paddq_Vx_W);
8630/* Opcode 0xf3 0x0f 0xd4 - invalid */
8631/* Opcode 0xf2 0x0f 0xd4 - invalid */
8632
8633/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8634FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8635/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8636FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8637/* Opcode 0xf3 0x0f 0xd5 - invalid */
8638/* Opcode 0xf2 0x0f 0xd5 - invalid */
8639
8640/* Opcode 0x0f 0xd6 - invalid */
8641
8642/**
8643 * @opcode 0xd6
8644 * @oppfx 0x66
8645 * @opcpuid sse2
8646 * @opgroup og_sse2_pcksclr_datamove
8647 * @opxcpttype none
8648 * @optest op1=-1 op2=2 -> op1=2
8649 * @optest op1=0 op2=-42 -> op1=-42
8650 */
8651FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8652{
8653 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8655 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8656 {
8657 /*
8658 * Register, register.
8659 */
8660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8661 IEM_MC_BEGIN(0, 2);
8662 IEM_MC_LOCAL(uint64_t, uSrc);
8663
8664 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8665 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8666
8667 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8668 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8669
8670 IEM_MC_ADVANCE_RIP();
8671 IEM_MC_END();
8672 }
8673 else
8674 {
8675 /*
8676 * Memory, register.
8677 */
8678 IEM_MC_BEGIN(0, 2);
8679 IEM_MC_LOCAL(uint64_t, uSrc);
8680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8681
8682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8684 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8685 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8686
8687 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8688 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8689
8690 IEM_MC_ADVANCE_RIP();
8691 IEM_MC_END();
8692 }
8693 return VINF_SUCCESS;
8694}
8695
8696
8697/**
8698 * @opcode 0xd6
8699 * @opcodesub 11 mr/reg
8700 * @oppfx f3
8701 * @opcpuid sse2
8702 * @opgroup og_sse2_simdint_datamove
8703 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8704 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8705 */
8706FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8707{
8708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8710 {
8711 /*
8712 * Register, register.
8713 */
8714 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8716 IEM_MC_BEGIN(0, 1);
8717 IEM_MC_LOCAL(uint64_t, uSrc);
8718
8719 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8720 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8721
8722 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8723 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8724 IEM_MC_FPU_TO_MMX_MODE();
8725
8726 IEM_MC_ADVANCE_RIP();
8727 IEM_MC_END();
8728 return VINF_SUCCESS;
8729 }
8730
8731 /**
8732 * @opdone
8733 * @opmnemonic udf30fd6mem
8734 * @opcode 0xd6
8735 * @opcodesub !11 mr/reg
8736 * @oppfx f3
8737 * @opunused intel-modrm
8738 * @opcpuid sse
8739 * @optest ->
8740 */
8741 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8742}
8743
8744
8745/**
8746 * @opcode 0xd6
8747 * @opcodesub 11 mr/reg
8748 * @oppfx f2
8749 * @opcpuid sse2
8750 * @opgroup og_sse2_simdint_datamove
8751 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8752 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8753 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8754 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8755 * @optest op1=-42 op2=0xfedcba9876543210
8756 * -> op1=0xfedcba9876543210 ftw=0xff
8757 */
8758FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8759{
8760 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8761 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8762 {
8763 /*
8764 * Register, register.
8765 */
8766 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8768 IEM_MC_BEGIN(0, 1);
8769 IEM_MC_LOCAL(uint64_t, uSrc);
8770
8771 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8772 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8773
8774 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8775 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
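        /* Touching an MMX register puts the FPU into MMX mode, cf. the
           ftw=0xff expectations in the tests above. */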
8776 IEM_MC_FPU_TO_MMX_MODE();
8777
8778 IEM_MC_ADVANCE_RIP();
8779 IEM_MC_END();
8780 return VINF_SUCCESS;
8781 }
8782
8783 /**
8784 * @opdone
8785 * @opmnemonic udf20fd6mem
8786 * @opcode 0xd6
8787 * @opcodesub !11 mr/reg
8788 * @oppfx f2
8789 * @opunused intel-modrm
8790 * @opcpuid sse
8791 * @optest ->
8792 */
8793 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8794}
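
/*
 * Illustrative sketch, not part of the emulation proper: the
 * IEM_MC_FPU_TO_MMX_MODE step shared by MOVDQ2Q and MOVQ2DQ.  Entering MMX
 * mode marks all x87 registers valid and resets the stack top, which is what
 * the ftw=0xff expectations in the @optest lines capture.  The exact field
 * manipulation below is an assumption based on the X86FXSTATE layout.
 */
static void sketchFpuToMmxMode(X86FXSTATE *pFpuCtx)
{
    pFpuCtx->FTW  = 0xff;                   /* abridged tag word: all registers valid */
    pFpuCtx->FSW &= ~X86_FSW_TOP_MASK;      /* force TOP to 0 */
}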
8795
8796/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8797FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8798{
8799 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8800 /** @todo testcase: Check that the instruction implicitly clears the high
8801 * bits in 64-bit mode. REX.W only becomes relevant once VLMAX > 256
8802 * and the opcode is extended to work on the whole width (not
8803 * just 128 bits). */
8804 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8805 /* Docs say register only. */
8806 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8807 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8808 {
8809 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8810 IEM_MC_BEGIN(2, 0);
8811 IEM_MC_ARG(uint64_t *, pDst, 0);
8812 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8813 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8814 IEM_MC_PREPARE_FPU_USAGE();
8815 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8816 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8817 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8818 IEM_MC_ADVANCE_RIP();
8819 IEM_MC_END();
8820 return VINF_SUCCESS;
8821 }
8822 return IEMOP_RAISE_INVALID_OPCODE();
8823}
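
/*
 * Illustrative sketch, not part of the emulation proper: the reference
 * semantics behind iemAImpl_pmovmskb_u64.  PMOVMSKB collects the most
 * significant bit of each source byte into the low bits of the destination.
 */
static void sketchPmovmskbU64(uint64_t *puDst, uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;  /* MSB of byte i -> bit i */
    *puDst = fMask;                                         /* bits 8..63 end up zero */
}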
8824
8825/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8826FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8827{
8828 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8829 /** @todo testcase: Check that the instruction implicitly clears the high
8830 * bits in 64-bit mode. REX.W only becomes relevant once VLMAX > 256
8831 * and the opcode is extended to work on the whole width (not
8832 * just 128 bits). */
8833 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd, Ux");
8834 /* Docs say register only. */
8835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8836 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8837 {
8838 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8839 IEM_MC_BEGIN(2, 0);
8840 IEM_MC_ARG(uint64_t *, pDst, 0);
8841 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8842 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8843 IEM_MC_PREPARE_SSE_USAGE();
8844 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8845 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8846 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8847 IEM_MC_ADVANCE_RIP();
8848 IEM_MC_END();
8849 return VINF_SUCCESS;
8850 }
8851 return IEMOP_RAISE_INVALID_OPCODE();
8852}
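
/*
 * Illustrative sketch, not part of the emulation proper: the 128-bit variant
 * used by iemAImpl_pmovmskb_u128 above.  One mask bit per source byte yields
 * a 16-bit result; since the helper writes the full 64-bit destination, bits
 * 16..63 are zero by construction.  Whether real CPUs clear the high GREG
 * bits the same way is exactly what the @todo testcase above asks.
 */
static void sketchPmovmskbU128(uint64_t *puDst, PCRTUINT128U puSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 16; iByte++)
        fMask |= (uint64_t)(puSrc->au8[iByte] >> 7) << iByte;   /* MSB of byte i -> bit i */
    *puDst = fMask;
}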
8853
8854/* Opcode 0xf3 0x0f 0xd7 - invalid */
8855/* Opcode 0xf2 0x0f 0xd7 - invalid */
8856
8857
8858/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8859FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8860/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8861FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8862/* Opcode 0xf3 0x0f 0xd8 - invalid */
8863/* Opcode 0xf2 0x0f 0xd8 - invalid */
8864
8865/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8866FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8867/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8868FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8869/* Opcode 0xf3 0x0f 0xd9 - invalid */
8870/* Opcode 0xf2 0x0f 0xd9 - invalid */
8871
8872/** Opcode 0x0f 0xda - pminub Pq, Qq */
8873FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8874/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8875FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8876/* Opcode 0xf3 0x0f 0xda - invalid */
8877/* Opcode 0xf2 0x0f 0xda - invalid */
8878
8879/** Opcode 0x0f 0xdb - pand Pq, Qq */
8880FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8881/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8882FNIEMOP_STUB(iemOp_pand_Vx_W);
8883/* Opcode 0xf3 0x0f 0xdb - invalid */
8884/* Opcode 0xf2 0x0f 0xdb - invalid */
8885
8886/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8887FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8888/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8889FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8890/* Opcode 0xf3 0x0f 0xdc - invalid */
8891/* Opcode 0xf2 0x0f 0xdc - invalid */
8892
8893/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8894FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8895/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8896FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8897/* Opcode 0xf3 0x0f 0xdd - invalid */
8898/* Opcode 0xf2 0x0f 0xdd - invalid */
8899
8900/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8901FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8902/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8903FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8904/* Opcode 0xf3 0x0f 0xde - invalid */
8905/* Opcode 0xf2 0x0f 0xde - invalid */
8906
8907/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8908FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8909/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8910FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8911/* Opcode 0xf3 0x0f 0xdf - invalid */
8912/* Opcode 0xf2 0x0f 0xdf - invalid */
8913
8914/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8915FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8916/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8917FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8918/* Opcode 0xf3 0x0f 0xe0 - invalid */
8919/* Opcode 0xf2 0x0f 0xe0 - invalid */
8920
8921/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8922FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8923/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8924FNIEMOP_STUB(iemOp_psraw_Vx_W);
8925/* Opcode 0xf3 0x0f 0xe1 - invalid */
8926/* Opcode 0xf2 0x0f 0xe1 - invalid */
8927
8928/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8929FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8930/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8931FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8932/* Opcode 0xf3 0x0f 0xe2 - invalid */
8933/* Opcode 0xf2 0x0f 0xe2 - invalid */
8934
8935/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8936FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8937/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8938FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8939/* Opcode 0xf3 0x0f 0xe3 - invalid */
8940/* Opcode 0xf2 0x0f 0xe3 - invalid */
8941
8942/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8943FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8944/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8945FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8946/* Opcode 0xf3 0x0f 0xe4 - invalid */
8947/* Opcode 0xf2 0x0f 0xe4 - invalid */
8948
8949/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8950FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8951/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8952FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8953/* Opcode 0xf3 0x0f 0xe5 - invalid */
8954/* Opcode 0xf2 0x0f 0xe5 - invalid */
8955
8956/* Opcode 0x0f 0xe6 - invalid */
8957/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8958FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8959/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8960FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8961/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8962FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8963
8964
8965/**
8966 * @opcode 0xe7
8967 * @opcodesub !11 mr/reg
8968 * @oppfx none
8969 * @opcpuid sse
8970 * @opgroup og_sse1_cachect
8971 * @opxcpttype none
8972 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
8973 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8974 */
8975FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8976{
8977 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8978 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8979 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8980 {
8981 /* Register, memory. */
8982 IEM_MC_BEGIN(0, 2);
8983 IEM_MC_LOCAL(uint64_t, uSrc);
8984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8985
8986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8988 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8989 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8990
8991 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8992 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8993 IEM_MC_FPU_TO_MMX_MODE();
8994
8995 IEM_MC_ADVANCE_RIP();
8996 IEM_MC_END();
8997 return VINF_SUCCESS;
8998 }
8999 /**
9000 * @opdone
9001 * @opmnemonic ud0fe7reg
9002 * @opcode 0xe7
9003 * @opcodesub 11 mr/reg
9004 * @oppfx none
9005 * @opunused immediate
9006 * @opcpuid sse
9007 * @optest ->
9008 */
9009 return IEMOP_RAISE_INVALID_OPCODE();
9010}
9011
9012/**
9013 * @opcode 0xe7
9014 * @opcodesub !11 mr/reg
9015 * @oppfx 0x66
9016 * @opcpuid sse2
9017 * @opgroup og_sse2_cachect
9018 * @opxcpttype 1
9019 * @optest op1=-1 op2=2 -> op1=2
9020 * @optest op1=0 op2=-42 -> op1=-42
9021 */
9022FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9023{
9024 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9025 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9026 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9027 {
9028 /* Register, memory. */
9029 IEM_MC_BEGIN(0, 2);
9030 IEM_MC_LOCAL(RTUINT128U, uSrc);
9031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9032
9033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9035 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9036 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9037
9038 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9039 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9040
9041 IEM_MC_ADVANCE_RIP();
9042 IEM_MC_END();
9043 return VINF_SUCCESS;
9044 }
9045
9046 /**
9047 * @opdone
9048 * @opmnemonic ud660fe7reg
9049 * @opcode 0xe7
9050 * @opcodesub 11 mr/reg
9051 * @oppfx 0x66
9052 * @opunused immediate
9053 * @opcpuid sse
9054 * @optest ->
9055 */
9056 return IEMOP_RAISE_INVALID_OPCODE();
9057}
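
/*
 * Illustrative sketch, not part of the emulation proper: the alignment rule
 * that IEM_MC_STORE_MEM_U128_ALIGN_SSE enforces for MOVNTDQ above.  A
 * misaligned 16-byte non-temporal store raises #GP(0); the real IEM path
 * folds this check into the guest memory access machinery.
 */
static bool sketchIsMovntdqTargetAligned(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0;    /* 128-bit stores must be 16-byte aligned */
}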
9058
9059/* Opcode 0xf3 0x0f 0xe7 - invalid */
9060/* Opcode 0xf2 0x0f 0xe7 - invalid */
9061
9062
9063/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9064FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9065/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9066FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9067/* Opcode 0xf3 0x0f 0xe8 - invalid */
9068/* Opcode 0xf2 0x0f 0xe8 - invalid */
9069
9070/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9071FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9072/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9073FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9074/* Opcode 0xf3 0x0f 0xe9 - invalid */
9075/* Opcode 0xf2 0x0f 0xe9 - invalid */
9076
9077/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9078FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9079/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9080FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9081/* Opcode 0xf3 0x0f 0xea - invalid */
9082/* Opcode 0xf2 0x0f 0xea - invalid */
9083
9084/** Opcode 0x0f 0xeb - por Pq, Qq */
9085FNIEMOP_STUB(iemOp_por_Pq_Qq);
9086/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9087FNIEMOP_STUB(iemOp_por_Vx_W);
9088/* Opcode 0xf3 0x0f 0xeb - invalid */
9089/* Opcode 0xf2 0x0f 0xeb - invalid */
9090
9091/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9092FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9093/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9094FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9095/* Opcode 0xf3 0x0f 0xec - invalid */
9096/* Opcode 0xf2 0x0f 0xec - invalid */
9097
9098/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9099FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9100/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9101FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9102/* Opcode 0xf3 0x0f 0xed - invalid */
9103/* Opcode 0xf2 0x0f 0xed - invalid */
9104
9105/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9106FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9107/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9108FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9109/* Opcode 0xf3 0x0f 0xee - invalid */
9110/* Opcode 0xf2 0x0f 0xee - invalid */
9111
9112
9113/** Opcode 0x0f 0xef - pxor Pq, Qq */
9114FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9115{
9116 IEMOP_MNEMONIC(pxor, "pxor");
9117 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9118}
9119
9120/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9121FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9122{
9123 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9124 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9125}
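
/*
 * Illustrative sketch, not part of the emulation proper: both PXOR forms
 * above delegate to the common full,full->full templates, whose worker is
 * just a wide exclusive-or.  The 128-bit case, using the RTUINT128U view:
 */
static void sketchPxorU128(RTUINT128U *puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] ^= puSrc->au64[0];
    puDst->au64[1] ^= puSrc->au64[1];
}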
9126
9127/* Opcode 0xf3 0x0f 0xef - invalid */
9128/* Opcode 0xf2 0x0f 0xef - invalid */
9129
9130/* Opcode 0x0f 0xf0 - invalid */
9131/* Opcode 0x66 0x0f 0xf0 - invalid */
9132/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9133FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9134
9135/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9136FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9137/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9138FNIEMOP_STUB(iemOp_psllw_Vx_W);
9139/* Opcode 0xf2 0x0f 0xf1 - invalid */
9140
9141/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9142FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9143/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9144FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9145/* Opcode 0xf2 0x0f 0xf2 - invalid */
9146
9147/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9148FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9149/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9150FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9151/* Opcode 0xf2 0x0f 0xf3 - invalid */
9152
9153/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9154FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9155/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9156FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9157/* Opcode 0xf2 0x0f 0xf4 - invalid */
9158
9159/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9160FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9161/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9162FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9163/* Opcode 0xf2 0x0f 0xf5 - invalid */
9164
9165/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9166FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9167/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9168FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9169/* Opcode 0xf2 0x0f 0xf6 - invalid */
9170
9171/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9172FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9173/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9174FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9175/* Opcode 0xf2 0x0f 0xf7 - invalid */
9176
9177/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9178FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9179/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9180FNIEMOP_STUB(iemOp_psubb_Vx_W);
9181/* Opcode 0xf2 0x0f 0xf8 - invalid */
9182
9183/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9184FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9185/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9186FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9187/* Opcode 0xf2 0x0f 0xf9 - invalid */
9188
9189/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9190FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9191/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9192FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9193/* Opcode 0xf2 0x0f 0xfa - invalid */
9194
9195/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9196FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9197/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9198FNIEMOP_STUB(iemOp_psubq_Vx_W);
9199/* Opcode 0xf2 0x0f 0xfb - invalid */
9200
9201/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9202FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9203/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9204FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9205/* Opcode 0xf2 0x0f 0xfc - invalid */
9206
9207/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9208FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9209/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9210FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9211/* Opcode 0xf2 0x0f 0xfd - invalid */
9212
9213/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9214FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9215/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9216FNIEMOP_STUB(iemOp_paddd_Vx_W);
9217/* Opcode 0xf2 0x0f 0xfe - invalid */
9218
9219
9220/** Opcode 0x0f 0xff - UD0 */
9221FNIEMOP_DEF(iemOp_ud0)
9222{
9223 IEMOP_MNEMONIC(ud0, "ud0");
9224 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9225 {
9226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9227#ifndef TST_IEM_CHECK_MC
9228 RTGCPTR GCPtrEff;
9229 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9230 if (rcStrict != VINF_SUCCESS)
9231 return rcStrict;
9232#endif
9233 IEMOP_HLP_DONE_DECODING();
9234 }
9235 return IEMOP_RAISE_INVALID_OPCODE();
9236}
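
/*
 * Illustrative sketch, not part of the emulation proper: the vendor split in
 * iemOp_ud0 mirrors hardware: Intel CPUs consume a ModR/M byte (plus any
 * SIB/displacement) for UD0 before raising #UD, while AMD CPUs fault on the
 * bare 0F FF.  modRmExtraByteCount() is a hypothetical helper standing in
 * for the SIB/displacement sizing done by iemOpHlpCalcRmEffAddr.
 */
static size_t sketchUd0Length(bool fIntel, uint8_t const *pbOpcode /* points at the 0F byte */)
{
    size_t cbInstr = 2;                                     /* 0F FF */
    if (fIntel)
        cbInstr += 1 + modRmExtraByteCount(pbOpcode[2]);    /* ModR/M + SIB/disp */
    return cbInstr;
}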
9237
9238
9239
9240/**
9241 * Two byte opcode map, first byte 0x0f.
9242 *
9243 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9244 * check if it needs updating as well when making changes.
9245 */
9246IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9247{
9248 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9249 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9250 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9251 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9252 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9253 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9254 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9255 /* 0x06 */ IEMOP_X4(iemOp_clts),
9256 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9257 /* 0x08 */ IEMOP_X4(iemOp_invd),
9258 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9259 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9260 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9261 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9262 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9263 /* 0x0e */ IEMOP_X4(iemOp_femms),
9264 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9265
9266 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9267 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9268 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9269 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9270 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9271 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9272 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9273 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9274 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9275 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9276 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9277 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9278 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9279 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9280 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9281 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9282
9283 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9284 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9285 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9286 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9287 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9288 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9289 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9290 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9291 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9292 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9293 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9294 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9295 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9296 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9297 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9298 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9299
9300 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9301 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9302 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9303 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9304 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9305 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9306 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9307 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9308 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9309 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9310 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9311 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9312 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9313 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9314 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9315 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9316
9317 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9318 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9319 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9320 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9321 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9322 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9323 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9324 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9325 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9326 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9327 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9328 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9329 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9330 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9331 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9332 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9333
9334 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9335 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9336 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9337 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9338 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9339 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9340 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9341 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9342 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9343 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9344 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9345 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9346 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9347 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9348 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9349 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9350
9351 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9352 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9353 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9354 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9355 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9356 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9357 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9358 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9359 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9360 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9361 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9362 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9363 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9364 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9365 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9366 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9367
9368 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9369 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9370 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9371 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9372 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9373 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9374 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9375 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9376
9377 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9378 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9379 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9380 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9381 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9382 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9383 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9384 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9385
9386 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9387 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9388 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9389 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9390 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9391 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9392 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9393 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9394 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9395 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9396 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9397 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9398 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9399 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9400 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9401 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9402
9403 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9404 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9405 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9406 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9407 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9408 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9409 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9410 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9411 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9412 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9413 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9414 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9415 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9416 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9417 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9418 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9419
9420 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9421 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9422 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9423 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9424 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9425 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9426 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9427 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9428 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9429 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9430 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9431 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9432 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9433 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9434 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9435 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9436
9437 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9438 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9439 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9440 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9441 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9442 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9443 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9444 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9445 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9446 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9447 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9448 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9449 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9450 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9451 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9452 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9453
9454 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9455 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9456 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9457 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9458 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9459 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9460 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9461 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9462 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9463 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9464 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9465 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9466 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9467 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9468 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9469 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9470
9471 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9472 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9473 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9474 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9475 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9476 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9477 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9478 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9479 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9480 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9481 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9482 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9483 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9484 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9485 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9486 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9487
9488 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9489 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9490 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9491 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9492 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9493 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9494 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9495 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9496 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9497 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9498 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9499 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9500 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9501 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9502 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9503 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9504
9505 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9506 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9507 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9508 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9509 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9510 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9511 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9512 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9513 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9514 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9515 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9516 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9517 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9518 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9519 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9520 /* 0xff */ IEMOP_X4(iemOp_ud0),
9521};
9522AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
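
/*
 * Illustrative sketch, not part of the emulation proper: the map holds 256
 * opcodes x 4 mandatory-prefix columns (none, 66h, F3h, F2h), which is what
 * the AssertCompile above pins down.  A decoder indexes it like this, with
 * idxPrefix tracking which of the four columns the prefix bytes selected:
 */
static PFNIEMOP sketchLookupTwoByteOp(uint8_t bOpcode, unsigned idxPrefix)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];   /* idxPrefix: 0=none, 1=66h, 2=F3h, 3=F2h */
}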
9523
9524/** @} */
9525