VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @66470

Last change: r66470, checked in by vboxsync, 8 years ago:

IEM: build fix (unused function)
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66470 2017-04-07 09:36:37Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

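/*
 * Editor's note: the IEM_MC_* statements above are IEM's "microcode" macro
 * DSL rather than plain C.  As a rough sketch of what the 16-bit SLDT
 * register case does (assumed reading for illustration, not the official
 * macro expansion):
 *
 * @code
 *     {
 *         uint16_t u16Ldtr;                       // IEM_MC_LOCAL
 *         u16Ldtr = <guest LDTR selector>;        // IEM_MC_FETCH_LDTR_U16
 *         <16-bit destination GPR> = u16Ldtr;     // IEM_MC_STORE_GREG_U16
 *         <advance RIP past the instruction>;     // IEM_MC_ADVANCE_RIP
 *     }
 * @endcode
 */
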
/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR (0x0f 0x00 /4) and VERW (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}

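/*
 * Editor's note: a worked ModRM example for the group dispatch above
 * (illustration only).  For the instruction bytes 0F 00 D8 the ModRM byte
 * is 0xd8:
 *
 * @code
 *     bRm = 0xd8;                                                // 11 011 000b
 *     mod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;   // = 3 -> register operand
 *     reg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;  // = 3 -> g_apfnGroup6[3] = iemOp_Grp6_ltr
 *     rm  = bRm & X86_MODRM_RM_MASK;                             // = 0 -> AX/EAX/RAX
 * @endcode
 *
 * I.e. 0F 00 D8 decodes as "ltr ax".
 */
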
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0, ModRM byte 0xc1 - vmcall. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, ModRM byte 0xc2 - vmlaunch. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, ModRM byte 0xc3 - vmresume. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, ModRM byte 0xc4 - vmxoff. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1, ModRM byte 0xc8 - monitor. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1, ModRM byte 0xc9 - mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}

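/*
 * Editor's note on the 0xfff0/0xffe0 ORs above (summary not in the original
 * file): on the 286 the MSW bits 4..15 are not implemented and read as set,
 * hence the OR with 0xfff0 for pre-386 targets; the 386 additionally
 * implements CR0.ET as bit 4, leaving only bits 5..15 to read as set, hence
 * the OR with 0xffe0.  Later target CPUs return the low CR0 bits unmodified.
 */
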
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7, ModRM byte 0xf8 - swapgs. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7, ModRM byte 0xf9 - rdtscp. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

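/*
 * Editor's note: with mod == 3 there is no memory operand, so 0f 01 re-uses
 * the reg and rm fields of the ModRM byte to encode individual instructions,
 * which is what the nested switches above decode.  Illustration:
 *
 * @code
 *     0F 01 C8  ->  mod=3 reg=1 rm=0  ->  monitor
 *     0F 01 C9  ->  mod=3 reg=1 rm=1  ->  mwait
 *     0F 01 D0  ->  mod=3 reg=2 rm=0  ->  xgetbv
 *     0F 01 F8  ->  mod=3 reg=7 rm=0  ->  swapgs (64-bit mode only)
 * @endcode
 */
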
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}

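/*
 * Editor's note: a guest-level usage sketch for the two wrappers above
 * (illustration only, not from the original file).  Both instructions set
 * ZF on success and leave the destination unchanged otherwise:
 *
 * @code
 *     mov  cx, 0x0010        ; some selector to probe
 *     lar  eax, cx           ; on success: EAX = access-rights bytes, ZF = 1
 *     jz   .selector_ok      ; ZF clear -> CX was not a valid/visible selector
 * @endcode
 */
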
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}

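/*
 * Editor's note: 3DNow! uses an unusual encoding - the byte selecting the
 * actual operation trails the instruction, after the ModRM byte and any
 * displacement, which is why it is fetched above only after the 0f 0f escape
 * has been decoded.  Illustration (not from the original file):
 *
 * @code
 *     0F 0F C1 B4   ->  pfmul mm0, mm1    ; trailing 0xb4 selects PFMUL
 *     0F 0F 08 9E   ->  pfadd mm1, [eax]  ; trailing 0x9e selects PFADD
 * @endcode
 */
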

/** Opcode 0x0f 0x10 - movups Vps, Wps */
FNIEMOP_STUB(iemOp_movups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - movupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_movupd_Vpd_Wpd);


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

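/*
 * Editor's note: the asymmetry in iemOp_movss_Vss_Wss above is
 * architectural - the register form only replaces the low dword of the
 * destination XMM register (IEM_MC_STORE_XREG_U32), while the memory load
 * form zeroes bits 127:32 of the destination
 * (IEM_MC_STORE_XREG_U32_ZX_U128), which is what the VssZxReg operand
 * annotation refers to.
 */
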

/** Opcode 0xf2 0x0f 0x10 - movsd Vx, Wsd */
FNIEMOP_STUB(iemOp_movsd_Vx_Wsd);


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

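/*
 * Editor's note: movsldup duplicates the even (low) dword of each qword
 * lane, which is exactly what the @optest above encodes.  Written out in
 * dword lanes (high to low):
 *
 * @code
 *     src = dddddddd 00000002 eeeeeeee 00000001
 *     dst = 00000002 00000002 00000001 00000001   // dst[i] = src[i & ~1]
 * @endcode
 */
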

/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

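/*
 * Editor's note: movddup replicates the low qword into both halves of the
 * destination, as the @optest above shows.  In qword lanes (high, low):
 *
 * @code
 *     src = ddddddddeeeeeeee 2222222211111111
 *     dst = 2222222211111111 2222222211111111   // dst.hi = dst.lo = src.lo
 * @endcode
 */
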

/** Opcode 0x0f 0x13 - movlps Mq, Vq */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - movlpd Mq, Vq */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq / movlhps Vdq, Uq */
FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}

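/*
 * Editor's note: the LOCK-prefix path above implements the AMD alternate
 * CR8 encoding for code outside 64-bit mode, gated by the fMovCr8In32Bit
 * feature flag.  Illustration (not from the original file):
 *
 * @code
 *     F0 0F 20 C0   ->  mov eax, cr8   ; lock prefix turns the CR0 encoding into CR8
 * @endcode
 */
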

/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - movaps Vps, Wps */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

1906/** Opcode 0x0f 0x29 - movaps Wps, Vps */
1907FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
1908{
1909 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1911 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1912 {
1913 /*
1914 * Register, register.
1915 */
1916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1917 IEM_MC_BEGIN(0, 0);
1918 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1919 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1920 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1921 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1922 IEM_MC_ADVANCE_RIP();
1923 IEM_MC_END();
1924 }
1925 else
1926 {
1927 /*
1928 * Memory, register.
1929 */
1930 IEM_MC_BEGIN(0, 2);
1931 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1933
1934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1936 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1937 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1938
1939 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1940 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1941
1942 IEM_MC_ADVANCE_RIP();
1943 IEM_MC_END();
1944 }
1945 return VINF_SUCCESS;
1946}
1947
1948/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
1949FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
1950{
1951 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1953 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1954 {
1955 /*
1956 * Register, register.
1957 */
1958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1959 IEM_MC_BEGIN(0, 0);
1960 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1961 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1962 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1963 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1964 IEM_MC_ADVANCE_RIP();
1965 IEM_MC_END();
1966 }
1967 else
1968 {
1969 /*
1970 * Memory, register.
1971 */
1972 IEM_MC_BEGIN(0, 2);
1973 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1975
1976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1978 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1979 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1980
1981 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1982 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1983
1984 IEM_MC_ADVANCE_RIP();
1985 IEM_MC_END();
1986 }
1987 return VINF_SUCCESS;
1988}
1989
1990/* Opcode 0xf3 0x0f 0x29 - invalid */
1991/* Opcode 0xf2 0x0f 0x29 - invalid */
1992
1993
1994/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1995FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1996/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1997FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1998/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
1999FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2000/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2001FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2002
2003
2004/** Opcode 0x0f 0x2b - movntps Mps, Vps */
2005FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2006{
2007 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2009 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2010 {
2011 /*
2012 * Memory, register.
2013 */
2014 IEM_MC_BEGIN(0, 2);
2015 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2017
2018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2020 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2021 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2022
2023 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2024 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2025
2026 IEM_MC_ADVANCE_RIP();
2027 IEM_MC_END();
2028 }
2029 /* The register, register encoding is invalid. */
2030 else
2031 return IEMOP_RAISE_INVALID_OPCODE();
2032 return VINF_SUCCESS;
2033}
2034
2035/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2036FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2037{
2038 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2040 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2041 {
2042 /*
2043 * Memory, register.
2044 */
2045 IEM_MC_BEGIN(0, 2);
2046 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2048
2049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2051 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2052 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2053
2054 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2055 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2056
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 }
2060 /* The register, register encoding is invalid. */
2061 else
2062 return IEMOP_RAISE_INVALID_OPCODE();
2063 return VINF_SUCCESS;
2064}
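/*
 * Usage note: movntps/movntpd are streaming stores with a non-temporal
 * cache hint, e.g.
 *      movntps [rdi], xmm0
 * The hint does not change the architectural result, so the workers above
 * implement them as plain aligned 128-bit stores, which is a legitimate
 * choice for an emulator.
 */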
2065/* Opcode 0xf3 0x0f 0x2b - invalid */
2066/* Opcode 0xf2 0x0f 0x2b - invalid */
2067
2068
2069/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2070FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2071/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2072FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2073/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2074FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2075/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2076FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2077
2078/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2079FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2080/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2081FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2082/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2083FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2084/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2085FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2086
2087/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2088FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2089/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2090FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2091/* Opcode 0xf3 0x0f 0x2e - invalid */
2092/* Opcode 0xf2 0x0f 0x2e - invalid */
2093
2094/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2095FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2096/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2097FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2098/* Opcode 0xf3 0x0f 0x2f - invalid */
2099/* Opcode 0xf2 0x0f 0x2f - invalid */
2100
2101/** Opcode 0x0f 0x30. */
2102FNIEMOP_DEF(iemOp_wrmsr)
2103{
2104 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2106 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2107}
2108
2109
2110/** Opcode 0x0f 0x31. */
2111FNIEMOP_DEF(iemOp_rdtsc)
2112{
2113 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2115 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2116}
2117
2118
2119/** Opcode 0x0f 0x32. */
2120FNIEMOP_DEF(iemOp_rdmsr)
2121{
2122 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2124 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2125}
2126
2127
2128/** Opcode 0x0f 0x33. */
2129FNIEMOP_STUB(iemOp_rdpmc);
2130/** Opcode 0x0f 0x34. */
2131FNIEMOP_STUB(iemOp_sysenter);
2132/** Opcode 0x0f 0x35. */
2133FNIEMOP_STUB(iemOp_sysexit);
2134/** Opcode 0x0f 0x37. */
2135FNIEMOP_STUB(iemOp_getsec);
2136/** Opcode 0x0f 0x38. */
2137FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2138/** Opcode 0x0f 0x3a. */
2139FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2140
2141
2142/**
2143 * Implements a conditional move.
2144 *
2145 * Wish there was an obvious way to do this where we could share and reduce
2146 * code bloat.
2147 *
2148 * @param a_Cnd The conditional "microcode" operation.
2149 */
2150#define CMOV_X(a_Cnd) \
2151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2153 { \
2154 switch (pVCpu->iem.s.enmEffOpSize) \
2155 { \
2156 case IEMMODE_16BIT: \
2157 IEM_MC_BEGIN(0, 1); \
2158 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2159 a_Cnd { \
2160 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2161 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2162 } IEM_MC_ENDIF(); \
2163 IEM_MC_ADVANCE_RIP(); \
2164 IEM_MC_END(); \
2165 return VINF_SUCCESS; \
2166 \
2167 case IEMMODE_32BIT: \
2168 IEM_MC_BEGIN(0, 1); \
2169 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2170 a_Cnd { \
2171 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2172 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2173 } IEM_MC_ELSE() { \
2174 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2175 } IEM_MC_ENDIF(); \
2176 IEM_MC_ADVANCE_RIP(); \
2177 IEM_MC_END(); \
2178 return VINF_SUCCESS; \
2179 \
2180 case IEMMODE_64BIT: \
2181 IEM_MC_BEGIN(0, 1); \
2182 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2183 a_Cnd { \
2184 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2185 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2186 } IEM_MC_ENDIF(); \
2187 IEM_MC_ADVANCE_RIP(); \
2188 IEM_MC_END(); \
2189 return VINF_SUCCESS; \
2190 \
2191 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2192 } \
2193 } \
2194 else \
2195 { \
2196 switch (pVCpu->iem.s.enmEffOpSize) \
2197 { \
2198 case IEMMODE_16BIT: \
2199 IEM_MC_BEGIN(0, 2); \
2200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2201 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2203 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2204 a_Cnd { \
2205 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2206 } IEM_MC_ENDIF(); \
2207 IEM_MC_ADVANCE_RIP(); \
2208 IEM_MC_END(); \
2209 return VINF_SUCCESS; \
2210 \
2211 case IEMMODE_32BIT: \
2212 IEM_MC_BEGIN(0, 2); \
2213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2214 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2216 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2217 a_Cnd { \
2218 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2219 } IEM_MC_ELSE() { \
2220 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2221 } IEM_MC_ENDIF(); \
2222 IEM_MC_ADVANCE_RIP(); \
2223 IEM_MC_END(); \
2224 return VINF_SUCCESS; \
2225 \
2226 case IEMMODE_64BIT: \
2227 IEM_MC_BEGIN(0, 2); \
2228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2229 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2231 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2232 a_Cnd { \
2233 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2234 } IEM_MC_ENDIF(); \
2235 IEM_MC_ADVANCE_RIP(); \
2236 IEM_MC_END(); \
2237 return VINF_SUCCESS; \
2238 \
2239 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2240 } \
2241 } do {} while (0)
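/*
 * Note on the IEM_MC_ELSE() branches above: in 64-bit mode a 32-bit CMOVcc
 * always writes its destination register, so the upper 32 bits are zeroed
 * even when the condition is false.  A rough C model of the 32-bit case
 * (hypothetical helper, for illustration only):
 *
 * @code
 *  static uint64_t iemCMovU32Sketch(uint64_t uDst, uint32_t uSrc, bool fCond)
 *  {
 *      return fCond ? uSrc : (uint32_t)uDst; // high half cleared either way
 *  }
 * @endcode
 */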
2242
2243
2244
2245/** Opcode 0x0f 0x40. */
2246FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2247{
2248 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2249 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2250}
2251
2252
2253/** Opcode 0x0f 0x41. */
2254FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2255{
2256 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2257 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2258}
2259
2260
2261/** Opcode 0x0f 0x42. */
2262FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2263{
2264 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2265 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2266}
2267
2268
2269/** Opcode 0x0f 0x43. */
2270FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2271{
2272 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2273 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2274}
2275
2276
2277/** Opcode 0x0f 0x44. */
2278FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2279{
2280 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2281 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2282}
2283
2284
2285/** Opcode 0x0f 0x45. */
2286FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2287{
2288 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2289 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2290}
2291
2292
2293/** Opcode 0x0f 0x46. */
2294FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2295{
2296 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2297 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2298}
2299
2300
2301/** Opcode 0x0f 0x47. */
2302FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2303{
2304 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2305 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2306}
2307
2308
2309/** Opcode 0x0f 0x48. */
2310FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2311{
2312 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2313 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2314}
2315
2316
2317/** Opcode 0x0f 0x49. */
2318FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2319{
2320 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2321 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2322}
2323
2324
2325/** Opcode 0x0f 0x4a. */
2326FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2327{
2328 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2329 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2330}
2331
2332
2333/** Opcode 0x0f 0x4b. */
2334FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2335{
2336 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2337 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2338}
2339
2340
2341/** Opcode 0x0f 0x4c. */
2342FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2343{
2344 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2345 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2346}
2347
2348
2349/** Opcode 0x0f 0x4d. */
2350FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2351{
2352 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2353 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2354}
2355
2356
2357/** Opcode 0x0f 0x4e. */
2358FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2359{
2360 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2361 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2362}
2363
2364
2365/** Opcode 0x0f 0x4f. */
2366FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2367{
2368 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2369 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2370}
2371
2372#undef CMOV_X
2373
2374/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2375FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2376/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2377FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2378/* Opcode 0xf3 0x0f 0x50 - invalid */
2379/* Opcode 0xf2 0x0f 0x50 - invalid */
2380
2381/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2382FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2383/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2384FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2385/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2386FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2387/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2388FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2389
2390/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2391FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2392/* Opcode 0x66 0x0f 0x52 - invalid */
2393/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2394FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2395/* Opcode 0xf2 0x0f 0x52 - invalid */
2396
2397/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2398FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2399/* Opcode 0x66 0x0f 0x53 - invalid */
2400/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2401FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2402/* Opcode 0xf2 0x0f 0x53 - invalid */
2403
2404/** Opcode 0x0f 0x54 - andps Vps, Wps */
2405FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2406/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2407FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2408/* Opcode 0xf3 0x0f 0x54 - invalid */
2409/* Opcode 0xf2 0x0f 0x54 - invalid */
2410
2411/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2412FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2413/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2414FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2415/* Opcode 0xf3 0x0f 0x55 - invalid */
2416/* Opcode 0xf2 0x0f 0x55 - invalid */
2417
2418/** Opcode 0x0f 0x56 - orps Vps, Wps */
2419FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2420/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2421FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2422/* Opcode 0xf3 0x0f 0x56 - invalid */
2423/* Opcode 0xf2 0x0f 0x56 - invalid */
2424
2425/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2426FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2427/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2428FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2429/* Opcode 0xf3 0x0f 0x57 - invalid */
2430/* Opcode 0xf2 0x0f 0x57 - invalid */
2431
2432/** Opcode 0x0f 0x58 - addps Vps, Wps */
2433FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2434/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2435FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2436/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2437FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2438/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2439FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2440
2441/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2442FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2443/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2444FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2445/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2446FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2447/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2448FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2449
2450/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2451FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2452/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2453FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2454/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2455FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2456/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2457FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2458
2459/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2460FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2461/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2462FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2463/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2464FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2465/* Opcode 0xf2 0x0f 0x5b - invalid */
2466
2467/** Opcode 0x0f 0x5c - subps Vps, Wps */
2468FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2469/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2470FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2471/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2472FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2473/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2474FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2475
2476/** Opcode 0x0f 0x5d - minps Vps, Wps */
2477FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2478/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2479FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2480/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2481FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2482/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2483FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2484
2485/** Opcode 0x0f 0x5e - divps Vps, Wps */
2486FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2487/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2488FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2489/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2490FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2491/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2492FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2493
2494/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2495FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2496/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2497FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2498/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2499FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2500/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2501FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2502
2503/**
2504 * Common worker for SSE2 instructions on the forms:
2505 *      pxxxx xmm1, xmm2/mem128
2506 *
2507 * The 2nd operand is the first half of a register, which in the memory case
2508 * means a 64-bit memory access with the alignment requirements of a full
2509 * 128-bit operand.
2510 *
2511 * Exceptions type 4.
2512 */
2513FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2514{
2515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2516 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2517 {
2518 /*
2519 * Register, register.
2520 */
2521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2522 IEM_MC_BEGIN(2, 0);
2523 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2524 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2525 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2526 IEM_MC_PREPARE_SSE_USAGE();
2527 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2528 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2529 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2530 IEM_MC_ADVANCE_RIP();
2531 IEM_MC_END();
2532 }
2533 else
2534 {
2535 /*
2536 * Register, memory.
2537 */
2538 IEM_MC_BEGIN(2, 2);
2539 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2540 IEM_MC_LOCAL(uint64_t, uSrc);
2541 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2543
2544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2546 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2547 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2548
2549 IEM_MC_PREPARE_SSE_USAGE();
2550 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2551 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2552
2553 IEM_MC_ADVANCE_RIP();
2554 IEM_MC_END();
2555 }
2556 return VINF_SUCCESS;
2557}
2558
2559
2560/**
2561 * Common worker for MMX instructions on the forms:
2562 *      pxxxx mm1, mm2/mem32
2563 *
2564 * The 2nd operand is the first half of a register, which in the memory case
2565 * means a 32-bit memory access.  Table entries without an MMX
2566 * implementation (pfnU64 is NULL) raise an invalid opcode exception.
2567 *
2568 * Exceptions type 4.
2569 */
2570FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2571{
2572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2573 if (!pImpl->pfnU64)
2574 return IEMOP_RAISE_INVALID_OPCODE();
2575 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2576 {
2577 /*
2578 * Register, register.
2579 */
2580 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2581 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2583 IEM_MC_BEGIN(2, 0);
2584 IEM_MC_ARG(uint64_t *, pDst, 0);
2585 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2586 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2587 IEM_MC_PREPARE_FPU_USAGE();
2588 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2589 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2590 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2591 IEM_MC_ADVANCE_RIP();
2592 IEM_MC_END();
2593 }
2594 else
2595 {
2596 /*
2597 * Register, memory.
2598 */
2599 IEM_MC_BEGIN(2, 2);
2600 IEM_MC_ARG(uint64_t *, pDst, 0);
2601 IEM_MC_LOCAL(uint32_t, uSrc);
2602 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2604
2605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2607 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2608 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2609
2610 IEM_MC_PREPARE_FPU_USAGE();
2611 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2612 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2613
2614 IEM_MC_ADVANCE_RIP();
2615 IEM_MC_END();
2616 }
2617 return VINF_SUCCESS;
2618}
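/*
 * For reference, the low-low forms interleave the low halves of the two
 * operands.  A C model of the 64-bit punpcklbw dispatched just below
 * (illustration only, not the actual g_iemAImpl_punpcklbw helper):
 *
 * @code
 *  static void PunpcklbwU64Sketch(uint64_t *puDst, uint32_t const *puSrc)
 *  {
 *      uint64_t uResult = 0;
 *      for (unsigned i = 0; i < 4; i++)
 *      {
 *          uResult |= (uint64_t)((*puDst >> i * 8) & 0xff) << (i * 16);     // dst byte i -> even slot
 *          uResult |= (uint64_t)((*puSrc >> i * 8) & 0xff) << (i * 16 + 8); // src byte i -> odd slot
 *      }
 *      *puDst = uResult;
 *  }
 * @endcode
 */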
2619
2620
2621/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2622FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2623{
2624 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2625 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2626}
2627
2628/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
2629FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
2630{
2631 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
2632 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2633}
2634
2635/* Opcode 0xf3 0x0f 0x60 - invalid */
2636
2637
2638/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2639FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2640{
2641 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2642 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2643}
2644
2645/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2646FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2647{
2648 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2649 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2650}
2651
2652/* Opcode 0xf3 0x0f 0x61 - invalid */
2653
2654
2655/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2656FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2657{
2658 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2659 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2660}
2661
2662/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2663FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2664{
2665 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2666 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2667}
2668
2669/* Opcode 0xf3 0x0f 0x62 - invalid */
2670
2671
2672
2673/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2674FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2675/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2676FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2677/* Opcode 0xf3 0x0f 0x63 - invalid */
2678
2679/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2680FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2681/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2682FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2683/* Opcode 0xf3 0x0f 0x64 - invalid */
2684
2685/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2686FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2687/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2688FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2689/* Opcode 0xf3 0x0f 0x65 - invalid */
2690
2691/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2692FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2693/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
2694FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
2695/* Opcode 0xf3 0x0f 0x66 - invalid */
2696
2697/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2698FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2699/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
2700FNIEMOP_STUB(iemOp_packuswb_Vx_W);
2701/* Opcode 0xf3 0x0f 0x67 - invalid */
2702
2703
2704/**
2705 * Common worker for MMX instructions on the form:
2706 * pxxxx mm1, mm2/mem64
2707 *
2708 * The 2nd operand is the second half of a register, which in the memory case
2709 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2710 * where it may read the full 128 bits or only the upper 64 bits.
2711 *
2712 * Exceptions type 4.
2713 */
2714FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2715{
2716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2717 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2718 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2719 {
2720 /*
2721 * Register, register.
2722 */
2723 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2724 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2726 IEM_MC_BEGIN(2, 0);
2727 IEM_MC_ARG(uint64_t *, pDst, 0);
2728 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2729 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2730 IEM_MC_PREPARE_FPU_USAGE();
2731 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2732 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2733 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2734 IEM_MC_ADVANCE_RIP();
2735 IEM_MC_END();
2736 }
2737 else
2738 {
2739 /*
2740 * Register, memory.
2741 */
2742 IEM_MC_BEGIN(2, 2);
2743 IEM_MC_ARG(uint64_t *, pDst, 0);
2744 IEM_MC_LOCAL(uint64_t, uSrc);
2745 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2747
2748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2750 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2751 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2752
2753 IEM_MC_PREPARE_FPU_USAGE();
2754 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2755 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2756
2757 IEM_MC_ADVANCE_RIP();
2758 IEM_MC_END();
2759 }
2760 return VINF_SUCCESS;
2761}
2762
2763
2764/**
2765 * Common worker for SSE2 instructions on the form:
2766 * pxxxx xmm1, xmm2/mem128
2767 *
2768 * The 2nd operand is the second half of a register, which in the memory case
2769 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2770 * where it may read the full 128 bits or only the upper 64 bits.
2771 *
2772 * Exceptions type 4.
2773 */
2774FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2775{
2776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2777 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2778 {
2779 /*
2780 * Register, register.
2781 */
2782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2783 IEM_MC_BEGIN(2, 0);
2784 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2785 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2786 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2787 IEM_MC_PREPARE_SSE_USAGE();
2788 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2789 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2790 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2791 IEM_MC_ADVANCE_RIP();
2792 IEM_MC_END();
2793 }
2794 else
2795 {
2796 /*
2797 * Register, memory.
2798 */
2799 IEM_MC_BEGIN(2, 2);
2800 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2801 IEM_MC_LOCAL(RTUINT128U, uSrc);
2802 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2803 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2804
2805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2807 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2808 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2809
2810 IEM_MC_PREPARE_SSE_USAGE();
2811 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2812 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2813
2814 IEM_MC_ADVANCE_RIP();
2815 IEM_MC_END();
2816 }
2817 return VINF_SUCCESS;
2818}
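/*
 * The simplest of the high-high forms is punpckhqdq (see further down); a
 * C model for illustration (not the real g_iemAImpl_punpckhqdq helper):
 *
 * @code
 *  static void PunpckhqdqSketch(PRTUINT128U puDst, PCRTUINT128U puSrc)
 *  {
 *      puDst->s.Lo = puDst->s.Hi;  // destination high qword moves down
 *      puDst->s.Hi = puSrc->s.Hi;  // source high qword fills the top
 *  }
 * @endcode
 */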
2819
2820
2821/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2822FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2823{
2824 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2825 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2826}
2827
2828/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
2829FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
2830{
2831 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
2832 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2833}
2834/* Opcode 0xf3 0x0f 0x68 - invalid */
2835
2836
2837/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2838FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2839{
2840 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2841 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2842}
2843
2844/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
2845FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
2846{
2847 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
2848 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2849
2850}
2851/* Opcode 0xf3 0x0f 0x69 - invalid */
2852
2853
2854/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2855FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2856{
2857 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2858 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2859}
2860
2861/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
2862FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
2863{
2864 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
2865 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2866}
2867/* Opcode 0xf3 0x0f 0x6a - invalid */
2868
2869
2870/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2871FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2872/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
2873FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
2874/* Opcode 0xf3 0x0f 0x6b - invalid */
2875
2876
2877/* Opcode 0x0f 0x6c - invalid */
2878
2879/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
2880FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
2881{
2882 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
2883 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2884}
2885
2886/* Opcode 0xf3 0x0f 0x6c - invalid */
2887/* Opcode 0xf2 0x0f 0x6c - invalid */
2888
2889
2890/* Opcode 0x0f 0x6d - invalid */
2891
2892/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
2893FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
2894{
2895 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,Wx");
2896 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2897}
2898
2899/* Opcode 0xf3 0x0f 0x6d - invalid */
2900
2901
2902/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2903FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2904{
2905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2906 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2907 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2908 else
2909 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2910 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2911 {
2912 /* MMX, greg */
2913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2914 IEM_MC_BEGIN(0, 1);
2915 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2916 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2917 IEM_MC_LOCAL(uint64_t, u64Tmp);
2918 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2919 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2920 else
2921 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2922 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2923 IEM_MC_ADVANCE_RIP();
2924 IEM_MC_END();
2925 }
2926 else
2927 {
2928 /* MMX, [mem] */
2929 IEM_MC_BEGIN(0, 2);
2930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2931 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2934 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2935 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2936 {
2937 IEM_MC_LOCAL(uint64_t, u64Tmp);
2938 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2939 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2940 }
2941 else
2942 {
2943 IEM_MC_LOCAL(uint32_t, u32Tmp);
2944 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2945 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2946 }
2947 IEM_MC_ADVANCE_RIP();
2948 IEM_MC_END();
2949 }
2950 return VINF_SUCCESS;
2951}
2952
2953/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
2954FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
2955{
2956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2957 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2958 IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
2959 else
2960 IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
2961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2962 {
2963 /* XMM, greg*/
2964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2965 IEM_MC_BEGIN(0, 1);
2966 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2967 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2968 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2969 {
2970 IEM_MC_LOCAL(uint64_t, u64Tmp);
2971 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2972 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2973 }
2974 else
2975 {
2976 IEM_MC_LOCAL(uint32_t, u32Tmp);
2977 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2978 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2979 }
2980 IEM_MC_ADVANCE_RIP();
2981 IEM_MC_END();
2982 }
2983 else
2984 {
2985 /* XMM, [mem] */
2986 IEM_MC_BEGIN(0, 2);
2987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2988 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2991 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2992 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2993 {
2994 IEM_MC_LOCAL(uint64_t, u64Tmp);
2995 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2996 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2997 }
2998 else
2999 {
3000 IEM_MC_LOCAL(uint32_t, u32Tmp);
3001 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3002 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3003 }
3004 IEM_MC_ADVANCE_RIP();
3005 IEM_MC_END();
3006 }
3007 return VINF_SUCCESS;
3008}
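/*
 * The _ZX_U128 stores above model the architectural rule that movd/movq to
 * an XMM register always zeroes all bits the source does not cover.  Sketch
 * of the 32-bit case (illustration only):
 *
 * @code
 *  static void MovdToXmmSketch(RTUINT128U *puDst, uint32_t uSrc)
 *  {
 *      puDst->s.Lo = uSrc;  // bits 32-63 cleared by the widening assignment
 *      puDst->s.Hi = 0;     // bits 64-127 cleared as well
 *  }
 * @endcode
 */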
3009
3010/* Opcode 0xf3 0x0f 0x6e - invalid */
3011
3012
3013/** Opcode 0x0f 0x6f - movq Pq, Qq */
3014FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3015{
3016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3017 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3018 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3019 {
3020 /*
3021 * Register, register.
3022 */
3023 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3024 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3026 IEM_MC_BEGIN(0, 1);
3027 IEM_MC_LOCAL(uint64_t, u64Tmp);
3028 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3029 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3030 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3031 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3032 IEM_MC_ADVANCE_RIP();
3033 IEM_MC_END();
3034 }
3035 else
3036 {
3037 /*
3038 * Register, memory.
3039 */
3040 IEM_MC_BEGIN(0, 2);
3041 IEM_MC_LOCAL(uint64_t, u64Tmp);
3042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3043
3044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3046 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3047 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3048 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3049 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3050
3051 IEM_MC_ADVANCE_RIP();
3052 IEM_MC_END();
3053 }
3054 return VINF_SUCCESS;
3055}
3056
3057/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3058FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3059{
3060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3061 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3062 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3063 {
3064 /*
3065 * Register, register.
3066 */
3067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3068 IEM_MC_BEGIN(0, 0);
3069 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3070 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3071 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3072 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3073 IEM_MC_ADVANCE_RIP();
3074 IEM_MC_END();
3075 }
3076 else
3077 {
3078 /*
3079 * Register, memory.
3080 */
3081 IEM_MC_BEGIN(0, 2);
3082 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3084
3085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3087 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3088 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3089 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3090 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3091
3092 IEM_MC_ADVANCE_RIP();
3093 IEM_MC_END();
3094 }
3095 return VINF_SUCCESS;
3096}
3097
3098/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3099FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3100{
3101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3102 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3104 {
3105 /*
3106 * Register, register.
3107 */
3108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3109 IEM_MC_BEGIN(0, 0);
3110 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3111 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3112 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3113 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3114 IEM_MC_ADVANCE_RIP();
3115 IEM_MC_END();
3116 }
3117 else
3118 {
3119 /*
3120 * Register, memory.
3121 */
3122 IEM_MC_BEGIN(0, 2);
3123 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3125
3126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3128 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3129 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3130 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3131 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3132
3133 IEM_MC_ADVANCE_RIP();
3134 IEM_MC_END();
3135 }
3136 return VINF_SUCCESS;
3137}
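/*
 * The only difference between the movdqa and movdqu workers above is the
 * aligned vs. unaligned memory fetch: movdqa raises #GP(0) when the
 * effective address is not 16-byte aligned, movdqu never does.  The check
 * amounts to (illustrative sketch):
 *
 * @code
 *  static bool iemIsSse16ByteAlignedSketch(RTGCPTR GCPtrEff)
 *  {
 *      return !(GCPtrEff & 15); // false would mean #GP(0) for movdqa
 *  }
 * @endcode
 */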
3138
3139
3140/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3141FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3142{
3143 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3146 {
3147 /*
3148 * Register, register.
3149 */
3150 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3152
3153 IEM_MC_BEGIN(3, 0);
3154 IEM_MC_ARG(uint64_t *, pDst, 0);
3155 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3156 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3157 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3158 IEM_MC_PREPARE_FPU_USAGE();
3159 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3160 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3161 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3162 IEM_MC_ADVANCE_RIP();
3163 IEM_MC_END();
3164 }
3165 else
3166 {
3167 /*
3168 * Register, memory.
3169 */
3170 IEM_MC_BEGIN(3, 2);
3171 IEM_MC_ARG(uint64_t *, pDst, 0);
3172 IEM_MC_LOCAL(uint64_t, uSrc);
3173 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3175
3176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3177 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3178 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3180 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3181
3182 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3183 IEM_MC_PREPARE_FPU_USAGE();
3184 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3185 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3186
3187 IEM_MC_ADVANCE_RIP();
3188 IEM_MC_END();
3189 }
3190 return VINF_SUCCESS;
3191}
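/*
 * C model of the word shuffle performed by iemAImpl_pshufw, for
 * illustration only (the real helper lives elsewhere):
 *
 * @code
 *  static void PshufwSketch(uint64_t *puDst, uint64_t const *puSrc, uint8_t bImm)
 *  {
 *      uint64_t uResult = 0;
 *      for (unsigned i = 0; i < 4; i++)
 *      {
 *          unsigned iSel = (bImm >> i * 2) & 3; // two selector bits per destination word
 *          uResult |= ((*puSrc >> iSel * 16) & UINT64_C(0xffff)) << (i * 16);
 *      }
 *      *puDst = uResult;
 *  }
 * @endcode
 */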
3192
3193/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3194FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3195{
3196 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3197 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3198 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3199 {
3200 /*
3201 * Register, register.
3202 */
3203 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3205
3206 IEM_MC_BEGIN(3, 0);
3207 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3208 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3209 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3210 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3211 IEM_MC_PREPARE_SSE_USAGE();
3212 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3213 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3214 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3215 IEM_MC_ADVANCE_RIP();
3216 IEM_MC_END();
3217 }
3218 else
3219 {
3220 /*
3221 * Register, memory.
3222 */
3223 IEM_MC_BEGIN(3, 2);
3224 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3225 IEM_MC_LOCAL(RTUINT128U, uSrc);
3226 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3228
3229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3230 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3231 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3233 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3234
3235 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3236 IEM_MC_PREPARE_SSE_USAGE();
3237 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3238 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3239
3240 IEM_MC_ADVANCE_RIP();
3241 IEM_MC_END();
3242 }
3243 return VINF_SUCCESS;
3244}
3245
3246/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3247FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3248{
3249 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3251 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3252 {
3253 /*
3254 * Register, register.
3255 */
3256 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3258
3259 IEM_MC_BEGIN(3, 0);
3260 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3261 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3262 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3263 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3264 IEM_MC_PREPARE_SSE_USAGE();
3265 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3266 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3267 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3268 IEM_MC_ADVANCE_RIP();
3269 IEM_MC_END();
3270 }
3271 else
3272 {
3273 /*
3274 * Register, memory.
3275 */
3276 IEM_MC_BEGIN(3, 2);
3277 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3278 IEM_MC_LOCAL(RTUINT128U, uSrc);
3279 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3281
3282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3283 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3284 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3286 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3287
3288 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3289 IEM_MC_PREPARE_SSE_USAGE();
3290 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3291 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3292
3293 IEM_MC_ADVANCE_RIP();
3294 IEM_MC_END();
3295 }
3296 return VINF_SUCCESS;
3297}
3298
3299/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3300FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3301{
3302 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3304 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3305 {
3306 /*
3307 * Register, register.
3308 */
3309 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3311
3312 IEM_MC_BEGIN(3, 0);
3313 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3314 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3315 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3316 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3317 IEM_MC_PREPARE_SSE_USAGE();
3318 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3319 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3320 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3321 IEM_MC_ADVANCE_RIP();
3322 IEM_MC_END();
3323 }
3324 else
3325 {
3326 /*
3327 * Register, memory.
3328 */
3329 IEM_MC_BEGIN(3, 2);
3330 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3331 IEM_MC_LOCAL(RTUINT128U, uSrc);
3332 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3334
3335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3336 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3337 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3339 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3340
3341 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3342 IEM_MC_PREPARE_SSE_USAGE();
3343 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3344 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3345
3346 IEM_MC_ADVANCE_RIP();
3347 IEM_MC_END();
3348 }
3349 return VINF_SUCCESS;
3350}
3351
3352
3353/** Opcode 0x0f 0x71 11/2. */
3354FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3355
3356/** Opcode 0x66 0x0f 0x71 11/2. */
3357FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3358
3359/** Opcode 0x0f 0x71 11/4. */
3360FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3361
3362/** Opcode 0x66 0x0f 0x71 11/4. */
3363FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3364
3365/** Opcode 0x0f 0x71 11/6. */
3366FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3367
3368/** Opcode 0x66 0x0f 0x71 11/6. */
3369FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3370
3371
3372/**
3373 * Group 12 jump table for register variant.
3374 */
3375IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3376{
3377 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3378 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3379 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3380 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3381 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3382 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3383 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3384 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3385};
3386AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3387
3388
3389/** Opcode 0x0f 0x71. */
3390FNIEMOP_DEF(iemOp_Grp12)
3391{
3392 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3393 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3394 /* register, register */
3395 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3396 + pVCpu->iem.s.idxPrefix], bRm);
3397 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3398}
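/*
 * Example of the dispatch above, assuming the usual idxPrefix convention
 * (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2): for 66 0F 71 D0 08
 * (psrlw xmm0, 8) the ModRM reg field is 2 and idxPrefix is 1, so:
 *
 * @code
 *  uint8_t  bRm = 0xd0;                      // mod=3, reg=2, rm=0
 *  unsigned idx = ((bRm >> 3) & 7) * 4 + 1;  // reg 2 * 4 + prefix 1 = 9 -> iemOp_Grp12_psrlw_Ux_Ib
 * @endcode
 */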
3399
3400
3401/** Opcode 0x0f 0x72 11/2. */
3402FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3403
3404/** Opcode 0x66 0x0f 0x72 11/2. */
3405FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3406
3407/** Opcode 0x0f 0x72 11/4. */
3408FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3409
3410/** Opcode 0x66 0x0f 0x72 11/4. */
3411FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3412
3413/** Opcode 0x0f 0x72 11/6. */
3414FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3415
3416/** Opcode 0x66 0x0f 0x72 11/6. */
3417FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3418
3419
3420/**
3421 * Group 13 jump table for register variant.
3422 */
3423IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3424{
3425 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3426 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3427 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3428 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3429 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3430 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3431 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3432 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3433};
3434AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3435
3436/** Opcode 0x0f 0x72. */
3437FNIEMOP_DEF(iemOp_Grp13)
3438{
3439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3440 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3441 /* register, register */
3442 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3443 + pVCpu->iem.s.idxPrefix], bRm);
3444 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3445}
3446
3447
3448/** Opcode 0x0f 0x73 11/2. */
3449FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3450
3451/** Opcode 0x66 0x0f 0x73 11/2. */
3452FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3453
3454/** Opcode 0x66 0x0f 0x73 11/3. */
3455FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3456
3457/** Opcode 0x0f 0x73 11/6. */
3458FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3459
3460/** Opcode 0x66 0x0f 0x73 11/6. */
3461FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3462
3463/** Opcode 0x66 0x0f 0x73 11/7. */
3464FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3465
3466/**
3467 * Group 14 jump table for register variant.
3468 */
3469IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3470{
3471 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3472 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3473 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3474 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3475 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3476 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3477 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3478 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3479};
3480AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3481
3482
3483/** Opcode 0x0f 0x73. */
3484FNIEMOP_DEF(iemOp_Grp14)
3485{
3486 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3487 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3488 /* register, register */
3489 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3490 + pVCpu->iem.s.idxPrefix], bRm);
3491 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3492}
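/*
 * Sketch of the whole-register byte shift behind /3 above (psrldq); shift
 * counts of 16 or more clear the register.  Illustration only, not the
 * real helper:
 *
 * @code
 *  static void PsrldqSketch(PRTUINT128U puDst, uint8_t cbShift)
 *  {
 *      RTUINT128U uResult = { { 0, 0 } };
 *      if (cbShift < 16)
 *          for (unsigned i = 0; i + cbShift < 16; i++)
 *              uResult.au8[i] = puDst->au8[i + cbShift]; // shift right in byte units
 *      *puDst = uResult;
 *  }
 * @endcode
 */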
3493
3494
3495/**
3496 * Common worker for MMX instructions on the form:
3497 * pxxx mm1, mm2/mem64
3498 */
3499FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3500{
3501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3502 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3503 {
3504 /*
3505 * Register, register.
3506 */
3507 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3508 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3510 IEM_MC_BEGIN(2, 0);
3511 IEM_MC_ARG(uint64_t *, pDst, 0);
3512 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3513 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3514 IEM_MC_PREPARE_FPU_USAGE();
3515 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3516 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3517 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3518 IEM_MC_ADVANCE_RIP();
3519 IEM_MC_END();
3520 }
3521 else
3522 {
3523 /*
3524 * Register, memory.
3525 */
3526 IEM_MC_BEGIN(2, 2);
3527 IEM_MC_ARG(uint64_t *, pDst, 0);
3528 IEM_MC_LOCAL(uint64_t, uSrc);
3529 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3531
3532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3534 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3535 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3536
3537 IEM_MC_PREPARE_FPU_USAGE();
3538 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3539 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3540
3541 IEM_MC_ADVANCE_RIP();
3542 IEM_MC_END();
3543 }
3544 return VINF_SUCCESS;
3545}
3546
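/* Unlike the SSE2 worker below, the MMX worker above takes its register
   indexes straight off ModRM without applying uRexReg/uRexB; whether real
   hardware honours REX here is still open (see the testcase todos above). */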
3547
3548/**
3549 * Common worker for SSE2 instructions on the form:
3550 * pxxx xmm1, xmm2/mem128
3551 *
3552 * Proper alignment of the 128-bit operand is enforced.
3553 * Exceptions type 4. SSE2 cpuid checks.
3554 */
3555FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3556{
3557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3558 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3559 {
3560 /*
3561 * Register, register.
3562 */
3563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3564 IEM_MC_BEGIN(2, 0);
3565 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3566 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3567 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3568 IEM_MC_PREPARE_SSE_USAGE();
3569 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3570 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3571 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3572 IEM_MC_ADVANCE_RIP();
3573 IEM_MC_END();
3574 }
3575 else
3576 {
3577 /*
3578 * Register, memory.
3579 */
3580 IEM_MC_BEGIN(2, 2);
3581 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3582 IEM_MC_LOCAL(RTUINT128U, uSrc);
3583 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3585
3586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3588 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3589 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3590
3591 IEM_MC_PREPARE_SSE_USAGE();
3592 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3593 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3594
3595 IEM_MC_ADVANCE_RIP();
3596 IEM_MC_END();
3597 }
3598 return VINF_SUCCESS;
3599}
3600
3601
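/* A hypothetical new 'pxxx Pq,Qq' / 'pxxx Vx,Wx' instruction pair would just
   forward to the two workers above with its implementation table, exactly as
   the pcmpeqX handlers below do, e.g.:
       return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full,  &g_iemAImpl_pxxx);
       return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxxx);
   (g_iemAImpl_pxxx being the hypothetical IEMOPMEDIAF2 implementation pair.) */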
3602/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3603FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3604{
3605 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3606 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3607}
3608
3609/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3610FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3611{
3612    IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3613 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3614}
3615
3616/* Opcode 0xf3 0x0f 0x74 - invalid */
3617/* Opcode 0xf2 0x0f 0x74 - invalid */
3618
3619
3620/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3621FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3622{
3623 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3624 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3625}
3626
3627/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3628FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3629{
3630 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3631 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3632}
3633
3634/* Opcode 0xf3 0x0f 0x75 - invalid */
3635/* Opcode 0xf2 0x0f 0x75 - invalid */
3636
3637
3638/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3639FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3640{
3641 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3642 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3643}
3644
3645/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3646FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3647{
3648    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
3649 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3650}
3651
3652/* Opcode 0xf3 0x0f 0x76 - invalid */
3653/* Opcode 0xf2 0x0f 0x76 - invalid */
3654
3655
3656/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3657FNIEMOP_STUB(iemOp_emms);
3658/* Opcode 0x66 0x0f 0x77 - invalid */
3659/* Opcode 0xf3 0x0f 0x77 - invalid */
3660/* Opcode 0xf2 0x0f 0x77 - invalid */
3661
3662/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3663FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3664/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3665FNIEMOP_STUB(iemOp_AmdGrp17);
3666/* Opcode 0xf3 0x0f 0x78 - invalid */
3667/* Opcode 0xf2 0x0f 0x78 - invalid */
3668
3669/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3670FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3671/* Opcode 0x66 0x0f 0x79 - invalid */
3672/* Opcode 0xf3 0x0f 0x79 - invalid */
3673/* Opcode 0xf2 0x0f 0x79 - invalid */
3674
3675/* Opcode 0x0f 0x7a - invalid */
3676/* Opcode 0x66 0x0f 0x7a - invalid */
3677/* Opcode 0xf3 0x0f 0x7a - invalid */
3678/* Opcode 0xf2 0x0f 0x7a - invalid */
3679
3680/* Opcode 0x0f 0x7b - invalid */
3681/* Opcode 0x66 0x0f 0x7b - invalid */
3682/* Opcode 0xf3 0x0f 0x7b - invalid */
3683/* Opcode 0xf2 0x0f 0x7b - invalid */
3684
3685/* Opcode 0x0f 0x7c - invalid */
3686/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3687FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3688/* Opcode 0xf3 0x0f 0x7c - invalid */
3689/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3690FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3691
3692/* Opcode 0x0f 0x7d - invalid */
3693/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
3694FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
3695/* Opcode 0xf3 0x0f 0x7d - invalid */
3696/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
3697FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
3698
3699
3700/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3701FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3702{
3703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3704 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3705 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3706 else
3707 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3708 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3709 {
3710 /* greg, MMX */
3711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3712 IEM_MC_BEGIN(0, 1);
3713 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3714 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3715 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3716 {
3717 IEM_MC_LOCAL(uint64_t, u64Tmp);
3718 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3719 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3720 }
3721 else
3722 {
3723 IEM_MC_LOCAL(uint32_t, u32Tmp);
3724 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3725 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3726 }
3727 IEM_MC_ADVANCE_RIP();
3728 IEM_MC_END();
3729 }
3730 else
3731 {
3732 /* [mem], MMX */
3733 IEM_MC_BEGIN(0, 2);
3734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3735 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3738 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3739 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3740 {
3741 IEM_MC_LOCAL(uint64_t, u64Tmp);
3742 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3743 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3744 }
3745 else
3746 {
3747 IEM_MC_LOCAL(uint32_t, u32Tmp);
3748 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3749 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3750 }
3751 IEM_MC_ADVANCE_RIP();
3752 IEM_MC_END();
3753 }
3754 return VINF_SUCCESS;
3755}
3756
3757/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
3758FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
3759{
3760 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3761 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3762 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3763 else
3764 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3765 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3766 {
3767 /* greg, XMM */
3768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3769 IEM_MC_BEGIN(0, 1);
3770 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3771 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3772 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3773 {
3774 IEM_MC_LOCAL(uint64_t, u64Tmp);
3775 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3776 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3777 }
3778 else
3779 {
3780 IEM_MC_LOCAL(uint32_t, u32Tmp);
3781 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3782 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3783 }
3784 IEM_MC_ADVANCE_RIP();
3785 IEM_MC_END();
3786 }
3787 else
3788 {
3789 /* [mem], XMM */
3790 IEM_MC_BEGIN(0, 2);
3791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3792 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3795 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3796 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3797 {
3798 IEM_MC_LOCAL(uint64_t, u64Tmp);
3799 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3800 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3801 }
3802 else
3803 {
3804 IEM_MC_LOCAL(uint32_t, u32Tmp);
3805 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3806 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3807 }
3808 IEM_MC_ADVANCE_RIP();
3809 IEM_MC_END();
3810 }
3811 return VINF_SUCCESS;
3812}
3813
3814/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
3815FNIEMOP_STUB(iemOp_movq_Vq_Wq);
3816/* Opcode 0xf2 0x0f 0x7e - invalid */
3817
3818
3819/** Opcode 0x0f 0x7f - movq Qq, Pq */
3820FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3821{
3822 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3824 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3825 {
3826 /*
3827 * Register, register.
3828 */
3829 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3830 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3832 IEM_MC_BEGIN(0, 1);
3833 IEM_MC_LOCAL(uint64_t, u64Tmp);
3834 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3835 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3836 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3837 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3838 IEM_MC_ADVANCE_RIP();
3839 IEM_MC_END();
3840 }
3841 else
3842 {
3843 /*
3844 * Register, memory.
3845 */
3846 IEM_MC_BEGIN(0, 2);
3847 IEM_MC_LOCAL(uint64_t, u64Tmp);
3848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3849
3850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3852 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3853 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3854
3855 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3856 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3857
3858 IEM_MC_ADVANCE_RIP();
3859 IEM_MC_END();
3860 }
3861 return VINF_SUCCESS;
3862}
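/* Note the asymmetry above: the register form actualizes the FPU state for
   change since it writes a destination MMX register, while the memory form
   only needs it for read before storing to guest memory. */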
3863
3864/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
3865FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
3866{
3867 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
3868 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3869 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3870 {
3871 /*
3872 * Register, register.
3873 */
3874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3875 IEM_MC_BEGIN(0, 0);
3876 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3877 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3878 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3879 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3880 IEM_MC_ADVANCE_RIP();
3881 IEM_MC_END();
3882 }
3883 else
3884 {
3885 /*
3886 * Register, memory.
3887 */
3888 IEM_MC_BEGIN(0, 2);
3889 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3891
3892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3894 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3895 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3896
3897 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3898 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3899
3900 IEM_MC_ADVANCE_RIP();
3901 IEM_MC_END();
3902 }
3903 return VINF_SUCCESS;
3904}
3905
3906/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
3907FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
3908{
3909    IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
3910    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3911 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3912 {
3913 /*
3914 * Register, register.
3915 */
3916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3917 IEM_MC_BEGIN(0, 0);
3918 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3919 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3920 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3921 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3922 IEM_MC_ADVANCE_RIP();
3923 IEM_MC_END();
3924 }
3925 else
3926 {
3927 /*
3928 * Register, memory.
3929 */
3930 IEM_MC_BEGIN(0, 2);
3931 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3933
3934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3936 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3937 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3938
3939 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3940 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3941
3942 IEM_MC_ADVANCE_RIP();
3943 IEM_MC_END();
3944 }
3945 return VINF_SUCCESS;
3946}
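/* movdqa and movdqu above differ only in the store microcode op: the aligned
   form uses IEM_MC_STORE_MEM_U128_ALIGN_SSE, which is what enforces the
   16-byte alignment check, while the unaligned form uses the plain store. */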
3947
3948/* Opcode 0xf2 0x0f 0x7f - invalid */
3949
3950
3951
3952/** Opcode 0x0f 0x80. */
3953FNIEMOP_DEF(iemOp_jo_Jv)
3954{
3955 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3956 IEMOP_HLP_MIN_386();
3957 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3958 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3959 {
3960 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3962
3963 IEM_MC_BEGIN(0, 0);
3964 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3965 IEM_MC_REL_JMP_S16(i16Imm);
3966 } IEM_MC_ELSE() {
3967 IEM_MC_ADVANCE_RIP();
3968 } IEM_MC_ENDIF();
3969 IEM_MC_END();
3970 }
3971 else
3972 {
3973 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3975
3976 IEM_MC_BEGIN(0, 0);
3977 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3978 IEM_MC_REL_JMP_S32(i32Imm);
3979 } IEM_MC_ELSE() {
3980 IEM_MC_ADVANCE_RIP();
3981 } IEM_MC_ENDIF();
3982 IEM_MC_END();
3983 }
3984 return VINF_SUCCESS;
3985}
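/* The remaining Jcc Jv handlers follow the same template as jo above: a
   16-bit effective operand size fetches a sign-extended rel16 displacement,
   anything wider fetches a rel32; only the EFLAGS condition tested differs. */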
3986
3987
3988/** Opcode 0x0f 0x81. */
3989FNIEMOP_DEF(iemOp_jno_Jv)
3990{
3991 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3992 IEMOP_HLP_MIN_386();
3993 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3994 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3995 {
3996 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3998
3999 IEM_MC_BEGIN(0, 0);
4000 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4001 IEM_MC_ADVANCE_RIP();
4002 } IEM_MC_ELSE() {
4003 IEM_MC_REL_JMP_S16(i16Imm);
4004 } IEM_MC_ENDIF();
4005 IEM_MC_END();
4006 }
4007 else
4008 {
4009 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4011
4012 IEM_MC_BEGIN(0, 0);
4013 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4014 IEM_MC_ADVANCE_RIP();
4015 } IEM_MC_ELSE() {
4016 IEM_MC_REL_JMP_S32(i32Imm);
4017 } IEM_MC_ENDIF();
4018 IEM_MC_END();
4019 }
4020 return VINF_SUCCESS;
4021}
4022
4023
4024/** Opcode 0x0f 0x82. */
4025FNIEMOP_DEF(iemOp_jc_Jv)
4026{
4027 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4028 IEMOP_HLP_MIN_386();
4029 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4030 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4031 {
4032 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4034
4035 IEM_MC_BEGIN(0, 0);
4036 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4037 IEM_MC_REL_JMP_S16(i16Imm);
4038 } IEM_MC_ELSE() {
4039 IEM_MC_ADVANCE_RIP();
4040 } IEM_MC_ENDIF();
4041 IEM_MC_END();
4042 }
4043 else
4044 {
4045 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4047
4048 IEM_MC_BEGIN(0, 0);
4049 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4050 IEM_MC_REL_JMP_S32(i32Imm);
4051 } IEM_MC_ELSE() {
4052 IEM_MC_ADVANCE_RIP();
4053 } IEM_MC_ENDIF();
4054 IEM_MC_END();
4055 }
4056 return VINF_SUCCESS;
4057}
4058
4059
4060/** Opcode 0x0f 0x83. */
4061FNIEMOP_DEF(iemOp_jnc_Jv)
4062{
4063 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4064 IEMOP_HLP_MIN_386();
4065 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4066 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4067 {
4068 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4070
4071 IEM_MC_BEGIN(0, 0);
4072 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4073 IEM_MC_ADVANCE_RIP();
4074 } IEM_MC_ELSE() {
4075 IEM_MC_REL_JMP_S16(i16Imm);
4076 } IEM_MC_ENDIF();
4077 IEM_MC_END();
4078 }
4079 else
4080 {
4081 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4083
4084 IEM_MC_BEGIN(0, 0);
4085 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4086 IEM_MC_ADVANCE_RIP();
4087 } IEM_MC_ELSE() {
4088 IEM_MC_REL_JMP_S32(i32Imm);
4089 } IEM_MC_ENDIF();
4090 IEM_MC_END();
4091 }
4092 return VINF_SUCCESS;
4093}
4094
4095
4096/** Opcode 0x0f 0x84. */
4097FNIEMOP_DEF(iemOp_je_Jv)
4098{
4099 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4100 IEMOP_HLP_MIN_386();
4101 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4102 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4103 {
4104 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4106
4107 IEM_MC_BEGIN(0, 0);
4108 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4109 IEM_MC_REL_JMP_S16(i16Imm);
4110 } IEM_MC_ELSE() {
4111 IEM_MC_ADVANCE_RIP();
4112 } IEM_MC_ENDIF();
4113 IEM_MC_END();
4114 }
4115 else
4116 {
4117 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4119
4120 IEM_MC_BEGIN(0, 0);
4121 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4122 IEM_MC_REL_JMP_S32(i32Imm);
4123 } IEM_MC_ELSE() {
4124 IEM_MC_ADVANCE_RIP();
4125 } IEM_MC_ENDIF();
4126 IEM_MC_END();
4127 }
4128 return VINF_SUCCESS;
4129}
4130
4131
4132/** Opcode 0x0f 0x85. */
4133FNIEMOP_DEF(iemOp_jne_Jv)
4134{
4135 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4136 IEMOP_HLP_MIN_386();
4137 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4138 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4139 {
4140 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4142
4143 IEM_MC_BEGIN(0, 0);
4144 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4145 IEM_MC_ADVANCE_RIP();
4146 } IEM_MC_ELSE() {
4147 IEM_MC_REL_JMP_S16(i16Imm);
4148 } IEM_MC_ENDIF();
4149 IEM_MC_END();
4150 }
4151 else
4152 {
4153 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4155
4156 IEM_MC_BEGIN(0, 0);
4157 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4158 IEM_MC_ADVANCE_RIP();
4159 } IEM_MC_ELSE() {
4160 IEM_MC_REL_JMP_S32(i32Imm);
4161 } IEM_MC_ENDIF();
4162 IEM_MC_END();
4163 }
4164 return VINF_SUCCESS;
4165}
4166
4167
4168/** Opcode 0x0f 0x86. */
4169FNIEMOP_DEF(iemOp_jbe_Jv)
4170{
4171 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4172 IEMOP_HLP_MIN_386();
4173 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4174 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4175 {
4176 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4178
4179 IEM_MC_BEGIN(0, 0);
4180 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4181 IEM_MC_REL_JMP_S16(i16Imm);
4182 } IEM_MC_ELSE() {
4183 IEM_MC_ADVANCE_RIP();
4184 } IEM_MC_ENDIF();
4185 IEM_MC_END();
4186 }
4187 else
4188 {
4189 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4191
4192 IEM_MC_BEGIN(0, 0);
4193 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4194 IEM_MC_REL_JMP_S32(i32Imm);
4195 } IEM_MC_ELSE() {
4196 IEM_MC_ADVANCE_RIP();
4197 } IEM_MC_ENDIF();
4198 IEM_MC_END();
4199 }
4200 return VINF_SUCCESS;
4201}
4202
4203
4204/** Opcode 0x0f 0x87. */
4205FNIEMOP_DEF(iemOp_jnbe_Jv)
4206{
4207 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4208 IEMOP_HLP_MIN_386();
4209 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4210 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4211 {
4212 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4214
4215 IEM_MC_BEGIN(0, 0);
4216 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4217 IEM_MC_ADVANCE_RIP();
4218 } IEM_MC_ELSE() {
4219 IEM_MC_REL_JMP_S16(i16Imm);
4220 } IEM_MC_ENDIF();
4221 IEM_MC_END();
4222 }
4223 else
4224 {
4225 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4227
4228 IEM_MC_BEGIN(0, 0);
4229 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4230 IEM_MC_ADVANCE_RIP();
4231 } IEM_MC_ELSE() {
4232 IEM_MC_REL_JMP_S32(i32Imm);
4233 } IEM_MC_ENDIF();
4234 IEM_MC_END();
4235 }
4236 return VINF_SUCCESS;
4237}
4238
4239
4240/** Opcode 0x0f 0x88. */
4241FNIEMOP_DEF(iemOp_js_Jv)
4242{
4243 IEMOP_MNEMONIC(js_Jv, "js Jv");
4244 IEMOP_HLP_MIN_386();
4245 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4246 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4247 {
4248 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4250
4251 IEM_MC_BEGIN(0, 0);
4252 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4253 IEM_MC_REL_JMP_S16(i16Imm);
4254 } IEM_MC_ELSE() {
4255 IEM_MC_ADVANCE_RIP();
4256 } IEM_MC_ENDIF();
4257 IEM_MC_END();
4258 }
4259 else
4260 {
4261 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4263
4264 IEM_MC_BEGIN(0, 0);
4265 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4266 IEM_MC_REL_JMP_S32(i32Imm);
4267 } IEM_MC_ELSE() {
4268 IEM_MC_ADVANCE_RIP();
4269 } IEM_MC_ENDIF();
4270 IEM_MC_END();
4271 }
4272 return VINF_SUCCESS;
4273}
4274
4275
4276/** Opcode 0x0f 0x89. */
4277FNIEMOP_DEF(iemOp_jns_Jv)
4278{
4279 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4280 IEMOP_HLP_MIN_386();
4281 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4282 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4283 {
4284 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4286
4287 IEM_MC_BEGIN(0, 0);
4288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4289 IEM_MC_ADVANCE_RIP();
4290 } IEM_MC_ELSE() {
4291 IEM_MC_REL_JMP_S16(i16Imm);
4292 } IEM_MC_ENDIF();
4293 IEM_MC_END();
4294 }
4295 else
4296 {
4297 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4299
4300 IEM_MC_BEGIN(0, 0);
4301 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4302 IEM_MC_ADVANCE_RIP();
4303 } IEM_MC_ELSE() {
4304 IEM_MC_REL_JMP_S32(i32Imm);
4305 } IEM_MC_ENDIF();
4306 IEM_MC_END();
4307 }
4308 return VINF_SUCCESS;
4309}
4310
4311
4312/** Opcode 0x0f 0x8a. */
4313FNIEMOP_DEF(iemOp_jp_Jv)
4314{
4315 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4316 IEMOP_HLP_MIN_386();
4317 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4318 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4319 {
4320 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4322
4323 IEM_MC_BEGIN(0, 0);
4324 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4325 IEM_MC_REL_JMP_S16(i16Imm);
4326 } IEM_MC_ELSE() {
4327 IEM_MC_ADVANCE_RIP();
4328 } IEM_MC_ENDIF();
4329 IEM_MC_END();
4330 }
4331 else
4332 {
4333 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4335
4336 IEM_MC_BEGIN(0, 0);
4337 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4338 IEM_MC_REL_JMP_S32(i32Imm);
4339 } IEM_MC_ELSE() {
4340 IEM_MC_ADVANCE_RIP();
4341 } IEM_MC_ENDIF();
4342 IEM_MC_END();
4343 }
4344 return VINF_SUCCESS;
4345}
4346
4347
4348/** Opcode 0x0f 0x8b. */
4349FNIEMOP_DEF(iemOp_jnp_Jv)
4350{
4351 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4352 IEMOP_HLP_MIN_386();
4353 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4354 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4355 {
4356 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4358
4359 IEM_MC_BEGIN(0, 0);
4360 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4361 IEM_MC_ADVANCE_RIP();
4362 } IEM_MC_ELSE() {
4363 IEM_MC_REL_JMP_S16(i16Imm);
4364 } IEM_MC_ENDIF();
4365 IEM_MC_END();
4366 }
4367 else
4368 {
4369 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4371
4372 IEM_MC_BEGIN(0, 0);
4373 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4374 IEM_MC_ADVANCE_RIP();
4375 } IEM_MC_ELSE() {
4376 IEM_MC_REL_JMP_S32(i32Imm);
4377 } IEM_MC_ENDIF();
4378 IEM_MC_END();
4379 }
4380 return VINF_SUCCESS;
4381}
4382
4383
4384/** Opcode 0x0f 0x8c. */
4385FNIEMOP_DEF(iemOp_jl_Jv)
4386{
4387 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4388 IEMOP_HLP_MIN_386();
4389 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4390 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4391 {
4392 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4394
4395 IEM_MC_BEGIN(0, 0);
4396 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4397 IEM_MC_REL_JMP_S16(i16Imm);
4398 } IEM_MC_ELSE() {
4399 IEM_MC_ADVANCE_RIP();
4400 } IEM_MC_ENDIF();
4401 IEM_MC_END();
4402 }
4403 else
4404 {
4405 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4407
4408 IEM_MC_BEGIN(0, 0);
4409 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4410 IEM_MC_REL_JMP_S32(i32Imm);
4411 } IEM_MC_ELSE() {
4412 IEM_MC_ADVANCE_RIP();
4413 } IEM_MC_ENDIF();
4414 IEM_MC_END();
4415 }
4416 return VINF_SUCCESS;
4417}
4418
4419
4420/** Opcode 0x0f 0x8d. */
4421FNIEMOP_DEF(iemOp_jnl_Jv)
4422{
4423 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4424 IEMOP_HLP_MIN_386();
4425 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4426 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4427 {
4428 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4430
4431 IEM_MC_BEGIN(0, 0);
4432 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4433 IEM_MC_ADVANCE_RIP();
4434 } IEM_MC_ELSE() {
4435 IEM_MC_REL_JMP_S16(i16Imm);
4436 } IEM_MC_ENDIF();
4437 IEM_MC_END();
4438 }
4439 else
4440 {
4441 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4443
4444 IEM_MC_BEGIN(0, 0);
4445 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4446 IEM_MC_ADVANCE_RIP();
4447 } IEM_MC_ELSE() {
4448 IEM_MC_REL_JMP_S32(i32Imm);
4449 } IEM_MC_ENDIF();
4450 IEM_MC_END();
4451 }
4452 return VINF_SUCCESS;
4453}
4454
4455
4456/** Opcode 0x0f 0x8e. */
4457FNIEMOP_DEF(iemOp_jle_Jv)
4458{
4459 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4460 IEMOP_HLP_MIN_386();
4461 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4462 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4463 {
4464 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4466
4467 IEM_MC_BEGIN(0, 0);
4468 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4469 IEM_MC_REL_JMP_S16(i16Imm);
4470 } IEM_MC_ELSE() {
4471 IEM_MC_ADVANCE_RIP();
4472 } IEM_MC_ENDIF();
4473 IEM_MC_END();
4474 }
4475 else
4476 {
4477 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4479
4480 IEM_MC_BEGIN(0, 0);
4481 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4482 IEM_MC_REL_JMP_S32(i32Imm);
4483 } IEM_MC_ELSE() {
4484 IEM_MC_ADVANCE_RIP();
4485 } IEM_MC_ENDIF();
4486 IEM_MC_END();
4487 }
4488 return VINF_SUCCESS;
4489}
4490
4491
4492/** Opcode 0x0f 0x8f. */
4493FNIEMOP_DEF(iemOp_jnle_Jv)
4494{
4495 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4496 IEMOP_HLP_MIN_386();
4497 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4498 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4499 {
4500 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4502
4503 IEM_MC_BEGIN(0, 0);
4504 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4505 IEM_MC_ADVANCE_RIP();
4506 } IEM_MC_ELSE() {
4507 IEM_MC_REL_JMP_S16(i16Imm);
4508 } IEM_MC_ENDIF();
4509 IEM_MC_END();
4510 }
4511 else
4512 {
4513 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4515
4516 IEM_MC_BEGIN(0, 0);
4517 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4518 IEM_MC_ADVANCE_RIP();
4519 } IEM_MC_ELSE() {
4520 IEM_MC_REL_JMP_S32(i32Imm);
4521 } IEM_MC_ENDIF();
4522 IEM_MC_END();
4523 }
4524 return VINF_SUCCESS;
4525}
4526
4527
4528/** Opcode 0x0f 0x90. */
4529FNIEMOP_DEF(iemOp_seto_Eb)
4530{
4531 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4532 IEMOP_HLP_MIN_386();
4533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4534
4535 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4536 * any way. AMD says it's "unused", whatever that means. We're
4537     *        ignoring it for now. */
4538 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4539 {
4540 /* register target */
4541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4542 IEM_MC_BEGIN(0, 0);
4543 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4544 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4545 } IEM_MC_ELSE() {
4546 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4547 } IEM_MC_ENDIF();
4548 IEM_MC_ADVANCE_RIP();
4549 IEM_MC_END();
4550 }
4551 else
4552 {
4553 /* memory target */
4554 IEM_MC_BEGIN(0, 1);
4555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4558 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4559 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4560 } IEM_MC_ELSE() {
4561 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4562 } IEM_MC_ENDIF();
4563 IEM_MC_ADVANCE_RIP();
4564 IEM_MC_END();
4565 }
4566 return VINF_SUCCESS;
4567}
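/* The SETcc handlers below are all instances of the template above, differing
   only in the EFLAGS condition tested; both forms store a single byte (1 or 0)
   regardless of the effective operand size. */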
4568
4569
4570/** Opcode 0x0f 0x91. */
4571FNIEMOP_DEF(iemOp_setno_Eb)
4572{
4573 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4574 IEMOP_HLP_MIN_386();
4575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4576
4577 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4578 * any way. AMD says it's "unused", whatever that means. We're
4579     *        ignoring it for now. */
4580 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4581 {
4582 /* register target */
4583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4584 IEM_MC_BEGIN(0, 0);
4585 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4586 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4587 } IEM_MC_ELSE() {
4588 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4589 } IEM_MC_ENDIF();
4590 IEM_MC_ADVANCE_RIP();
4591 IEM_MC_END();
4592 }
4593 else
4594 {
4595 /* memory target */
4596 IEM_MC_BEGIN(0, 1);
4597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4600 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4601 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4602 } IEM_MC_ELSE() {
4603 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4604 } IEM_MC_ENDIF();
4605 IEM_MC_ADVANCE_RIP();
4606 IEM_MC_END();
4607 }
4608 return VINF_SUCCESS;
4609}
4610
4611
4612/** Opcode 0x0f 0x92. */
4613FNIEMOP_DEF(iemOp_setc_Eb)
4614{
4615 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4616 IEMOP_HLP_MIN_386();
4617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4618
4619 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4620 * any way. AMD says it's "unused", whatever that means. We're
4621     *        ignoring it for now. */
4622 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4623 {
4624 /* register target */
4625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4626 IEM_MC_BEGIN(0, 0);
4627 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4628 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4629 } IEM_MC_ELSE() {
4630 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4631 } IEM_MC_ENDIF();
4632 IEM_MC_ADVANCE_RIP();
4633 IEM_MC_END();
4634 }
4635 else
4636 {
4637 /* memory target */
4638 IEM_MC_BEGIN(0, 1);
4639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4643 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4644 } IEM_MC_ELSE() {
4645 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4646 } IEM_MC_ENDIF();
4647 IEM_MC_ADVANCE_RIP();
4648 IEM_MC_END();
4649 }
4650 return VINF_SUCCESS;
4651}
4652
4653
4654/** Opcode 0x0f 0x93. */
4655FNIEMOP_DEF(iemOp_setnc_Eb)
4656{
4657 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4658 IEMOP_HLP_MIN_386();
4659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4660
4661 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4662 * any way. AMD says it's "unused", whatever that means. We're
4663     *        ignoring it for now. */
4664 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4665 {
4666 /* register target */
4667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4668 IEM_MC_BEGIN(0, 0);
4669 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4670 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4671 } IEM_MC_ELSE() {
4672 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4673 } IEM_MC_ENDIF();
4674 IEM_MC_ADVANCE_RIP();
4675 IEM_MC_END();
4676 }
4677 else
4678 {
4679 /* memory target */
4680 IEM_MC_BEGIN(0, 1);
4681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4684 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4685 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4686 } IEM_MC_ELSE() {
4687 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4688 } IEM_MC_ENDIF();
4689 IEM_MC_ADVANCE_RIP();
4690 IEM_MC_END();
4691 }
4692 return VINF_SUCCESS;
4693}
4694
4695
4696/** Opcode 0x0f 0x94. */
4697FNIEMOP_DEF(iemOp_sete_Eb)
4698{
4699 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4700 IEMOP_HLP_MIN_386();
4701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4702
4703 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4704 * any way. AMD says it's "unused", whatever that means. We're
4705     *        ignoring it for now. */
4706 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4707 {
4708 /* register target */
4709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4710 IEM_MC_BEGIN(0, 0);
4711 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4712 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4713 } IEM_MC_ELSE() {
4714 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4715 } IEM_MC_ENDIF();
4716 IEM_MC_ADVANCE_RIP();
4717 IEM_MC_END();
4718 }
4719 else
4720 {
4721 /* memory target */
4722 IEM_MC_BEGIN(0, 1);
4723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4726 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4727 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4728 } IEM_MC_ELSE() {
4729 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4730 } IEM_MC_ENDIF();
4731 IEM_MC_ADVANCE_RIP();
4732 IEM_MC_END();
4733 }
4734 return VINF_SUCCESS;
4735}
4736
4737
4738/** Opcode 0x0f 0x95. */
4739FNIEMOP_DEF(iemOp_setne_Eb)
4740{
4741 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4742 IEMOP_HLP_MIN_386();
4743 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4744
4745 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4746 * any way. AMD says it's "unused", whatever that means. We're
4747     *        ignoring it for now. */
4748 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4749 {
4750 /* register target */
4751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4752 IEM_MC_BEGIN(0, 0);
4753 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4754 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4755 } IEM_MC_ELSE() {
4756 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4757 } IEM_MC_ENDIF();
4758 IEM_MC_ADVANCE_RIP();
4759 IEM_MC_END();
4760 }
4761 else
4762 {
4763 /* memory target */
4764 IEM_MC_BEGIN(0, 1);
4765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4769 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4770 } IEM_MC_ELSE() {
4771 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4772 } IEM_MC_ENDIF();
4773 IEM_MC_ADVANCE_RIP();
4774 IEM_MC_END();
4775 }
4776 return VINF_SUCCESS;
4777}
4778
4779
4780/** Opcode 0x0f 0x96. */
4781FNIEMOP_DEF(iemOp_setbe_Eb)
4782{
4783 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4784 IEMOP_HLP_MIN_386();
4785 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4786
4787 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4788 * any way. AMD says it's "unused", whatever that means. We're
4789     *        ignoring it for now. */
4790 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4791 {
4792 /* register target */
4793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4794 IEM_MC_BEGIN(0, 0);
4795 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4796 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4797 } IEM_MC_ELSE() {
4798 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4799 } IEM_MC_ENDIF();
4800 IEM_MC_ADVANCE_RIP();
4801 IEM_MC_END();
4802 }
4803 else
4804 {
4805 /* memory target */
4806 IEM_MC_BEGIN(0, 1);
4807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4810 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4811 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4812 } IEM_MC_ELSE() {
4813 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4814 } IEM_MC_ENDIF();
4815 IEM_MC_ADVANCE_RIP();
4816 IEM_MC_END();
4817 }
4818 return VINF_SUCCESS;
4819}
4820
4821
4822/** Opcode 0x0f 0x97. */
4823FNIEMOP_DEF(iemOp_setnbe_Eb)
4824{
4825 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4826 IEMOP_HLP_MIN_386();
4827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4828
4829 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4830 * any way. AMD says it's "unused", whatever that means. We're
4831     *        ignoring it for now. */
4832 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4833 {
4834 /* register target */
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4836 IEM_MC_BEGIN(0, 0);
4837 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4838 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4839 } IEM_MC_ELSE() {
4840 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4841 } IEM_MC_ENDIF();
4842 IEM_MC_ADVANCE_RIP();
4843 IEM_MC_END();
4844 }
4845 else
4846 {
4847 /* memory target */
4848 IEM_MC_BEGIN(0, 1);
4849 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4852 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4853 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4854 } IEM_MC_ELSE() {
4855 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4856 } IEM_MC_ENDIF();
4857 IEM_MC_ADVANCE_RIP();
4858 IEM_MC_END();
4859 }
4860 return VINF_SUCCESS;
4861}
4862
4863
4864/** Opcode 0x0f 0x98. */
4865FNIEMOP_DEF(iemOp_sets_Eb)
4866{
4867 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4868 IEMOP_HLP_MIN_386();
4869 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4870
4871 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4872 * any way. AMD says it's "unused", whatever that means. We're
4873     *        ignoring it for now. */
4874 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4875 {
4876 /* register target */
4877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4878 IEM_MC_BEGIN(0, 0);
4879 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4880 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4881 } IEM_MC_ELSE() {
4882 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4883 } IEM_MC_ENDIF();
4884 IEM_MC_ADVANCE_RIP();
4885 IEM_MC_END();
4886 }
4887 else
4888 {
4889 /* memory target */
4890 IEM_MC_BEGIN(0, 1);
4891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4894 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4895 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4896 } IEM_MC_ELSE() {
4897 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4898 } IEM_MC_ENDIF();
4899 IEM_MC_ADVANCE_RIP();
4900 IEM_MC_END();
4901 }
4902 return VINF_SUCCESS;
4903}
4904
4905
4906/** Opcode 0x0f 0x99. */
4907FNIEMOP_DEF(iemOp_setns_Eb)
4908{
4909 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4910 IEMOP_HLP_MIN_386();
4911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4912
4913 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4914 * any way. AMD says it's "unused", whatever that means. We're
4915     *        ignoring it for now. */
4916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4917 {
4918 /* register target */
4919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4920 IEM_MC_BEGIN(0, 0);
4921 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4922 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4923 } IEM_MC_ELSE() {
4924 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4925 } IEM_MC_ENDIF();
4926 IEM_MC_ADVANCE_RIP();
4927 IEM_MC_END();
4928 }
4929 else
4930 {
4931 /* memory target */
4932 IEM_MC_BEGIN(0, 1);
4933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4936 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4937 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4938 } IEM_MC_ELSE() {
4939 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4940 } IEM_MC_ENDIF();
4941 IEM_MC_ADVANCE_RIP();
4942 IEM_MC_END();
4943 }
4944 return VINF_SUCCESS;
4945}
4946
4947
4948/** Opcode 0x0f 0x9a. */
4949FNIEMOP_DEF(iemOp_setp_Eb)
4950{
4951 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4952 IEMOP_HLP_MIN_386();
4953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4954
4955 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4956 * any way. AMD says it's "unused", whatever that means. We're
4957     *        ignoring it for now. */
4958 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4959 {
4960 /* register target */
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962 IEM_MC_BEGIN(0, 0);
4963 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4964 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4965 } IEM_MC_ELSE() {
4966 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4967 } IEM_MC_ENDIF();
4968 IEM_MC_ADVANCE_RIP();
4969 IEM_MC_END();
4970 }
4971 else
4972 {
4973 /* memory target */
4974 IEM_MC_BEGIN(0, 1);
4975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4978 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4979 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4980 } IEM_MC_ELSE() {
4981 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4982 } IEM_MC_ENDIF();
4983 IEM_MC_ADVANCE_RIP();
4984 IEM_MC_END();
4985 }
4986 return VINF_SUCCESS;
4987}
4988
4989
4990/** Opcode 0x0f 0x9b. */
4991FNIEMOP_DEF(iemOp_setnp_Eb)
4992{
4993 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4994 IEMOP_HLP_MIN_386();
4995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4996
4997 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4998 * any way. AMD says it's "unused", whatever that means. We're
4999     *        ignoring it for now. */
5000 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5001 {
5002 /* register target */
5003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5004 IEM_MC_BEGIN(0, 0);
5005 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5006 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5007 } IEM_MC_ELSE() {
5008 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5009 } IEM_MC_ENDIF();
5010 IEM_MC_ADVANCE_RIP();
5011 IEM_MC_END();
5012 }
5013 else
5014 {
5015 /* memory target */
5016 IEM_MC_BEGIN(0, 1);
5017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5020 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5021 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5022 } IEM_MC_ELSE() {
5023 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5024 } IEM_MC_ENDIF();
5025 IEM_MC_ADVANCE_RIP();
5026 IEM_MC_END();
5027 }
5028 return VINF_SUCCESS;
5029}
5030
5031
5032/** Opcode 0x0f 0x9c. */
5033FNIEMOP_DEF(iemOp_setl_Eb)
5034{
5035 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5036 IEMOP_HLP_MIN_386();
5037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5038
5039 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5040 * any way. AMD says it's "unused", whatever that means. We're
5041     *        ignoring it for now. */
5042 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5043 {
5044 /* register target */
5045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5046 IEM_MC_BEGIN(0, 0);
5047 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5048 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5049 } IEM_MC_ELSE() {
5050 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5051 } IEM_MC_ENDIF();
5052 IEM_MC_ADVANCE_RIP();
5053 IEM_MC_END();
5054 }
5055 else
5056 {
5057 /* memory target */
5058 IEM_MC_BEGIN(0, 1);
5059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5062 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5063 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5064 } IEM_MC_ELSE() {
5065 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5066 } IEM_MC_ENDIF();
5067 IEM_MC_ADVANCE_RIP();
5068 IEM_MC_END();
5069 }
5070 return VINF_SUCCESS;
5071}
5072
5073
5074/** Opcode 0x0f 0x9d. */
5075FNIEMOP_DEF(iemOp_setnl_Eb)
5076{
5077 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5078 IEMOP_HLP_MIN_386();
5079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5080
5081 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5082 * any way. AMD says it's "unused", whatever that means. We're
5083     *        ignoring it for now. */
5084 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5085 {
5086 /* register target */
5087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5088 IEM_MC_BEGIN(0, 0);
5089 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5090 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5091 } IEM_MC_ELSE() {
5092 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5093 } IEM_MC_ENDIF();
5094 IEM_MC_ADVANCE_RIP();
5095 IEM_MC_END();
5096 }
5097 else
5098 {
5099 /* memory target */
5100 IEM_MC_BEGIN(0, 1);
5101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5104 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5105 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5106 } IEM_MC_ELSE() {
5107 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5108 } IEM_MC_ENDIF();
5109 IEM_MC_ADVANCE_RIP();
5110 IEM_MC_END();
5111 }
5112 return VINF_SUCCESS;
5113}
5114
5115
5116/** Opcode 0x0f 0x9e. */
5117FNIEMOP_DEF(iemOp_setle_Eb)
5118{
5119 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5120 IEMOP_HLP_MIN_386();
5121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5122
5123 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5124 * any way. AMD says it's "unused", whatever that means. We're
5125     *        ignoring it for now. */
5126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5127 {
5128 /* register target */
5129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5130 IEM_MC_BEGIN(0, 0);
5131 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5132 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5133 } IEM_MC_ELSE() {
5134 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5135 } IEM_MC_ENDIF();
5136 IEM_MC_ADVANCE_RIP();
5137 IEM_MC_END();
5138 }
5139 else
5140 {
5141 /* memory target */
5142 IEM_MC_BEGIN(0, 1);
5143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5146 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5147 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5148 } IEM_MC_ELSE() {
5149 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5150 } IEM_MC_ENDIF();
5151 IEM_MC_ADVANCE_RIP();
5152 IEM_MC_END();
5153 }
5154 return VINF_SUCCESS;
5155}
5156
5157
5158/** Opcode 0x0f 0x9f. */
5159FNIEMOP_DEF(iemOp_setnle_Eb)
5160{
5161 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5162 IEMOP_HLP_MIN_386();
5163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5164
5165 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5166 * any way. AMD says it's "unused", whatever that means. We're
5167     *        ignoring it for now. */
5168 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5169 {
5170 /* register target */
5171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5172 IEM_MC_BEGIN(0, 0);
5173 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5174 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5175 } IEM_MC_ELSE() {
5176 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5177 } IEM_MC_ENDIF();
5178 IEM_MC_ADVANCE_RIP();
5179 IEM_MC_END();
5180 }
5181 else
5182 {
5183 /* memory target */
5184 IEM_MC_BEGIN(0, 1);
5185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5188 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5189 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5190 } IEM_MC_ELSE() {
5191 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5192 } IEM_MC_ENDIF();
5193 IEM_MC_ADVANCE_RIP();
5194 IEM_MC_END();
5195 }
5196 return VINF_SUCCESS;
5197}
5198
5199
5200/**
5201 * Common 'push segment-register' helper.
5202 */
5203FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5204{
5205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5206 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5207 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5208
5209 switch (pVCpu->iem.s.enmEffOpSize)
5210 {
5211 case IEMMODE_16BIT:
5212 IEM_MC_BEGIN(0, 1);
5213 IEM_MC_LOCAL(uint16_t, u16Value);
5214 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5215 IEM_MC_PUSH_U16(u16Value);
5216 IEM_MC_ADVANCE_RIP();
5217 IEM_MC_END();
5218 break;
5219
5220 case IEMMODE_32BIT:
5221 IEM_MC_BEGIN(0, 1);
5222 IEM_MC_LOCAL(uint32_t, u32Value);
5223 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5224 IEM_MC_PUSH_U32_SREG(u32Value);
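            /* Note: IEM_MC_PUSH_U32_SREG is presumably kept separate from the
               plain IEM_MC_PUSH_U32 because recent CPUs doing a doubleword
               push of a segment register only write the low word of the slot. */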
5225 IEM_MC_ADVANCE_RIP();
5226 IEM_MC_END();
5227 break;
5228
5229 case IEMMODE_64BIT:
5230 IEM_MC_BEGIN(0, 1);
5231 IEM_MC_LOCAL(uint64_t, u64Value);
5232 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5233 IEM_MC_PUSH_U64(u64Value);
5234 IEM_MC_ADVANCE_RIP();
5235 IEM_MC_END();
5236 break;
5237 }
5238
5239 return VINF_SUCCESS;
5240}
5241
5242
5243/** Opcode 0x0f 0xa0. */
5244FNIEMOP_DEF(iemOp_push_fs)
5245{
5246 IEMOP_MNEMONIC(push_fs, "push fs");
5247 IEMOP_HLP_MIN_386();
5248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5249 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5250}
5251
5252
5253/** Opcode 0x0f 0xa1. */
5254FNIEMOP_DEF(iemOp_pop_fs)
5255{
5256 IEMOP_MNEMONIC(pop_fs, "pop fs");
5257 IEMOP_HLP_MIN_386();
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5260}
5261
5262
5263/** Opcode 0x0f 0xa2. */
5264FNIEMOP_DEF(iemOp_cpuid)
5265{
5266 IEMOP_MNEMONIC(cpuid, "cpuid");
5267    IEMOP_HLP_MIN_486(); /* not all 486s have CPUID. */
5268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5269 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5270}
5271
5272
5273/**
5274 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5275 * iemOp_bts_Ev_Gv.
5276 */
5277FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5278{
5279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5280 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5281
5282 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5283 {
5284 /* register destination. */
5285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5286 switch (pVCpu->iem.s.enmEffOpSize)
5287 {
5288 case IEMMODE_16BIT:
5289 IEM_MC_BEGIN(3, 0);
5290 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5291 IEM_MC_ARG(uint16_t, u16Src, 1);
5292 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5293
5294 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5295 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5296 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5297 IEM_MC_REF_EFLAGS(pEFlags);
5298 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5299
5300 IEM_MC_ADVANCE_RIP();
5301 IEM_MC_END();
5302 return VINF_SUCCESS;
5303
5304 case IEMMODE_32BIT:
5305 IEM_MC_BEGIN(3, 0);
5306 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5307 IEM_MC_ARG(uint32_t, u32Src, 1);
5308 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5309
5310 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5311 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5312 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5313 IEM_MC_REF_EFLAGS(pEFlags);
5314 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5315
5316 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5317 IEM_MC_ADVANCE_RIP();
5318 IEM_MC_END();
5319 return VINF_SUCCESS;
5320
5321 case IEMMODE_64BIT:
5322 IEM_MC_BEGIN(3, 0);
5323 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5324 IEM_MC_ARG(uint64_t, u64Src, 1);
5325 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5326
5327 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5328 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5329 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5330 IEM_MC_REF_EFLAGS(pEFlags);
5331 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5332
5333 IEM_MC_ADVANCE_RIP();
5334 IEM_MC_END();
5335 return VINF_SUCCESS;
5336
5337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5338 }
5339 }
5340 else
5341 {
5342 /* memory destination. */
5343
5344 uint32_t fAccess;
5345 if (pImpl->pfnLockedU16)
5346 fAccess = IEM_ACCESS_DATA_RW;
5347 else /* BT */
5348 fAccess = IEM_ACCESS_DATA_R;
5349
5350 /** @todo test negative bit offsets! */
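        /* For the memory forms the bit offset in Gv is signed and selects the
           datum as well as the bit: it is arithmetically shifted right by
           log2(operand width) to get an element index, scaled back to bytes
           and added to the effective address, while the masked low bits pick
           the bit within that datum.  pfnLockedU16 also doubles as the BT
           detector: BT only reads and must decline LOCK, the others map RW. */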
5351 switch (pVCpu->iem.s.enmEffOpSize)
5352 {
5353 case IEMMODE_16BIT:
5354 IEM_MC_BEGIN(3, 2);
5355 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5356 IEM_MC_ARG(uint16_t, u16Src, 1);
5357 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5359 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5360
5361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5362 if (pImpl->pfnLockedU16)
5363 IEMOP_HLP_DONE_DECODING();
5364 else
5365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5366 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5367 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5368 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5369 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5370 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5371 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5372 IEM_MC_FETCH_EFLAGS(EFlags);
5373
5374 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5375 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5376 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5377 else
5378 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5379 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5380
5381 IEM_MC_COMMIT_EFLAGS(EFlags);
5382 IEM_MC_ADVANCE_RIP();
5383 IEM_MC_END();
5384 return VINF_SUCCESS;
5385
5386 case IEMMODE_32BIT:
5387 IEM_MC_BEGIN(3, 2);
5388 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5389 IEM_MC_ARG(uint32_t, u32Src, 1);
5390 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5392 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5393
5394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5395 if (pImpl->pfnLockedU16)
5396 IEMOP_HLP_DONE_DECODING();
5397 else
5398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5399 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5400 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5401 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5402 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5403 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5404 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5405 IEM_MC_FETCH_EFLAGS(EFlags);
5406
5407 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5408 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5409 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5410 else
5411 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5412 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5413
5414 IEM_MC_COMMIT_EFLAGS(EFlags);
5415 IEM_MC_ADVANCE_RIP();
5416 IEM_MC_END();
5417 return VINF_SUCCESS;
5418
5419 case IEMMODE_64BIT:
5420 IEM_MC_BEGIN(3, 2);
5421 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5422 IEM_MC_ARG(uint64_t, u64Src, 1);
5423 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5425 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5426
5427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5428 if (pImpl->pfnLockedU16)
5429 IEMOP_HLP_DONE_DECODING();
5430 else
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5432 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5433 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5434 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5435 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5436 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5437 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5438 IEM_MC_FETCH_EFLAGS(EFlags);
5439
5440 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5441 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5442 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5443 else
5444 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5445 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5446
5447 IEM_MC_COMMIT_EFLAGS(EFlags);
5448 IEM_MC_ADVANCE_RIP();
5449 IEM_MC_END();
5450 return VINF_SUCCESS;
5451
5452 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5453 }
5454 }
5455}
5456
5457
5458/** Opcode 0x0f 0xa3. */
5459FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5460{
5461 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5462 IEMOP_HLP_MIN_386();
5463 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5464}
5465
5466
5467/**
5468 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
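 *
 * Rough semantics, flags aside, for a masked count 0 < c < width (the count
 * is taken mod 32, or mod 64 for 64-bit operands):
 *      shld: dst = (dst << c) | (src >> (width - c))
 *      shrd: dst = (dst >> c) | (src << (width - c))
 * A masked count exceeding a 16-bit operand's width leaves the result
 * undefined on real CPUs.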
5469 */
5470FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5471{
5472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5473 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5474
5475 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5476 {
5477 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5479
5480 switch (pVCpu->iem.s.enmEffOpSize)
5481 {
5482 case IEMMODE_16BIT:
5483 IEM_MC_BEGIN(4, 0);
5484 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5485 IEM_MC_ARG(uint16_t, u16Src, 1);
5486 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5487 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5488
5489 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5490 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5491 IEM_MC_REF_EFLAGS(pEFlags);
5492 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5493
5494 IEM_MC_ADVANCE_RIP();
5495 IEM_MC_END();
5496 return VINF_SUCCESS;
5497
5498 case IEMMODE_32BIT:
5499 IEM_MC_BEGIN(4, 0);
5500 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5501 IEM_MC_ARG(uint32_t, u32Src, 1);
5502 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5503 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5504
5505 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5506 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5507 IEM_MC_REF_EFLAGS(pEFlags);
5508 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5509
5510 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5511 IEM_MC_ADVANCE_RIP();
5512 IEM_MC_END();
5513 return VINF_SUCCESS;
5514
5515 case IEMMODE_64BIT:
5516 IEM_MC_BEGIN(4, 0);
5517 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5518 IEM_MC_ARG(uint64_t, u64Src, 1);
5519 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5520 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5521
5522 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5523 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5524 IEM_MC_REF_EFLAGS(pEFlags);
5525 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5526
5527 IEM_MC_ADVANCE_RIP();
5528 IEM_MC_END();
5529 return VINF_SUCCESS;
5530
5531 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5532 }
5533 }
5534 else
5535 {
5536 switch (pVCpu->iem.s.enmEffOpSize)
5537 {
5538 case IEMMODE_16BIT:
5539 IEM_MC_BEGIN(4, 2);
5540 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5541 IEM_MC_ARG(uint16_t, u16Src, 1);
5542 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5543 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5545
5546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5547 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5548 IEM_MC_ASSIGN(cShiftArg, cShift);
5549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5550 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5551 IEM_MC_FETCH_EFLAGS(EFlags);
5552 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5553 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5554
5555 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5556 IEM_MC_COMMIT_EFLAGS(EFlags);
5557 IEM_MC_ADVANCE_RIP();
5558 IEM_MC_END();
5559 return VINF_SUCCESS;
5560
5561 case IEMMODE_32BIT:
5562 IEM_MC_BEGIN(4, 2);
5563 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5564 IEM_MC_ARG(uint32_t, u32Src, 1);
5565 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5566 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5568
5569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5570 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5571 IEM_MC_ASSIGN(cShiftArg, cShift);
5572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5573 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5574 IEM_MC_FETCH_EFLAGS(EFlags);
5575 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5576 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5577
5578 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5579 IEM_MC_COMMIT_EFLAGS(EFlags);
5580 IEM_MC_ADVANCE_RIP();
5581 IEM_MC_END();
5582 return VINF_SUCCESS;
5583
5584 case IEMMODE_64BIT:
5585 IEM_MC_BEGIN(4, 2);
5586 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5587 IEM_MC_ARG(uint64_t, u64Src, 1);
5588 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5589 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5591
5592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5593 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5594 IEM_MC_ASSIGN(cShiftArg, cShift);
5595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5596 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5597 IEM_MC_FETCH_EFLAGS(EFlags);
5598 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5599 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5600
5601 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5602 IEM_MC_COMMIT_EFLAGS(EFlags);
5603 IEM_MC_ADVANCE_RIP();
5604 IEM_MC_END();
5605 return VINF_SUCCESS;
5606
5607 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5608 }
5609 }
5610}
5611
5612
5613/**
5614 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
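 *
 * Identical to the Ib worker above, except the shift count is read from CL
 * at execution time instead of from an immediate byte.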
5615 */
5616FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5617{
5618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5619 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5620
5621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5622 {
5623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5624
5625 switch (pVCpu->iem.s.enmEffOpSize)
5626 {
5627 case IEMMODE_16BIT:
5628 IEM_MC_BEGIN(4, 0);
5629 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5630 IEM_MC_ARG(uint16_t, u16Src, 1);
5631 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5632 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5633
5634 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5635 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5636 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5637 IEM_MC_REF_EFLAGS(pEFlags);
5638 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5639
5640 IEM_MC_ADVANCE_RIP();
5641 IEM_MC_END();
5642 return VINF_SUCCESS;
5643
5644 case IEMMODE_32BIT:
5645 IEM_MC_BEGIN(4, 0);
5646 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5647 IEM_MC_ARG(uint32_t, u32Src, 1);
5648 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5649 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5650
5651 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5652 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5653 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5654 IEM_MC_REF_EFLAGS(pEFlags);
5655 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5656
5657 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5658 IEM_MC_ADVANCE_RIP();
5659 IEM_MC_END();
5660 return VINF_SUCCESS;
5661
5662 case IEMMODE_64BIT:
5663 IEM_MC_BEGIN(4, 0);
5664 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5665 IEM_MC_ARG(uint64_t, u64Src, 1);
5666 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5667 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5668
5669 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5670 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5671 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5672 IEM_MC_REF_EFLAGS(pEFlags);
5673 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5674
5675 IEM_MC_ADVANCE_RIP();
5676 IEM_MC_END();
5677 return VINF_SUCCESS;
5678
5679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5680 }
5681 }
5682 else
5683 {
5684 switch (pVCpu->iem.s.enmEffOpSize)
5685 {
5686 case IEMMODE_16BIT:
5687 IEM_MC_BEGIN(4, 2);
5688 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5689 IEM_MC_ARG(uint16_t, u16Src, 1);
5690 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5691 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5693
5694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5696 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5697 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5698 IEM_MC_FETCH_EFLAGS(EFlags);
5699 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5700 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5701
5702 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5703 IEM_MC_COMMIT_EFLAGS(EFlags);
5704 IEM_MC_ADVANCE_RIP();
5705 IEM_MC_END();
5706 return VINF_SUCCESS;
5707
5708 case IEMMODE_32BIT:
5709 IEM_MC_BEGIN(4, 2);
5710 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5711 IEM_MC_ARG(uint32_t, u32Src, 1);
5712 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5713 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5715
5716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5719 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5720 IEM_MC_FETCH_EFLAGS(EFlags);
5721 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5722 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5723
5724 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5725 IEM_MC_COMMIT_EFLAGS(EFlags);
5726 IEM_MC_ADVANCE_RIP();
5727 IEM_MC_END();
5728 return VINF_SUCCESS;
5729
5730 case IEMMODE_64BIT:
5731 IEM_MC_BEGIN(4, 2);
5732 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5733 IEM_MC_ARG(uint64_t, u64Src, 1);
5734 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5735 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5737
5738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5740 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5741 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5742 IEM_MC_FETCH_EFLAGS(EFlags);
5743 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5744 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5745
5746 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5747 IEM_MC_COMMIT_EFLAGS(EFlags);
5748 IEM_MC_ADVANCE_RIP();
5749 IEM_MC_END();
5750 return VINF_SUCCESS;
5751
5752 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5753 }
5754 }
5755}
5756
5757
5758
5759/** Opcode 0x0f 0xa4. */
5760FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5761{
5762 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5763 IEMOP_HLP_MIN_386();
5764 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5765}
5766
5767
5768/** Opcode 0x0f 0xa5. */
5769FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5770{
5771 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5772 IEMOP_HLP_MIN_386();
5773 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5774}
5775
5776
5777/** Opcode 0x0f 0xa8. */
5778FNIEMOP_DEF(iemOp_push_gs)
5779{
5780 IEMOP_MNEMONIC(push_gs, "push gs");
5781 IEMOP_HLP_MIN_386();
5782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5783 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5784}
5785
5786
5787/** Opcode 0x0f 0xa9. */
5788FNIEMOP_DEF(iemOp_pop_gs)
5789{
5790 IEMOP_MNEMONIC(pop_gs, "pop gs");
5791 IEMOP_HLP_MIN_386();
5792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5793 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5794}
5795
5796
5797/** Opcode 0x0f 0xaa. */
5798FNIEMOP_STUB(iemOp_rsm);
5799//IEMOP_HLP_MIN_386();
5800
5801
5802/** Opcode 0x0f 0xab. */
5803FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5804{
5805 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5806 IEMOP_HLP_MIN_386();
5807 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5808}
5809
5810
5811/** Opcode 0x0f 0xac. */
5812FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5813{
5814 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5815 IEMOP_HLP_MIN_386();
5816 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5817}
5818
5819
5820/** Opcode 0x0f 0xad. */
5821FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5822{
5823 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5824 IEMOP_HLP_MIN_386();
5825 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5826}
5827
5828
5829/** Opcode 0x0f 0xae mem/0. */
5830FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5831{
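    /* Note: the 512-byte fxsave/fxrstor image must be 16-byte aligned or the
       instruction #GPs; that check, like the rest of the work, is presumably
       done by iemCImpl_fxsave. */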
5832 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5833 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5834 return IEMOP_RAISE_INVALID_OPCODE();
5835
5836 IEM_MC_BEGIN(3, 1);
5837 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5838 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5839 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5842 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5843 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5844 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5845 IEM_MC_END();
5846 return VINF_SUCCESS;
5847}
5848
5849
5850/** Opcode 0x0f 0xae mem/1. */
5851FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5852{
5853 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5854 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5855 return IEMOP_RAISE_INVALID_OPCODE();
5856
5857 IEM_MC_BEGIN(3, 1);
5858 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5859 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5860 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5863 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5864 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5865 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5866 IEM_MC_END();
5867 return VINF_SUCCESS;
5868}
5869
5870
5871/**
5872 * @opmaps grp15
5873 * @opcode !11/2
5874 * @oppfx none
5875 * @opcpuid sse
5876 * @opgroup og_sse_mxcsrsm
5877 * @opxcpttype 5
5878 * @optest op1=0 -> mxcsr=0
5879 * @optest op1=0x2083 -> mxcsr=0x2083
5880 * @optest op1=0xfffffffe -> value.xcpt=0xd
5881 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5882 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5883 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5884 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5885 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5886 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5887 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5888 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5889 */
5890FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
5891{
5892 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5893 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5894 return IEMOP_RAISE_INVALID_OPCODE();
5895
5896 IEM_MC_BEGIN(2, 0);
5897 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5898 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5901 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5902 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5903 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5904 IEM_MC_END();
5905 return VINF_SUCCESS;
5906}
5907
5908
5909/**
5910 * @opmaps grp15
5911 * @opcode !11/3
5912 * @oppfx none
5913 * @opcpuid sse
5914 * @opgroup og_sse_mxcsrsm
5915 * @opxcpttype 5
5916 * @optest mxcsr=0 -> op1=0
5917 * @optest mxcsr=0x2083 -> op1=0x2083
5918 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5919 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5920 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5921 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
5922 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5923 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5924 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5925 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5926 */
5927FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
5928{
5929 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5930 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5931 return IEMOP_RAISE_INVALID_OPCODE();
5932
5933 IEM_MC_BEGIN(2, 0);
5934 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5935 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5938 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5939 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5940 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
5941 IEM_MC_END();
5942 return VINF_SUCCESS;
5943}
5944
5945
5946/**
5947 * @opmaps grp15
5948 * @opcode !11/4
5949 * @oppfx none
5950 * @opcpuid xsave
5951 * @opgroup og_system
5952 * @opxcpttype none
5953 */
5954FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
5955{
5956 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
5957 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5958 return IEMOP_RAISE_INVALID_OPCODE();
5959
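    /* Which state components get saved is decided in iemCImpl_xsave from the
       guest's EDX:EAX mask ANDed with XCR0 (the architectural RFBM), so no
       further decoding is needed here. */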
5960 IEM_MC_BEGIN(3, 0);
5961 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5962 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5963 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5966 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5967 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5968 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
5969 IEM_MC_END();
5970 return VINF_SUCCESS;
5971}
5972
5973
5974/**
5975 * @opmaps grp15
5976 * @opcode !11/5
5977 * @oppfx none
5978 * @opcpuid xsave
5979 * @opgroup og_system
5980 * @opxcpttype none
5981 */
5982FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
5983{
5984 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
5985 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5986 return IEMOP_RAISE_INVALID_OPCODE();
5987
5988 IEM_MC_BEGIN(3, 0);
5989 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5990 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5991 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5994 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5995 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5996 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5997 IEM_MC_END();
5998 return VINF_SUCCESS;
5999}
6000
6001/** Opcode 0x0f 0xae mem/6. */
6002FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6003
6004/**
6005 * @opmaps grp15
6006 * @opcode !11/7
6007 * @oppfx none
6008 * @opcpuid clfsh
6009 * @opgroup og_cachectl
6010 * @optest op1=1 ->
6011 */
6012FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6013{
6014 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6015 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6016 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6017
6018 IEM_MC_BEGIN(2, 0);
6019 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6020 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6023 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6024 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6025 IEM_MC_END();
6026 return VINF_SUCCESS;
6027}
6028
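/* CLFLUSHOPT (66h prefix) differs from CLFLUSH only in encoding and in its
   weaker ordering guarantees; both funnel into the same CIMPL worker here. */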
6029/**
6030 * @opmaps grp15
6031 * @opcode !11/7
6032 * @oppfx 0x66
6033 * @opcpuid clflushopt
6034 * @opgroup og_cachectl
6035 * @optest op1=1 ->
6036 */
6037FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6038{
6039 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6040 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6041 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6042
6043 IEM_MC_BEGIN(2, 0);
6044 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6045 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6048 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6049 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6050 IEM_MC_END();
6051 return VINF_SUCCESS;
6052}
6053
6054
6055/** Opcode 0x0f 0xae 11b/5. */
6056FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6057{
6058 RT_NOREF_PV(bRm);
6059 IEMOP_MNEMONIC(lfence, "lfence");
6060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6061 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6062 return IEMOP_RAISE_INVALID_OPCODE();
6063
6064 IEM_MC_BEGIN(0, 0);
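    /* If the host itself lacks SSE2 we cannot execute a real lfence;
       iemAImpl_alt_mem_fence supplies a substitute (presumably a locked RMW,
       which acts as a full fence on x86).  mfence/sfence below do the same. */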
6065 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6066 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6067 else
6068 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6069 IEM_MC_ADVANCE_RIP();
6070 IEM_MC_END();
6071 return VINF_SUCCESS;
6072}
6073
6074
6075/** Opcode 0x0f 0xae 11b/6. */
6076FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6077{
6078 RT_NOREF_PV(bRm);
6079 IEMOP_MNEMONIC(mfence, "mfence");
6080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6081 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6082 return IEMOP_RAISE_INVALID_OPCODE();
6083
6084 IEM_MC_BEGIN(0, 0);
6085 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6086 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6087 else
6088 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6089 IEM_MC_ADVANCE_RIP();
6090 IEM_MC_END();
6091 return VINF_SUCCESS;
6092}
6093
6094
6095/** Opcode 0x0f 0xae 11b/7. */
6096FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6097{
6098 RT_NOREF_PV(bRm);
6099 IEMOP_MNEMONIC(sfence, "sfence");
6100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6101 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6102 return IEMOP_RAISE_INVALID_OPCODE();
6103
6104 IEM_MC_BEGIN(0, 0);
6105 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6106 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6107 else
6108 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6109 IEM_MC_ADVANCE_RIP();
6110 IEM_MC_END();
6111 return VINF_SUCCESS;
6112}
6113
6114
6115/** Opcode 0xf3 0x0f 0xae 11b/0. */
6116FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6117
6118/** Opcode 0xf3 0x0f 0xae 11b/1. */
6119FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6120
6121/** Opcode 0xf3 0x0f 0xae 11b/2. */
6122FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6123
6124/** Opcode 0xf3 0x0f 0xae 11b/3. */
6125FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6126
6127
6128/**
6129 * Group 15 jump table for register variant.
6130 */
6131IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6132{ /* pfx: none, 066h, 0f3h, 0f2h */
6133 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6134 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6135 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6136 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6137 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6138 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6139 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6140 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6141};
6142AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6143
6144
6145/**
6146 * Group 15 jump table for memory variant.
6147 */
6148IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6149{ /* pfx: none, 066h, 0f3h, 0f2h */
6150 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6151 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6152 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6153 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6154 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6155 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6156 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6157 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6158};
6159AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6160
6161
6162/** Opcode 0x0f 0xae. */
6163FNIEMOP_DEF(iemOp_Grp15)
6164{
6165 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
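    /* Dispatch: mod=11b selects the register table, anything else the memory
       table; within a table the index is reg * 4 + idxPrefix, matching the
       none/066h/0f3h/0f2h columns above. */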
6167 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6168 /* register, register */
6169 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6170 + pVCpu->iem.s.idxPrefix], bRm);
6171 /* memory, register */
6172 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6173 + pVCpu->iem.s.idxPrefix], bRm);
6174}
6175
6176
6177/** Opcode 0x0f 0xaf. */
6178FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6179{
6180 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6181 IEMOP_HLP_MIN_386();
6182 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
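    /* Only CF and OF are architecturally defined for imul (set when the
       product overflows the destination); SF/ZF/AF/PF are undefined. */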
6183 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6184}
6185
6186
6187/** Opcode 0x0f 0xb0. */
6188FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6189{
6190 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6191 IEMOP_HLP_MIN_486();
6192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
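    /* cmpxchg: if AL == destination then ZF=1 and the destination becomes the
       source, else ZF=0 and AL becomes the destination.  AL is passed by
       reference so the helper can write it back on a mismatch. */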
6193
6194 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6195 {
6196 IEMOP_HLP_DONE_DECODING();
6197 IEM_MC_BEGIN(4, 0);
6198 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6199 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6200 IEM_MC_ARG(uint8_t, u8Src, 2);
6201 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6202
6203 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6204 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6205 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6206 IEM_MC_REF_EFLAGS(pEFlags);
6207 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6208 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6209 else
6210 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6211
6212 IEM_MC_ADVANCE_RIP();
6213 IEM_MC_END();
6214 }
6215 else
6216 {
6217 IEM_MC_BEGIN(4, 3);
6218 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6219 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6220 IEM_MC_ARG(uint8_t, u8Src, 2);
6221 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6223 IEM_MC_LOCAL(uint8_t, u8Al);
6224
6225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6226 IEMOP_HLP_DONE_DECODING();
6227 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6228 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6229 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6230 IEM_MC_FETCH_EFLAGS(EFlags);
6231 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6233 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6234 else
6235 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6236
6237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6238 IEM_MC_COMMIT_EFLAGS(EFlags);
6239 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6240 IEM_MC_ADVANCE_RIP();
6241 IEM_MC_END();
6242 }
6243 return VINF_SUCCESS;
6244}
6245
6246/** Opcode 0x0f 0xb1. */
6247FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6248{
6249 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6250 IEMOP_HLP_MIN_486();
6251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6252
6253 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6254 {
6255 IEMOP_HLP_DONE_DECODING();
6256 switch (pVCpu->iem.s.enmEffOpSize)
6257 {
6258 case IEMMODE_16BIT:
6259 IEM_MC_BEGIN(4, 0);
6260 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6261 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6262 IEM_MC_ARG(uint16_t, u16Src, 2);
6263 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6264
6265 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6266 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6267 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6268 IEM_MC_REF_EFLAGS(pEFlags);
6269 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6270 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6271 else
6272 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6273
6274 IEM_MC_ADVANCE_RIP();
6275 IEM_MC_END();
6276 return VINF_SUCCESS;
6277
6278 case IEMMODE_32BIT:
6279 IEM_MC_BEGIN(4, 0);
6280 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6281 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6282 IEM_MC_ARG(uint32_t, u32Src, 2);
6283 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6284
6285 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6286 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6287 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6288 IEM_MC_REF_EFLAGS(pEFlags);
6289 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6290 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6291 else
6292 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6293
6294 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6295 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6296 IEM_MC_ADVANCE_RIP();
6297 IEM_MC_END();
6298 return VINF_SUCCESS;
6299
6300 case IEMMODE_64BIT:
6301 IEM_MC_BEGIN(4, 0);
6302 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6303 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
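                /* On 32-bit (x86) hosts a 64-bit source cannot be passed by
                   value to the assembly helper, hence the by-reference variant. */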
6304#ifdef RT_ARCH_X86
6305 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6306#else
6307 IEM_MC_ARG(uint64_t, u64Src, 2);
6308#endif
6309 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6310
6311 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6312 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6313 IEM_MC_REF_EFLAGS(pEFlags);
6314#ifdef RT_ARCH_X86
6315 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6316 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6317 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6318 else
6319 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6320#else
6321 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6322 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6323 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6324 else
6325 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6326#endif
6327
6328 IEM_MC_ADVANCE_RIP();
6329 IEM_MC_END();
6330 return VINF_SUCCESS;
6331
6332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6333 }
6334 }
6335 else
6336 {
6337 switch (pVCpu->iem.s.enmEffOpSize)
6338 {
6339 case IEMMODE_16BIT:
6340 IEM_MC_BEGIN(4, 3);
6341 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6342 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6343 IEM_MC_ARG(uint16_t, u16Src, 2);
6344 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6346 IEM_MC_LOCAL(uint16_t, u16Ax);
6347
6348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6349 IEMOP_HLP_DONE_DECODING();
6350 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6351 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6352 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6353 IEM_MC_FETCH_EFLAGS(EFlags);
6354 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6355 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6356 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6357 else
6358 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6359
6360 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6361 IEM_MC_COMMIT_EFLAGS(EFlags);
6362 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6363 IEM_MC_ADVANCE_RIP();
6364 IEM_MC_END();
6365 return VINF_SUCCESS;
6366
6367 case IEMMODE_32BIT:
6368 IEM_MC_BEGIN(4, 3);
6369 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6370 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6371 IEM_MC_ARG(uint32_t, u32Src, 2);
6372 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6374 IEM_MC_LOCAL(uint32_t, u32Eax);
6375
6376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6377 IEMOP_HLP_DONE_DECODING();
6378 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6379 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6380 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6381 IEM_MC_FETCH_EFLAGS(EFlags);
6382 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6383 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6384 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6385 else
6386 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6387
6388 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6389 IEM_MC_COMMIT_EFLAGS(EFlags);
6390 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6391 IEM_MC_ADVANCE_RIP();
6392 IEM_MC_END();
6393 return VINF_SUCCESS;
6394
6395 case IEMMODE_64BIT:
6396 IEM_MC_BEGIN(4, 3);
6397 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6398 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6399#ifdef RT_ARCH_X86
6400 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6401#else
6402 IEM_MC_ARG(uint64_t, u64Src, 2);
6403#endif
6404 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6406 IEM_MC_LOCAL(uint64_t, u64Rax);
6407
6408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6409 IEMOP_HLP_DONE_DECODING();
6410 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6411 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6412 IEM_MC_FETCH_EFLAGS(EFlags);
6413 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6414#ifdef RT_ARCH_X86
6415 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6416 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6417 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6418 else
6419 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6420#else
6421 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6422 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6423 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6424 else
6425 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6426#endif
6427
6428 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6429 IEM_MC_COMMIT_EFLAGS(EFlags);
6430 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6431 IEM_MC_ADVANCE_RIP();
6432 IEM_MC_END();
6433 return VINF_SUCCESS;
6434
6435 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6436 }
6437 }
6438}
6439
6440
6441FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6442{
6443 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6444 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6445
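    /* Mp operand layout: the offset comes first at GCPtrEff, followed by the
       16-bit selector at GCPtrEff + 2/4/8 depending on the operand size, as
       fetched below. */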
6446 switch (pVCpu->iem.s.enmEffOpSize)
6447 {
6448 case IEMMODE_16BIT:
6449 IEM_MC_BEGIN(5, 1);
6450 IEM_MC_ARG(uint16_t, uSel, 0);
6451 IEM_MC_ARG(uint16_t, offSeg, 1);
6452 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6453 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6454 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6455 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6458 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6459 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6460 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6461 IEM_MC_END();
6462 return VINF_SUCCESS;
6463
6464 case IEMMODE_32BIT:
6465 IEM_MC_BEGIN(5, 1);
6466 IEM_MC_ARG(uint16_t, uSel, 0);
6467 IEM_MC_ARG(uint32_t, offSeg, 1);
6468 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6469 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6470 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6471 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6474 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6475 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6476 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6477 IEM_MC_END();
6478 return VINF_SUCCESS;
6479
6480 case IEMMODE_64BIT:
6481 IEM_MC_BEGIN(5, 1);
6482 IEM_MC_ARG(uint16_t, uSel, 0);
6483 IEM_MC_ARG(uint64_t, offSeg, 1);
6484 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6485 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6486 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6487 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6490 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
6491 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6492 else
6493 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6494 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6495 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6496 IEM_MC_END();
6497 return VINF_SUCCESS;
6498
6499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6500 }
6501}
6502
6503
6504/** Opcode 0x0f 0xb2. */
6505FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6506{
6507 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6508 IEMOP_HLP_MIN_386();
6509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6510 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6511 return IEMOP_RAISE_INVALID_OPCODE();
6512 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6513}
6514
6515
6516/** Opcode 0x0f 0xb3. */
6517FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6518{
6519 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6520 IEMOP_HLP_MIN_386();
6521 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6522}
6523
6524
6525/** Opcode 0x0f 0xb4. */
6526FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6527{
6528 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6529 IEMOP_HLP_MIN_386();
6530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6532 return IEMOP_RAISE_INVALID_OPCODE();
6533 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6534}
6535
6536
6537/** Opcode 0x0f 0xb5. */
6538FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6539{
6540 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6541 IEMOP_HLP_MIN_386();
6542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6543 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6544 return IEMOP_RAISE_INVALID_OPCODE();
6545 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6546}
6547
6548
6549/** Opcode 0x0f 0xb6. */
6550FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6551{
6552 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6553 IEMOP_HLP_MIN_386();
6554
6555 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6556
6557 /*
6558 * If rm is denoting a register, no more instruction bytes.
6559 */
6560 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6561 {
6562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6563 switch (pVCpu->iem.s.enmEffOpSize)
6564 {
6565 case IEMMODE_16BIT:
6566 IEM_MC_BEGIN(0, 1);
6567 IEM_MC_LOCAL(uint16_t, u16Value);
6568 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6569 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6570 IEM_MC_ADVANCE_RIP();
6571 IEM_MC_END();
6572 return VINF_SUCCESS;
6573
6574 case IEMMODE_32BIT:
6575 IEM_MC_BEGIN(0, 1);
6576 IEM_MC_LOCAL(uint32_t, u32Value);
6577 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6578 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6579 IEM_MC_ADVANCE_RIP();
6580 IEM_MC_END();
6581 return VINF_SUCCESS;
6582
6583 case IEMMODE_64BIT:
6584 IEM_MC_BEGIN(0, 1);
6585 IEM_MC_LOCAL(uint64_t, u64Value);
6586 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6587 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 return VINF_SUCCESS;
6591
6592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6593 }
6594 }
6595 else
6596 {
6597 /*
6598 * We're loading a register from memory.
6599 */
6600 switch (pVCpu->iem.s.enmEffOpSize)
6601 {
6602 case IEMMODE_16BIT:
6603 IEM_MC_BEGIN(0, 2);
6604 IEM_MC_LOCAL(uint16_t, u16Value);
6605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6608 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6609 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6610 IEM_MC_ADVANCE_RIP();
6611 IEM_MC_END();
6612 return VINF_SUCCESS;
6613
6614 case IEMMODE_32BIT:
6615 IEM_MC_BEGIN(0, 2);
6616 IEM_MC_LOCAL(uint32_t, u32Value);
6617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6620 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6621 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6622 IEM_MC_ADVANCE_RIP();
6623 IEM_MC_END();
6624 return VINF_SUCCESS;
6625
6626 case IEMMODE_64BIT:
6627 IEM_MC_BEGIN(0, 2);
6628 IEM_MC_LOCAL(uint64_t, u64Value);
6629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6632 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6633 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6634 IEM_MC_ADVANCE_RIP();
6635 IEM_MC_END();
6636 return VINF_SUCCESS;
6637
6638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6639 }
6640 }
6641}
6642
6643
6644/** Opcode 0x0f 0xb7. */
6645FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6646{
6647 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6648 IEMOP_HLP_MIN_386();
6649
6650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6651
6652 /** @todo Not entirely sure how the operand size prefix is handled here,
6653 * assuming that it will be ignored. Would be nice to have a few
6654 * tests for this. */
6655 /*
6656 * If rm is denoting a register, no more instruction bytes.
6657 */
6658 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6659 {
6660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6661 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6662 {
6663 IEM_MC_BEGIN(0, 1);
6664 IEM_MC_LOCAL(uint32_t, u32Value);
6665 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6666 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6667 IEM_MC_ADVANCE_RIP();
6668 IEM_MC_END();
6669 }
6670 else
6671 {
6672 IEM_MC_BEGIN(0, 1);
6673 IEM_MC_LOCAL(uint64_t, u64Value);
6674 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6675 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6676 IEM_MC_ADVANCE_RIP();
6677 IEM_MC_END();
6678 }
6679 }
6680 else
6681 {
6682 /*
6683 * We're loading a register from memory.
6684 */
6685 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6686 {
6687 IEM_MC_BEGIN(0, 2);
6688 IEM_MC_LOCAL(uint32_t, u32Value);
6689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6692 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6693 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6694 IEM_MC_ADVANCE_RIP();
6695 IEM_MC_END();
6696 }
6697 else
6698 {
6699 IEM_MC_BEGIN(0, 2);
6700 IEM_MC_LOCAL(uint64_t, u64Value);
6701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6704 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6705 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6706 IEM_MC_ADVANCE_RIP();
6707 IEM_MC_END();
6708 }
6709 }
6710 return VINF_SUCCESS;
6711}
6712
6713
6714/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6715FNIEMOP_UD_STUB(iemOp_jmpe);
6716/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6717FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6718
6719
6720/**
6721 * @opcode 0xb9
6722 * @opinvalid intel-modrm
6723 * @optest ->
6724 */
6725FNIEMOP_DEF(iemOp_Grp10)
6726{
6727 /*
6728 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel also decodes
6729 * the modr/m byte. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6730 */
6731 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6732 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6733 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6734}
6735
6736
6737/** Opcode 0x0f 0xba. */
6738FNIEMOP_DEF(iemOp_Grp8)
6739{
6740 IEMOP_HLP_MIN_386();
6741 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6742 PCIEMOPBINSIZES pImpl;
6743 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6744 {
6745 case 0: case 1: case 2: case 3:
6746 /* Both AMD and Intel want full modr/m decoding and imm8. */
6747 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6748 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6749 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6750 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6751 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6752 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6753 }
6754 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6755
6756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6757 {
6758 /* register destination. */
6759 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6761
6762 switch (pVCpu->iem.s.enmEffOpSize)
6763 {
6764 case IEMMODE_16BIT:
6765 IEM_MC_BEGIN(3, 0);
6766 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6767 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6768 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6769
6770 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6771 IEM_MC_REF_EFLAGS(pEFlags);
6772 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6773
6774 IEM_MC_ADVANCE_RIP();
6775 IEM_MC_END();
6776 return VINF_SUCCESS;
6777
6778 case IEMMODE_32BIT:
6779 IEM_MC_BEGIN(3, 0);
6780 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6781 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6782 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6783
6784 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6785 IEM_MC_REF_EFLAGS(pEFlags);
6786 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6787
6788 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6789 IEM_MC_ADVANCE_RIP();
6790 IEM_MC_END();
6791 return VINF_SUCCESS;
6792
6793 case IEMMODE_64BIT:
6794 IEM_MC_BEGIN(3, 0);
6795 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6796 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6797 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6798
6799 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6800 IEM_MC_REF_EFLAGS(pEFlags);
6801 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6802
6803 IEM_MC_ADVANCE_RIP();
6804 IEM_MC_END();
6805 return VINF_SUCCESS;
6806
6807 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6808 }
6809 }
6810 else
6811 {
6812 /* memory destination. */
6813
6814 uint32_t fAccess;
6815 if (pImpl->pfnLockedU16)
6816 fAccess = IEM_ACCESS_DATA_RW;
6817 else /* BT */
6818 fAccess = IEM_ACCESS_DATA_R;
6819
6820 /** @todo test negative bit offsets! */
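        /* Unlike the Gv forms, the Ib bit offset is masked to the operand
           width below (0x0f/0x1f/0x3f), so no effective address adjustment is
           needed for the memory operand. */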
6821 switch (pVCpu->iem.s.enmEffOpSize)
6822 {
6823 case IEMMODE_16BIT:
6824 IEM_MC_BEGIN(3, 1);
6825 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6826 IEM_MC_ARG(uint16_t, u16Src, 1);
6827 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6829
6830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6831 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6832 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6833 if (pImpl->pfnLockedU16)
6834 IEMOP_HLP_DONE_DECODING();
6835 else
6836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6837 IEM_MC_FETCH_EFLAGS(EFlags);
6838 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6839 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6840 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6841 else
6842 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6843 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6844
6845 IEM_MC_COMMIT_EFLAGS(EFlags);
6846 IEM_MC_ADVANCE_RIP();
6847 IEM_MC_END();
6848 return VINF_SUCCESS;
6849
6850 case IEMMODE_32BIT:
6851 IEM_MC_BEGIN(3, 1);
6852 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6853 IEM_MC_ARG(uint32_t, u32Src, 1);
6854 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6856
6857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6858 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6859 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6860 if (pImpl->pfnLockedU16)
6861 IEMOP_HLP_DONE_DECODING();
6862 else
6863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6864 IEM_MC_FETCH_EFLAGS(EFlags);
6865 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6866 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6867 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6868 else
6869 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6870 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6871
6872 IEM_MC_COMMIT_EFLAGS(EFlags);
6873 IEM_MC_ADVANCE_RIP();
6874 IEM_MC_END();
6875 return VINF_SUCCESS;
6876
6877 case IEMMODE_64BIT:
6878 IEM_MC_BEGIN(3, 1);
6879 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6880 IEM_MC_ARG(uint64_t, u64Src, 1);
6881 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6883
6884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6885 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6886 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6887 if (pImpl->pfnLockedU16)
6888 IEMOP_HLP_DONE_DECODING();
6889 else
6890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6891 IEM_MC_FETCH_EFLAGS(EFlags);
6892 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6893 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6895 else
6896 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6897 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6898
6899 IEM_MC_COMMIT_EFLAGS(EFlags);
6900 IEM_MC_ADVANCE_RIP();
6901 IEM_MC_END();
6902 return VINF_SUCCESS;
6903
6904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6905 }
6906 }
6907}
6908
6909
6910/** Opcode 0x0f 0xbb. */
6911FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6912{
6913 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6914 IEMOP_HLP_MIN_386();
6915 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6916}
6917
6918
6919/** Opcode 0x0f 0xbc. */
6920FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6921{
6922 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6923 IEMOP_HLP_MIN_386();
6924 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
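    /* Only ZF is architecturally defined for bsf/bsr (set when the source is
       zero); the remaining flags vary between CPU models, hence the
       undefined-flags annotation for the verifier. */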
6925 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6926}
6927
6928
6929/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6930FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6931
6932
6933/** Opcode 0x0f 0xbd. */
6934FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6935{
6936 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6937 IEMOP_HLP_MIN_386();
6938 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6939 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6940}
6941
6942
6943/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6944FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6945
6946
6947/** Opcode 0x0f 0xbe. */
6948FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6949{
6950 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6951 IEMOP_HLP_MIN_386();
6952
6953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6954
6955 /*
6956 * If rm is denoting a register, no more instruction bytes.
6957 */
6958 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6959 {
6960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6961 switch (pVCpu->iem.s.enmEffOpSize)
6962 {
6963 case IEMMODE_16BIT:
6964 IEM_MC_BEGIN(0, 1);
6965 IEM_MC_LOCAL(uint16_t, u16Value);
6966 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6967 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6968 IEM_MC_ADVANCE_RIP();
6969 IEM_MC_END();
6970 return VINF_SUCCESS;
6971
6972 case IEMMODE_32BIT:
6973 IEM_MC_BEGIN(0, 1);
6974 IEM_MC_LOCAL(uint32_t, u32Value);
6975 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6976 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6977 IEM_MC_ADVANCE_RIP();
6978 IEM_MC_END();
6979 return VINF_SUCCESS;
6980
6981 case IEMMODE_64BIT:
6982 IEM_MC_BEGIN(0, 1);
6983 IEM_MC_LOCAL(uint64_t, u64Value);
6984 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6985 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6986 IEM_MC_ADVANCE_RIP();
6987 IEM_MC_END();
6988 return VINF_SUCCESS;
6989
6990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6991 }
6992 }
6993 else
6994 {
6995 /*
6996 * We're loading a register from memory.
6997 */
6998 switch (pVCpu->iem.s.enmEffOpSize)
6999 {
7000 case IEMMODE_16BIT:
7001 IEM_MC_BEGIN(0, 2);
7002 IEM_MC_LOCAL(uint16_t, u16Value);
7003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7006 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7007 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7008 IEM_MC_ADVANCE_RIP();
7009 IEM_MC_END();
7010 return VINF_SUCCESS;
7011
7012 case IEMMODE_32BIT:
7013 IEM_MC_BEGIN(0, 2);
7014 IEM_MC_LOCAL(uint32_t, u32Value);
7015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7018 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7019 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7020 IEM_MC_ADVANCE_RIP();
7021 IEM_MC_END();
7022 return VINF_SUCCESS;
7023
7024 case IEMMODE_64BIT:
7025 IEM_MC_BEGIN(0, 2);
7026 IEM_MC_LOCAL(uint64_t, u64Value);
7027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7030 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7031 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7032 IEM_MC_ADVANCE_RIP();
7033 IEM_MC_END();
7034 return VINF_SUCCESS;
7035
7036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7037 }
7038 }
7039}
7040
7041
7042/** Opcode 0x0f 0xbf. */
7043FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7044{
7045 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7046 IEMOP_HLP_MIN_386();
7047
7048 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7049
7050 /** @todo Not entirely sure how the operand size prefix is handled here,
7051 * assuming that it will be ignored. Would be nice to have a few
7052 * tests for this. */
7053 /*
7054 * If rm is denoting a register, no more instruction bytes.
7055 */
7056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7057 {
7058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7059 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7060 {
7061 IEM_MC_BEGIN(0, 1);
7062 IEM_MC_LOCAL(uint32_t, u32Value);
7063 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7064 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7065 IEM_MC_ADVANCE_RIP();
7066 IEM_MC_END();
7067 }
7068 else
7069 {
7070 IEM_MC_BEGIN(0, 1);
7071 IEM_MC_LOCAL(uint64_t, u64Value);
7072 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7073 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7074 IEM_MC_ADVANCE_RIP();
7075 IEM_MC_END();
7076 }
7077 }
7078 else
7079 {
7080 /*
7081 * We're loading a register from memory.
7082 */
7083 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7084 {
7085 IEM_MC_BEGIN(0, 2);
7086 IEM_MC_LOCAL(uint32_t, u32Value);
7087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7090 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7091 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7092 IEM_MC_ADVANCE_RIP();
7093 IEM_MC_END();
7094 }
7095 else
7096 {
7097 IEM_MC_BEGIN(0, 2);
7098 IEM_MC_LOCAL(uint64_t, u64Value);
7099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7102 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7103 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7104 IEM_MC_ADVANCE_RIP();
7105 IEM_MC_END();
7106 }
7107 }
7108 return VINF_SUCCESS;
7109}
7110
7111
7112/** Opcode 0x0f 0xc0. */
7113FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7114{
7115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7116 IEMOP_HLP_MIN_486();
7117 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7118
7119 /*
7120 * If rm is denoting a register, no more instruction bytes.
7121 */
7122 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7123 {
7124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7125
7126 IEM_MC_BEGIN(3, 0);
7127 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7128 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7129 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7130
7131 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7132 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7133 IEM_MC_REF_EFLAGS(pEFlags);
7134 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7135
7136 IEM_MC_ADVANCE_RIP();
7137 IEM_MC_END();
7138 }
7139 else
7140 {
7141 /*
7142 * We're accessing memory.
7143 */
7144 IEM_MC_BEGIN(3, 3);
7145 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7146 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7147 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7148 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7150
7151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7152 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7153 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7154 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7155 IEM_MC_FETCH_EFLAGS(EFlags);
7156 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7157 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7158 else
7159 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7160
7161 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7162 IEM_MC_COMMIT_EFLAGS(EFlags);
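        /* The xadd helper exchanged the original memory value into u8RegCopy,
           so that is what must be written back to the source register. */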
7163 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7164 IEM_MC_ADVANCE_RIP();
7165 IEM_MC_END();
7167 }
7168 return VINF_SUCCESS;
7169}
7170
7171
7172/** Opcode 0x0f 0xc1. */
7173FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7174{
7175 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7176 IEMOP_HLP_MIN_486();
7177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7178
7179 /*
7180 * If rm is denoting a register, no more instruction bytes.
7181 */
7182 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7183 {
7184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7185
7186 switch (pVCpu->iem.s.enmEffOpSize)
7187 {
7188 case IEMMODE_16BIT:
7189 IEM_MC_BEGIN(3, 0);
7190 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7191 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7192 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7193
7194 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7195 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7196 IEM_MC_REF_EFLAGS(pEFlags);
7197 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7198
7199 IEM_MC_ADVANCE_RIP();
7200 IEM_MC_END();
7201 return VINF_SUCCESS;
7202
7203 case IEMMODE_32BIT:
7204 IEM_MC_BEGIN(3, 0);
7205 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7206 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7207 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7208
7209 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7210 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7211 IEM_MC_REF_EFLAGS(pEFlags);
7212 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7213
7214 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7215 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7216 IEM_MC_ADVANCE_RIP();
7217 IEM_MC_END();
7218 return VINF_SUCCESS;
7219
7220 case IEMMODE_64BIT:
7221 IEM_MC_BEGIN(3, 0);
7222 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7223 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7224 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7225
7226 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7227 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7228 IEM_MC_REF_EFLAGS(pEFlags);
7229 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7230
7231 IEM_MC_ADVANCE_RIP();
7232 IEM_MC_END();
7233 return VINF_SUCCESS;
7234
7235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7236 }
7237 }
7238 else
7239 {
7240 /*
7241 * We're accessing memory.
7242 */
7243 switch (pVCpu->iem.s.enmEffOpSize)
7244 {
7245 case IEMMODE_16BIT:
7246 IEM_MC_BEGIN(3, 3);
7247 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7248 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7249 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7250 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7252
7253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7254 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7255 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7256 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7257 IEM_MC_FETCH_EFLAGS(EFlags);
7258 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7259 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7260 else
7261 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7262
7263 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7264 IEM_MC_COMMIT_EFLAGS(EFlags);
7265 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7266 IEM_MC_ADVANCE_RIP();
7267 IEM_MC_END();
7268 return VINF_SUCCESS;
7269
7270 case IEMMODE_32BIT:
7271 IEM_MC_BEGIN(3, 3);
7272 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7273 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7274 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7275 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7277
7278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7279 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7280 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7281 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7282 IEM_MC_FETCH_EFLAGS(EFlags);
7283 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7284 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7285 else
7286 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7287
7288 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7289 IEM_MC_COMMIT_EFLAGS(EFlags);
7290 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7291 IEM_MC_ADVANCE_RIP();
7292 IEM_MC_END();
7293 return VINF_SUCCESS;
7294
7295 case IEMMODE_64BIT:
7296 IEM_MC_BEGIN(3, 3);
7297 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7298 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7299 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7300 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7302
7303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7304 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7305 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7306 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7307 IEM_MC_FETCH_EFLAGS(EFlags);
7308 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7309 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7310 else
7311 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7312
7313 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7314 IEM_MC_COMMIT_EFLAGS(EFlags);
7315 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7316 IEM_MC_ADVANCE_RIP();
7317 IEM_MC_END();
7318 return VINF_SUCCESS;
7319
7320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7321 }
7322 }
7323}
7324
7325
7326/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7327FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7328/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7329FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7330/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7331FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7332/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7333FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7334
7335
7336/** Opcode 0x0f 0xc3. */
7337FNIEMOP_DEF(iemOp_movnti_My_Gy)
7338{
7339 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7340
7341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7342
7343 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7344 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7345 {
7346 switch (pVCpu->iem.s.enmEffOpSize)
7347 {
7348 case IEMMODE_32BIT:
7349 IEM_MC_BEGIN(0, 2);
7350 IEM_MC_LOCAL(uint32_t, u32Value);
7351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7352
7353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7355 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7356 return IEMOP_RAISE_INVALID_OPCODE();
7357
7358 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7359 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7360 IEM_MC_ADVANCE_RIP();
7361 IEM_MC_END();
7362 break;
7363
7364 case IEMMODE_64BIT:
7365 IEM_MC_BEGIN(0, 2);
7366 IEM_MC_LOCAL(uint64_t, u64Value);
7367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7368
7369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7371 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7372 return IEMOP_RAISE_INVALID_OPCODE();
7373
7374 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7375 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7376 IEM_MC_ADVANCE_RIP();
7377 IEM_MC_END();
7378 break;
7379
7380 case IEMMODE_16BIT:
7381 /** @todo check this form. */
7382 return IEMOP_RAISE_INVALID_OPCODE();
7383 }
7384 }
7385 else
7386 return IEMOP_RAISE_INVALID_OPCODE();
7387 return VINF_SUCCESS;
7388}
7389/* Opcode 0x66 0x0f 0xc3 - invalid */
7390/* Opcode 0xf3 0x0f 0xc3 - invalid */
7391/* Opcode 0xf2 0x0f 0xc3 - invalid */
7392
7393/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7394FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7395/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7396FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7397/* Opcode 0xf3 0x0f 0xc4 - invalid */
7398/* Opcode 0xf2 0x0f 0xc4 - invalid */
7399
7400/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7401FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7402/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7403FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7404/* Opcode 0xf3 0x0f 0xc5 - invalid */
7405/* Opcode 0xf2 0x0f 0xc5 - invalid */
7406
7407/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7408FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7409/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7410FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7411/* Opcode 0xf3 0x0f 0xc6 - invalid */
7412/* Opcode 0xf2 0x0f 0xc6 - invalid */
7413
7414
7415/** Opcode 0x0f 0xc7 !11/1. */
7416FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7417{
7418 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7419
7420 IEM_MC_BEGIN(4, 3);
7421 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7422 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7423 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7424 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7425 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7426 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7428
7429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7430 IEMOP_HLP_DONE_DECODING();
7431 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7432
7433 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7434 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7435 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7436
7437 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7438 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7439 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7440
7441 IEM_MC_FETCH_EFLAGS(EFlags);
7442 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7443 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7444 else
7445 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7446
7447 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7448 IEM_MC_COMMIT_EFLAGS(EFlags);
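    /* On failure (ZF clear) the helper loaded the current memory value into
       the EDX:EAX pair, which we propagate back to the guest registers. */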
7449 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7450 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7451 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7452 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7453 IEM_MC_ENDIF();
7454 IEM_MC_ADVANCE_RIP();
7455
7456 IEM_MC_END();
7457 return VINF_SUCCESS;
7458}
7459
7460
7461/** Opcode REX.W 0x0f 0xc7 !11/1. */
7462FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7463{
7464 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7465 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7466 {
7467#if 0
7468 RT_NOREF(bRm);
7469 IEMOP_BITCH_ABOUT_STUB();
7470 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7471#else
7472 IEM_MC_BEGIN(4, 3);
7473 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7474 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7475 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7476 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7477 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7478 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7480
7481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7482 IEMOP_HLP_DONE_DECODING();
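        /* cmpxchg16b requires its memory operand to be 16-byte aligned;
           misaligned operands raise #GP(0). */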
7483 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7484 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7485
7486 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7487 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7488 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7489
7490 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7491 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7492 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7493
7494 IEM_MC_FETCH_EFLAGS(EFlags);
7495# ifdef RT_ARCH_AMD64
7496 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7497 {
7498 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7499 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7500 else
7501 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7502 }
7503 else
7504# endif
7505 {
7506 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7507 accesses and thus not atomic, which works fine in a UNI CPU guest
7508 configuration (ignoring DMA). If guest SMP is active we have no choice
7509 but to use a rendezvous callback here. Sigh. */
7510 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7511 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7512 else
7513 {
7514 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7515 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7516 }
7517 }
7518
7519 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7520 IEM_MC_COMMIT_EFLAGS(EFlags);
7521 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7522 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7523 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7524 IEM_MC_ENDIF();
7525 IEM_MC_ADVANCE_RIP();
7526
7527 IEM_MC_END();
7528 return VINF_SUCCESS;
7529#endif
7530 }
7531 Log(("cmpxchg16b -> #UD\n"));
7532 return IEMOP_RAISE_INVALID_OPCODE();
7533}
7534
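/** Opcode 0x0f 0xc7 !11/1 - dispatches between cmpxchg8b and cmpxchg16b on REX.W. */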
7535FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7536{
7537 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7538 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7539 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7540}
7541
7542/** Opcode 0x0f 0xc7 11/6. */
7543FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7544
7545/** Opcode 0x0f 0xc7 !11/6. */
7546FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7547
7548/** Opcode 0x66 0x0f 0xc7 !11/6. */
7549FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7550
7551/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7552FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7553
7554/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7555FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7556
7557/** Opcode 0x0f 0xc7 11/7. */
7558FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7559
7560
7561/**
7562 * Group 9 jump table for register variant.
7563 */
7564IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7565{ /* pfx: none, 066h, 0f3h, 0f2h */
7566 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7567 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7568 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7569 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7570 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7571 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7572 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7573 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7574};
7575AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7576
7577
7578/**
7579 * Group 9 jump table for memory variant.
7580 */
7581IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7582{ /* pfx: none, 066h, 0f3h, 0f2h */
7583 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7584 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7585 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7586 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7587 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7588 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7589 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7590 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7591};
7592AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7593
7594
7595/** Opcode 0x0f 0xc7. */
7596FNIEMOP_DEF(iemOp_Grp9)
7597{
7598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
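    /* Both jump tables are indexed by the ModR/M reg field times four plus
       the active mandatory prefix (none, 066h, 0f3h, 0f2h). */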
7599 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7600 /* register, register */
7601 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7602 + pVCpu->iem.s.idxPrefix], bRm);
7603 /* memory, register */
7604 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7605 + pVCpu->iem.s.idxPrefix], bRm);
7606}
7607
7608
7609/**
7610 * Common 'bswap register' helper.
7611 */
7612FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7613{
7614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7615 switch (pVCpu->iem.s.enmEffOpSize)
7616 {
7617 case IEMMODE_16BIT:
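            /* Note! Intel documents bswap with a 16-bit operand as undefined;
               the u16 worker supplies the behaviour we choose to emulate. */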
7618 IEM_MC_BEGIN(1, 0);
7619 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7620 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7621 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7622 IEM_MC_ADVANCE_RIP();
7623 IEM_MC_END();
7624 return VINF_SUCCESS;
7625
7626 case IEMMODE_32BIT:
7627 IEM_MC_BEGIN(1, 0);
7628 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7629 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7630 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7631 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7632 IEM_MC_ADVANCE_RIP();
7633 IEM_MC_END();
7634 return VINF_SUCCESS;
7635
7636 case IEMMODE_64BIT:
7637 IEM_MC_BEGIN(1, 0);
7638 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7639 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7640 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7641 IEM_MC_ADVANCE_RIP();
7642 IEM_MC_END();
7643 return VINF_SUCCESS;
7644
7645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7646 }
7647}
7648
7649
7650/** Opcode 0x0f 0xc8. */
7651FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7652{
7653 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7654 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7655 prefix. It appears, however, that REX.B is the correct prefix. For a
7656 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7657 IEMOP_HLP_MIN_486();
7658 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7659}
7660
7661
7662/** Opcode 0x0f 0xc9. */
7663FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7664{
7665 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7666 IEMOP_HLP_MIN_486();
7667 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7668}
7669
7670
7671/** Opcode 0x0f 0xca. */
7672FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7673{
7674 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7675 IEMOP_HLP_MIN_486();
7676 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7677}
7678
7679
7680/** Opcode 0x0f 0xcb. */
7681FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7682{
7683 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7684 IEMOP_HLP_MIN_486();
7685 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7686}
7687
7688
7689/** Opcode 0x0f 0xcc. */
7690FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7691{
7692 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7693 IEMOP_HLP_MIN_486();
7694 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7695}
7696
7697
7698/** Opcode 0x0f 0xcd. */
7699FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7700{
7701 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7702 IEMOP_HLP_MIN_486();
7703 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7704}
7705
7706
7707/** Opcode 0x0f 0xce. */
7708FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7709{
7710 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7711 IEMOP_HLP_MIN_486();
7712 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7713}
7714
7715
7716/** Opcode 0x0f 0xcf. */
7717FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7718{
7719 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7720 IEMOP_HLP_MIN_486();
7721 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7722}
7723
7724
7725/* Opcode 0x0f 0xd0 - invalid */
7726/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7727FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7728/* Opcode 0xf3 0x0f 0xd0 - invalid */
7729/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7730FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7731
7732/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7733FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7734/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
7735FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7736/* Opcode 0xf3 0x0f 0xd1 - invalid */
7737/* Opcode 0xf2 0x0f 0xd1 - invalid */
7738
7739/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7740FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7741/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7742FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7743/* Opcode 0xf3 0x0f 0xd2 - invalid */
7744/* Opcode 0xf2 0x0f 0xd2 - invalid */
7745
7746/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7747FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7748/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
7749FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
7750/* Opcode 0xf3 0x0f 0xd3 - invalid */
7751/* Opcode 0xf2 0x0f 0xd3 - invalid */
7752
7753/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7754FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7755/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
7756FNIEMOP_STUB(iemOp_paddq_Vx_W);
7757/* Opcode 0xf3 0x0f 0xd4 - invalid */
7758/* Opcode 0xf2 0x0f 0xd4 - invalid */
7759
7760/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7761FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7762/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
7763FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
7764/* Opcode 0xf3 0x0f 0xd5 - invalid */
7765/* Opcode 0xf2 0x0f 0xd5 - invalid */
7766
7767/* Opcode 0x0f 0xd6 - invalid */
7768
7769/**
7770 * @opcode 0xd6
7771 * @oppfx 0x66
7772 * @opcpuid sse2
7773 * @opgroup og_sse2_pcksclr_datamove
7774 * @opxcpttype none
7775 * @optest op1=-1 op2=2 -> op1=2
7776 * @optest op1=0 op2=-42 -> op1=-42
7777 */
7778FNIEMOP_DEF(iemOp_movq_Wq_Vq)
7779{
7780 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7781 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7782 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7783 {
7784 /*
7785 * Register, register.
7786 */
7787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
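        /* movq Wq,Vq zero extends: the destination's high quadword is cleared
           by the U64_ZX_U128 store below. */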
7788 IEM_MC_BEGIN(0, 2);
7789 IEM_MC_LOCAL(uint64_t, uSrc);
7790
7791 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7792 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7793
7794 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7795 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7796
7797 IEM_MC_ADVANCE_RIP();
7798 IEM_MC_END();
7799 }
7800 else
7801 {
7802 /*
7803 * Memory, register.
7804 */
7805 IEM_MC_BEGIN(0, 2);
7806 IEM_MC_LOCAL(uint64_t, uSrc);
7807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7808
7809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7811 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7812 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7813
7814 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7815 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7816
7817 IEM_MC_ADVANCE_RIP();
7818 IEM_MC_END();
7819 }
7820 return VINF_SUCCESS;
7821}
7822
7823
7824/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7825FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7826/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7827FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7828#if 0
7829FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7830{
7831 /* Docs says register only. */
7832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7833
7834 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7835 {
7836 case IEM_OP_PRF_SIZE_OP: /* SSE */
7837 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7838 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7839 IEM_MC_BEGIN(2, 0);
7840 IEM_MC_ARG(uint64_t *, pDst, 0);
7841 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7842 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7843 IEM_MC_PREPARE_SSE_USAGE();
7844 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7845 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7846 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7847 IEM_MC_ADVANCE_RIP();
7848 IEM_MC_END();
7849 return VINF_SUCCESS;
7850
7851 case 0: /* MMX */
7852 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7853 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7854 IEM_MC_BEGIN(2, 0);
7855 IEM_MC_ARG(uint64_t *, pDst, 0);
7856 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7857 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7858 IEM_MC_PREPARE_FPU_USAGE();
7859 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7860 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7861 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7862 IEM_MC_ADVANCE_RIP();
7863 IEM_MC_END();
7864 return VINF_SUCCESS;
7865
7866 default:
7867 return IEMOP_RAISE_INVALID_OPCODE();
7868 }
7869}
7870#endif
7871
7872
7873/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7874FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7875{
7876 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7877 /** @todo testcase: Check that the instruction implicitly clears the high
7878 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7879 * and opcode modifications are made to work with the whole width (not
7880 * just 128). */
7881 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7882 /* Docs says register only. */
7883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7885 {
7886 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7887 IEM_MC_BEGIN(2, 0);
7888 IEM_MC_ARG(uint64_t *, pDst, 0);
7889 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7890 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7891 IEM_MC_PREPARE_FPU_USAGE();
7892 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7893 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7894 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7895 IEM_MC_ADVANCE_RIP();
7896 IEM_MC_END();
7897 return VINF_SUCCESS;
7898 }
7899 return IEMOP_RAISE_INVALID_OPCODE();
7900}
7901
7902/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
7903FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
7904{
7905 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7906 /** @todo testcase: Check that the instruction implicitly clears the high
7907 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7908 * and opcode modifications are made to work with the whole width (not
7909 * just 128). */
7910 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
7911 /* Docs says register only. */
7912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7913 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7914 {
7915 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7916 IEM_MC_BEGIN(2, 0);
7917 IEM_MC_ARG(uint64_t *, pDst, 0);
7918 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7919 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7920 IEM_MC_PREPARE_SSE_USAGE();
7921 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7922 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7923 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7924 IEM_MC_ADVANCE_RIP();
7925 IEM_MC_END();
7926 return VINF_SUCCESS;
7927 }
7928 return IEMOP_RAISE_INVALID_OPCODE();
7929}
7930
7931/* Opcode 0xf3 0x0f 0xd7 - invalid */
7932/* Opcode 0xf2 0x0f 0xd7 - invalid */
7933
7934
7935/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7936FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7937/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
7938FNIEMOP_STUB(iemOp_psubusb_Vx_W);
7939/* Opcode 0xf3 0x0f 0xd8 - invalid */
7940/* Opcode 0xf2 0x0f 0xd8 - invalid */
7941
7942/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7943FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7944/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
7945FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
7946/* Opcode 0xf3 0x0f 0xd9 - invalid */
7947/* Opcode 0xf2 0x0f 0xd9 - invalid */
7948
7949/** Opcode 0x0f 0xda - pminub Pq, Qq */
7950FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7951/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
7952FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
7953/* Opcode 0xf3 0x0f 0xda - invalid */
7954/* Opcode 0xf2 0x0f 0xda - invalid */
7955
7956/** Opcode 0x0f 0xdb - pand Pq, Qq */
7957FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7958/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
7959FNIEMOP_STUB(iemOp_pand_Vx_W);
7960/* Opcode 0xf3 0x0f 0xdb - invalid */
7961/* Opcode 0xf2 0x0f 0xdb - invalid */
7962
7963/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7964FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7965/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
7966FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
7967/* Opcode 0xf3 0x0f 0xdc - invalid */
7968/* Opcode 0xf2 0x0f 0xdc - invalid */
7969
7970/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7971FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7972/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
7973FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
7974/* Opcode 0xf3 0x0f 0xdd - invalid */
7975/* Opcode 0xf2 0x0f 0xdd - invalid */
7976
7977/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7978FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7979/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
7980FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
7981/* Opcode 0xf3 0x0f 0xde - invalid */
7982/* Opcode 0xf2 0x0f 0xde - invalid */
7983
7984/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7985FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7986/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
7987FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
7988/* Opcode 0xf3 0x0f 0xdf - invalid */
7989/* Opcode 0xf2 0x0f 0xdf - invalid */
7990
7991/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7992FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7993/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
7994FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
7995/* Opcode 0xf3 0x0f 0xe0 - invalid */
7996/* Opcode 0xf2 0x0f 0xe0 - invalid */
7997
7998/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7999FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8000/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8001FNIEMOP_STUB(iemOp_psraw_Vx_W);
8002/* Opcode 0xf3 0x0f 0xe1 - invalid */
8003/* Opcode 0xf2 0x0f 0xe1 - invalid */
8004
8005/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8006FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8007/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8008FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8009/* Opcode 0xf3 0x0f 0xe2 - invalid */
8010/* Opcode 0xf2 0x0f 0xe2 - invalid */
8011
8012/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8013FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8014/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8015FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8016/* Opcode 0xf3 0x0f 0xe3 - invalid */
8017/* Opcode 0xf2 0x0f 0xe3 - invalid */
8018
8019/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8020FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8021/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8022FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8023/* Opcode 0xf3 0x0f 0xe4 - invalid */
8024/* Opcode 0xf2 0x0f 0xe4 - invalid */
8025
8026/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8027FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8028/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8029FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8030/* Opcode 0xf3 0x0f 0xe5 - invalid */
8031/* Opcode 0xf2 0x0f 0xe5 - invalid */
8032
8033/* Opcode 0x0f 0xe6 - invalid */
8034/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8035FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8036/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8037FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8038/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8039FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8040
8041
8042/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8043FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8044{
8045 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
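    /* Note! IEM doesn't model the non-temporal hint; the store below is
       carried out as an ordinary write. */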
8046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8047 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8048 {
8049 /* Register, memory. */
8050 IEM_MC_BEGIN(0, 2);
8051 IEM_MC_LOCAL(uint64_t, uSrc);
8052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8053
8054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8056 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8057 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8058
8059 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8060 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8061
8062 IEM_MC_ADVANCE_RIP();
8063 IEM_MC_END();
8064 return VINF_SUCCESS;
8065 }
8066 /* The register, register encoding is invalid. */
8067 return IEMOP_RAISE_INVALID_OPCODE();
8068}
8069
8070/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8071FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8072{
8073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8074 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8075 {
8076 /* Register, memory. */
8077 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8078 IEM_MC_BEGIN(0, 2);
8079 IEM_MC_LOCAL(RTUINT128U, uSrc);
8080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8081
8082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8084 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8085 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8086
8087 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8088 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8089
8090 IEM_MC_ADVANCE_RIP();
8091 IEM_MC_END();
8092 return VINF_SUCCESS;
8093 }
8094
8095 /* The register, register encoding is invalid. */
8096 return IEMOP_RAISE_INVALID_OPCODE();
8097}
8098
8099/* Opcode 0xf3 0x0f 0xe7 - invalid */
8100/* Opcode 0xf2 0x0f 0xe7 - invalid */
8101
8102
8103/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8104FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8105/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8106FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8107/* Opcode 0xf3 0x0f 0xe8 - invalid */
8108/* Opcode 0xf2 0x0f 0xe8 - invalid */
8109
8110/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8111FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8112/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8113FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8114/* Opcode 0xf3 0x0f 0xe9 - invalid */
8115/* Opcode 0xf2 0x0f 0xe9 - invalid */
8116
8117/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8118FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8119/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8120FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8121/* Opcode 0xf3 0x0f 0xea - invalid */
8122/* Opcode 0xf2 0x0f 0xea - invalid */
8123
8124/** Opcode 0x0f 0xeb - por Pq, Qq */
8125FNIEMOP_STUB(iemOp_por_Pq_Qq);
8126/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8127FNIEMOP_STUB(iemOp_por_Vx_W);
8128/* Opcode 0xf3 0x0f 0xeb - invalid */
8129/* Opcode 0xf2 0x0f 0xeb - invalid */
8130
8131/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8132FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8133/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8134FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8135/* Opcode 0xf3 0x0f 0xec - invalid */
8136/* Opcode 0xf2 0x0f 0xec - invalid */
8137
8138/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8139FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8140/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8141FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8142/* Opcode 0xf3 0x0f 0xed - invalid */
8143/* Opcode 0xf2 0x0f 0xed - invalid */
8144
8145/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8146FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8147/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8148FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8149/* Opcode 0xf3 0x0f 0xee - invalid */
8150/* Opcode 0xf2 0x0f 0xee - invalid */
8151
8152
8153/** Opcode 0x0f 0xef - pxor Pq, Qq */
8154FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8155{
8156 IEMOP_MNEMONIC(pxor, "pxor");
8157 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8158}
8159
8160/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8161FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8162{
8163 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8164 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8165}
8166
8167/* Opcode 0xf3 0x0f 0xef - invalid */
8168/* Opcode 0xf2 0x0f 0xef - invalid */
8169
8170/* Opcode 0x0f 0xf0 - invalid */
8171/* Opcode 0x66 0x0f 0xf0 - invalid */
8172/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8173FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8174
8175/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8176FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8177/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8178FNIEMOP_STUB(iemOp_psllw_Vx_W);
8179/* Opcode 0xf2 0x0f 0xf1 - invalid */
8180
8181/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8182FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8183/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8184FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8185/* Opcode 0xf2 0x0f 0xf2 - invalid */
8186
8187/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8188FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8189/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8190FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8191/* Opcode 0xf2 0x0f 0xf3 - invalid */
8192
8193/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8194FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8195/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8196FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8197/* Opcode 0xf2 0x0f 0xf4 - invalid */
8198
8199/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8200FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8201/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8202FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8203/* Opcode 0xf2 0x0f 0xf5 - invalid */
8204
8205/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8206FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8207/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8208FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8209/* Opcode 0xf2 0x0f 0xf6 - invalid */
8210
8211/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8212FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8213/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8214FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8215/* Opcode 0xf2 0x0f 0xf7 - invalid */
8216
8217/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8218FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8219/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8220FNIEMOP_STUB(iemOp_psubb_Vx_W);
8221/* Opcode 0xf2 0x0f 0xf8 - invalid */
8222
8223/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8224FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8225/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8226FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8227/* Opcode 0xf2 0x0f 0xf9 - invalid */
8228
8229/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8230FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8231/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8232FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8233/* Opcode 0xf2 0x0f 0xfa - invalid */
8234
8235/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8236FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8237/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8238FNIEMOP_STUB(iemOp_psubq_Vx_W);
8239/* Opcode 0xf2 0x0f 0xfb - invalid */
8240
8241/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8242FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8243/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8244FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8245/* Opcode 0xf2 0x0f 0xfc - invalid */
8246
8247/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8248FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8249/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8250FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8251/* Opcode 0xf2 0x0f 0xfd - invalid */
8252
8253/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8254FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8255/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8256FNIEMOP_STUB(iemOp_paddd_Vx_W);
8257/* Opcode 0xf2 0x0f 0xfe - invalid */
8258
8259
8260/** Opcode **** 0x0f 0xff - UD0 */
8261FNIEMOP_DEF(iemOp_ud0)
8262{
8263 IEMOP_MNEMONIC(ud0, "ud0");
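    /* Intel CPUs consume a ModR/M byte (and decode the effective address)
       before raising #UD for ud0, while other CPUs fault straight away. */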
8264 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8265 {
8266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8267#ifndef TST_IEM_CHECK_MC
8268 RTGCPTR GCPtrEff;
8269 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8270 if (rcStrict != VINF_SUCCESS)
8271 return rcStrict;
8272#endif
8273 IEMOP_HLP_DONE_DECODING();
8274 }
8275 return IEMOP_RAISE_INVALID_OPCODE();
8276}
8277
8278
8279
8280/**
8281 * Two byte opcode map, first byte 0x0f.
8282 *
8283 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8284 * check if it needs updating as well when making changes.
8285 */
8286IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8287{
8288 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8289 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8290 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8291 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8292 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8293 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8294 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8295 /* 0x06 */ IEMOP_X4(iemOp_clts),
8296 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8297 /* 0x08 */ IEMOP_X4(iemOp_invd),
8298 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8299 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8300 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8301 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8302 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8303 /* 0x0e */ IEMOP_X4(iemOp_femms),
8304 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8305
8306 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vx_Wsd,
8307 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8308 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8309 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8310 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8311 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8312 /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
8313 /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8314 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8315 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8316 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8317 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8318 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8319 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8320 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8321 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8322
8323 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8324 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8325 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8326 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8327 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8328 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8329 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8330 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8331 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8332 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8333 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
8334 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8335 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
8336 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
8337 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8338 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8339
8340 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8341 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8342 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8343 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8344 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8345 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8346 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8347 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8348 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8349 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8350 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8351 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8352 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8353 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8354 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8355 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8356
8357 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8358 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8359 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8360 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8361 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8362 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8363 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8364 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8365 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8366 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8367 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8368 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8369 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8370 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8371 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8372 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8373
8374 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8375 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
8376 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
8377 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
8378 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8379 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8380 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8381 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8382 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
8383 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
8384 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
8385 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8386 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
8387 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
8388 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
8389 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
8390
8391 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8392 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8393 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8394 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8395 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8396 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8397 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8398 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8399 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8400 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8401 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8402 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8403 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8404 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8405 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8406 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,
8407
8408 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
8409 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8410 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8411 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8412 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8413 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8414 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8415 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8416
8417 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8418 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8419 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8420 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8421 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
8422 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
8423 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
8424 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
8425
    /* 0x80 */  IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */  IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */  IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */  IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */  IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */  IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */  IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */  IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */  IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */  IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */  IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */  IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */  IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */  IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */  IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */  IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */  IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */  IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */  IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */  IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */  IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */  IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */  IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */  IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */  IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */  IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */  IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */  IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */  IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */  IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */  IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */  IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */  IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */  IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */  IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */  IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */  IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */  IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */  IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */  IEMOP_X4(iemOp_rsm),
    /* 0xab */  IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */  IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */  IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */  IEMOP_X4(iemOp_Grp15),
    /* 0xaf */  IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */  IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */  IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */  IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */  IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */  IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */  IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */  IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */  IEMOP_X4(iemOp_movzx_Gv_Ew),
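    /* Note for 0xbc/0xbd below: only the 0xf3 column selects tzcnt/lzcnt;
       the other columns keep bsf/bsr, mirroring how CPUs without BMI1/LZCNT
       decode the prefixed encodings. */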
    /* 0xb8 */  iemOp_jmpe,                 iemOp_InvalidNeedRM,        iemOp_popcnt_Gv_Ev,         iemOp_InvalidNeedRM,
    /* 0xb9 */  IEMOP_X4(iemOp_Grp10),
    /* 0xba */  IEMOP_X4(iemOp_Grp8),
    /* 0xbb */  IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */  iemOp_bsf_Gv_Ev,            iemOp_bsf_Gv_Ev,            iemOp_tzcnt_Gv_Ev,          iemOp_bsf_Gv_Ev,
    /* 0xbd */  iemOp_bsr_Gv_Ev,            iemOp_bsr_Gv_Ev,            iemOp_lzcnt_Gv_Ev,          iemOp_bsr_Gv_Ev,
    /* 0xbe */  IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */  IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */  IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */  IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib,     iemOp_cmppd_Vpd_Wpd_Ib,     iemOp_cmpss_Vss_Wss_Ib,     iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc4 */  iemOp_pinsrw_Pq_RyMw_Ib,    iemOp_pinsrw_Vdq_RyMw_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib,      iemOp_pextrw_Gd_Udq_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib,    iemOp_shufpd_Vpd_Wpd_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */  IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */  IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */  IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */  IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */  IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */  IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */  IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */  IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */  iemOp_InvalidNeedRM,        iemOp_addsubpd_Vpd_Wpd,     iemOp_InvalidNeedRM,        iemOp_addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pq_Qq,          iemOp_psrlw_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_psrld_Pq_Qq,          iemOp_psrld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq,          iemOp_psrlq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_paddq_Pq_Qq,          iemOp_paddq_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_pmullw_Pq_Qq,         iemOp_pmullw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM,        iemOp_movq_Wq_Vq,           iemOp_movq2dq_Vdq_Nq,       iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq,       iemOp_pmovmskb_Gd_Ux,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq,        iemOp_psubusb_Vx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq,        iemOp_psubusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_pminub_Pq_Qq,         iemOp_pminub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_pand_Pq_Qq,           iemOp_pand_Vx_W,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_paddusb_Pq_Qq,        iemOp_paddusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_paddusw_Pq_Qq,        iemOp_paddusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_pmaxub_Pq_Qq,         iemOp_pmaxub_Vx_W,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_pandn_Pq_Qq,          iemOp_pandn_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_pavgb_Pq_Qq,          iemOp_pavgb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_psraw_Pq_Qq,          iemOp_psraw_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_psrad_Pq_Qq,          iemOp_psrad_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq,          iemOp_pavgw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq,        iemOp_pmulhuw_Vx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq,         iemOp_pmulhw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM,        iemOp_cvttpd2dq_Vx_Wpd,     iemOp_cvtdq2pd_Vx_Wpd,      iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq,         iemOp_movntdq_Mx_Vx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq,         iemOp_psubsb_Vx_W,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq,         iemOp_psubsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_pminsw_Pq_Qq,         iemOp_pminsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_por_Pq_Qq,            iemOp_por_Vx_W,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_paddsb_Pq_Qq,         iemOp_paddsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_paddsw_Pq_Qq,         iemOp_paddsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq,         iemOp_pmaxsw_Vx_W,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_pxor_Pq_Qq,           iemOp_pxor_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_lddqu_Vx_Mx,
    /* 0xf1 */  iemOp_psllw_Pq_Qq,          iemOp_psllw_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_pslld_Pq_Qq,          iemOp_pslld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_psllq_Pq_Qq,          iemOp_psllq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq,        iemOp_pmuludq_Vx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq,        iemOp_pmaddwd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq,         iemOp_psadbw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq,       iemOp_maskmovdqu_Vdq_Udq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_psubb_Pq_Qq,          iemOp_psubb_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_psubw_Pq_Qq,          iemOp_psubw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_psubd_Pq_Qq,          iemOp_psubd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_psubq_Pq_Qq,          iemOp_psubq_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_paddb_Pq_Qq,          iemOp_paddb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_paddw_Pq_Qq,          iemOp_paddw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_paddd_Pq_Qq,          iemOp_paddd_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);

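/*
 * A minimal sketch (not part of the build) of how the two-byte dispatcher is
 * assumed to consume g_apfnTwoByteMap: each opcode byte owns four consecutive
 * entries, indexed by the last SSE prefix seen (0 = none, 1 = 0x66, 2 = 0xf3,
 * 3 = 0xf2), which keeps the map at 256 * 4 = 1024 entries as asserted above.
 * The idxPrefix field name is an assumption here.
 */
#if 0
FNIEMOP_DEF(iemOp_TwoByteEscape_Sketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Four columns per opcode byte; the prefix index picks the column. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif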