VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 66746

Last change on this file since 66746 was 66746, checked in by vboxsync, 8 years ago

IEM: Implemented movsd Vsd,Wsd (0xf2 0x0f 0x10).

1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66746 2017-05-02 11:46:46Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
38 switch (pVCpu->iem.s.enmEffOpSize)
39 {
40 case IEMMODE_16BIT:
41 IEM_MC_BEGIN(0, 1);
42 IEM_MC_LOCAL(uint16_t, u16Ldtr);
43 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
44 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
45 IEM_MC_ADVANCE_RIP();
46 IEM_MC_END();
47 break;
48
49 case IEMMODE_32BIT:
50 IEM_MC_BEGIN(0, 1);
51 IEM_MC_LOCAL(uint32_t, u32Ldtr);
52 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
53 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
54 IEM_MC_ADVANCE_RIP();
55 IEM_MC_END();
56 break;
57
58 case IEMMODE_64BIT:
59 IEM_MC_BEGIN(0, 1);
60 IEM_MC_LOCAL(uint64_t, u64Ldtr);
61 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
62 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
63 IEM_MC_ADVANCE_RIP();
64 IEM_MC_END();
65 break;
66
67 IEM_NOT_REACHED_DEFAULT_CASE_RET();
68 }
69 }
70 else
71 {
72 IEM_MC_BEGIN(0, 2);
73 IEM_MC_LOCAL(uint16_t, u16Ldtr);
74 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
75 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
76 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
77 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
78 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
79 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
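
/*
 * A standalone sketch (not from this file) of the ModRM test used above:
 * mod == 3 selects the register form, anything else a memory operand. The
 * mask and shift values assume the standard x86 ModRM layout
 * (mod: bits 7-6, reg: bits 5-3, rm: bits 2-0); names are illustrative.
 */
#if 0
#include <stdint.h>
#include <stdbool.h>

static bool IsRegisterForm(uint8_t bRm)
{
    /* Same test as (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT). */
    return (bRm & 0xc0) == 0xc0;
}
#endif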
85
86
87/** Opcode 0x0f 0x00 /1. */
88FNIEMOPRM_DEF(iemOp_Grp6_str)
89{
90 IEMOP_MNEMONIC(str, "str Rv/Mw");
91 IEMOP_HLP_MIN_286();
92 IEMOP_HLP_NO_REAL_OR_V86_MODE();
93
94 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
95 {
96 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
97 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
98 switch (pVCpu->iem.s.enmEffOpSize)
99 {
100 case IEMMODE_16BIT:
101 IEM_MC_BEGIN(0, 1);
102 IEM_MC_LOCAL(uint16_t, u16Tr);
103 IEM_MC_FETCH_TR_U16(u16Tr);
104 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
105 IEM_MC_ADVANCE_RIP();
106 IEM_MC_END();
107 break;
108
109 case IEMMODE_32BIT:
110 IEM_MC_BEGIN(0, 1);
111 IEM_MC_LOCAL(uint32_t, u32Tr);
112 IEM_MC_FETCH_TR_U32(u32Tr);
113 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
114 IEM_MC_ADVANCE_RIP();
115 IEM_MC_END();
116 break;
117
118 case IEMMODE_64BIT:
119 IEM_MC_BEGIN(0, 1);
120 IEM_MC_LOCAL(uint64_t, u64Tr);
121 IEM_MC_FETCH_TR_U64(u64Tr);
122 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
123 IEM_MC_ADVANCE_RIP();
124 IEM_MC_END();
125 break;
126
127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
128 }
129 }
130 else
131 {
132 IEM_MC_BEGIN(0, 2);
133 IEM_MC_LOCAL(uint16_t, u16Tr);
134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
136 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
137 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
138 IEM_MC_FETCH_TR_U16(u16Tr);
139 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
140 IEM_MC_ADVANCE_RIP();
141 IEM_MC_END();
142 }
143 return VINF_SUCCESS;
144}
145
146
147/** Opcode 0x0f 0x00 /2. */
148FNIEMOPRM_DEF(iemOp_Grp6_lldt)
149{
150 IEMOP_MNEMONIC(lldt, "lldt Ew");
151 IEMOP_HLP_MIN_286();
152 IEMOP_HLP_NO_REAL_OR_V86_MODE();
153
154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
155 {
156 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
157 IEM_MC_BEGIN(1, 0);
158 IEM_MC_ARG(uint16_t, u16Sel, 0);
159 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
160 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
161 IEM_MC_END();
162 }
163 else
164 {
165 IEM_MC_BEGIN(1, 1);
166 IEM_MC_ARG(uint16_t, u16Sel, 0);
167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
169 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
170 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
171 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
172 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
173 IEM_MC_END();
174 }
175 return VINF_SUCCESS;
176}
177
178
179/** Opcode 0x0f 0x00 /3. */
180FNIEMOPRM_DEF(iemOp_Grp6_ltr)
181{
182 IEMOP_MNEMONIC(ltr, "ltr Ew");
183 IEMOP_HLP_MIN_286();
184 IEMOP_HLP_NO_REAL_OR_V86_MODE();
185
186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
187 {
188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
189 IEM_MC_BEGIN(1, 0);
190 IEM_MC_ARG(uint16_t, u16Sel, 0);
191 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
192 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
193 IEM_MC_END();
194 }
195 else
196 {
197 IEM_MC_BEGIN(1, 1);
198 IEM_MC_ARG(uint16_t, u16Sel, 0);
199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
202 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
203 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
204 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
205 IEM_MC_END();
206 }
207 return VINF_SUCCESS;
208}
209
210
211/** Common worker for verr/verw, opcode 0x0f 0x00 /4 and /5. */
212FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
213{
214 IEMOP_HLP_MIN_286();
215 IEMOP_HLP_NO_REAL_OR_V86_MODE();
216
217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
218 {
219 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
220 IEM_MC_BEGIN(2, 0);
221 IEM_MC_ARG(uint16_t, u16Sel, 0);
222 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
223 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
224 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
225 IEM_MC_END();
226 }
227 else
228 {
229 IEM_MC_BEGIN(2, 1);
230 IEM_MC_ARG(uint16_t, u16Sel, 0);
231 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
234 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
235 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
236 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
237 IEM_MC_END();
238 }
239 return VINF_SUCCESS;
240}
241
242
243/** Opcode 0x0f 0x00 /4. */
244FNIEMOPRM_DEF(iemOp_Grp6_verr)
245{
246 IEMOP_MNEMONIC(verr, "verr Ew");
247 IEMOP_HLP_MIN_286();
248 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
249}
250
251
252/** Opcode 0x0f 0x00 /5. */
253FNIEMOPRM_DEF(iemOp_Grp6_verw)
254{
255 IEMOP_MNEMONIC(verw, "verw Ew");
256 IEMOP_HLP_MIN_286();
257 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
258}
259
260
261/**
262 * Group 6 jump table.
263 */
264IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
265{
266 iemOp_Grp6_sldt,
267 iemOp_Grp6_str,
268 iemOp_Grp6_lldt,
269 iemOp_Grp6_ltr,
270 iemOp_Grp6_verr,
271 iemOp_Grp6_verw,
272 iemOp_InvalidWithRM,
273 iemOp_InvalidWithRM
274};
275
276/** Opcode 0x0f 0x00. */
277FNIEMOP_DEF(iemOp_Grp6)
278{
279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
280 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
281}
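
/*
 * Sketch of the jump-table dispatch pattern used above (simplified, not the
 * IEM code itself): the ModRM reg field acts as an opcode extension and
 * indexes an 8-entry handler table. All names below are hypothetical.
 */
#if 0
#include <stdint.h>
#include <stdio.h>

typedef void (*PFNGROUPHANDLER)(uint8_t bRm);

static void GroupStub(uint8_t bRm) { printf("/%u\n", (unsigned)((bRm >> 3) & 7)); }

static void DispatchGroup(uint8_t bRm)
{
    static const PFNGROUPHANDLER s_apfn[8] =
    {
        GroupStub, GroupStub, GroupStub, GroupStub, /* /0../3 */
        GroupStub, GroupStub, GroupStub, GroupStub  /* /4../7 */
    };
    s_apfn[(bRm >> 3) & 7](bRm); /* the reg field selects the group member */
}
#endif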
282
283
284/** Opcode 0x0f 0x01 /0. */
285FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
286{
287 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
288 IEMOP_HLP_MIN_286();
289 IEMOP_HLP_64BIT_OP_SIZE();
290 IEM_MC_BEGIN(2, 1);
291 IEM_MC_ARG(uint8_t, iEffSeg, 0);
292 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
295 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
296 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
297 IEM_MC_END();
298 return VINF_SUCCESS;
299}
300
301
302/** Opcode 0x0f 0x01 0xc1. */
303FNIEMOP_DEF(iemOp_Grp7_vmcall)
304{
305 IEMOP_BITCH_ABOUT_STUB();
306 return IEMOP_RAISE_INVALID_OPCODE();
307}
308
309
310/** Opcode 0x0f 0x01 0xc2. */
311FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
312{
313 IEMOP_BITCH_ABOUT_STUB();
314 return IEMOP_RAISE_INVALID_OPCODE();
315}
316
317
318/** Opcode 0x0f 0x01 0xc3. */
319FNIEMOP_DEF(iemOp_Grp7_vmresume)
320{
321 IEMOP_BITCH_ABOUT_STUB();
322 return IEMOP_RAISE_INVALID_OPCODE();
323}
324
325
326/** Opcode 0x0f 0x01 0xc4. */
327FNIEMOP_DEF(iemOp_Grp7_vmxoff)
328{
329 IEMOP_BITCH_ABOUT_STUB();
330 return IEMOP_RAISE_INVALID_OPCODE();
331}
332
333
334/** Opcode 0x0f 0x01 /1. */
335FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
336{
337 IEMOP_MNEMONIC(sidt, "sidt Ms");
338 IEMOP_HLP_MIN_286();
339 IEMOP_HLP_64BIT_OP_SIZE();
340 IEM_MC_BEGIN(2, 1);
341 IEM_MC_ARG(uint8_t, iEffSeg, 0);
342 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
345 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
346 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
347 IEM_MC_END();
348 return VINF_SUCCESS;
349}
350
351
352/** Opcode 0x0f 0x01 0xc8. */
353FNIEMOP_DEF(iemOp_Grp7_monitor)
354{
355 IEMOP_MNEMONIC(monitor, "monitor");
356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
357 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
358}
359
360
361/** Opcode 0x0f 0x01 0xc9. */
362FNIEMOP_DEF(iemOp_Grp7_mwait)
363{
364 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
366 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
367}
368
369
370/** Opcode 0x0f 0x01 /2. */
371FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
372{
373 IEMOP_MNEMONIC(lgdt, "lgdt");
374 IEMOP_HLP_64BIT_OP_SIZE();
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint8_t, iEffSeg, 0);
377 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
378 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
381 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
382 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
383 IEM_MC_END();
384 return VINF_SUCCESS;
385}
386
387
388/** Opcode 0x0f 0x01 0xd0. */
389FNIEMOP_DEF(iemOp_Grp7_xgetbv)
390{
391 IEMOP_MNEMONIC(xgetbv, "xgetbv");
392 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
393 {
394 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
395 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
396 }
397 return IEMOP_RAISE_INVALID_OPCODE();
398}
399
400
401/** Opcode 0x0f 0x01 0xd1. */
402FNIEMOP_DEF(iemOp_Grp7_xsetbv)
403{
404 IEMOP_MNEMONIC(xsetbv, "xsetbv");
405 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
406 {
407 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
408 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
409 }
410 return IEMOP_RAISE_INVALID_OPCODE();
411}
412
413
414/** Opcode 0x0f 0x01 /3. */
415FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
416{
417 IEMOP_MNEMONIC(lidt, "lidt");
418 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
419 ? IEMMODE_64BIT
420 : pVCpu->iem.s.enmEffOpSize;
421 IEM_MC_BEGIN(3, 1);
422 IEM_MC_ARG(uint8_t, iEffSeg, 0);
423 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
424 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
427 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
428 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
429 IEM_MC_END();
430 return VINF_SUCCESS;
431}
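
/*
 * Sketch of the operand-size rule lgdt/lidt follow above (simplified,
 * illustrative names): in 64-bit mode the descriptor-table loads always use
 * the 64-bit form regardless of operand-size prefixes; elsewhere the
 * current effective operand size applies.
 */
#if 0
#include <stdbool.h>

typedef enum MYOPSIZE { MYOPSIZE_16BIT, MYOPSIZE_32BIT, MYOPSIZE_64BIT } MYOPSIZE;

static MYOPSIZE EffOpSizeForLgdtLidt(bool fLongMode, MYOPSIZE enmEffOpSize)
{
    return fLongMode ? MYOPSIZE_64BIT : enmEffOpSize;
}
#endif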
432
433
434#ifdef VBOX_WITH_NESTED_HWVIRT
435/** Opcode 0x0f 0x01 0xd8. */
436FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
437{
438 IEMOP_MNEMONIC(vmrun, "vmrun");
439 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
440}
441
442/** Opcode 0x0f 0x01 0xd9. */
443FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
444{
445 IEMOP_MNEMONIC(vmmcall, "vmmcall");
446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
447}
448
449
450/** Opcode 0x0f 0x01 0xda. */
451FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
452{
453 IEMOP_MNEMONIC(vmload, "vmload");
454 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
455}
456
457
458/** Opcode 0x0f 0x01 0xdb. */
459FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
460{
461 IEMOP_MNEMONIC(vmsave, "vmsave");
462 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
463}
464
465
466/** Opcode 0x0f 0x01 0xdc. */
467FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
468{
469 IEMOP_MNEMONIC(stgi, "stgi");
470 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
471}
472
473
474/** Opcode 0x0f 0x01 0xdd. */
475FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
476{
477 IEMOP_MNEMONIC(clgi, "clgi");
478 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
479}
480
481
482/** Opcode 0x0f 0x01 0xdf. */
483FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
484{
485 IEMOP_MNEMONIC(invlpga, "invlpga");
486 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
487}
488
489
490/** Opcode 0x0f 0x01 0xde. */
491FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
492{
493 IEMOP_MNEMONIC(skinit, "skinit");
494 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
495}
496#else
497/** Opcode 0x0f 0x01 0xd8. */
498FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
499
500/** Opcode 0x0f 0x01 0xd9. */
501FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
502
503/** Opcode 0x0f 0x01 0xda. */
504FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
505
506/** Opcode 0x0f 0x01 0xdb. */
507FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
508
509/** Opcode 0x0f 0x01 0xdc. */
510FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
511
512/** Opcode 0x0f 0x01 0xdd. */
513FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
514
515/** Opcode 0x0f 0x01 0xdf. */
516FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
517
518/** Opcode 0x0f 0x01 0xde. */
519FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
520#endif /* VBOX_WITH_NESTED_HWVIRT */
521
522/** Opcode 0x0f 0x01 /4. */
523FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
524{
525 IEMOP_MNEMONIC(smsw, "smsw");
526 IEMOP_HLP_MIN_286();
527 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
528 {
529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
530 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
531 switch (pVCpu->iem.s.enmEffOpSize)
532 {
533 case IEMMODE_16BIT:
534 IEM_MC_BEGIN(0, 1);
535 IEM_MC_LOCAL(uint16_t, u16Tmp);
536 IEM_MC_FETCH_CR0_U16(u16Tmp);
537 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
538 { /* likely */ }
539 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
540 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
541 else
542 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
543 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
544 IEM_MC_ADVANCE_RIP();
545 IEM_MC_END();
546 return VINF_SUCCESS;
547
548 case IEMMODE_32BIT:
549 IEM_MC_BEGIN(0, 1);
550 IEM_MC_LOCAL(uint32_t, u32Tmp);
551 IEM_MC_FETCH_CR0_U32(u32Tmp);
552 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
553 IEM_MC_ADVANCE_RIP();
554 IEM_MC_END();
555 return VINF_SUCCESS;
556
557 case IEMMODE_64BIT:
558 IEM_MC_BEGIN(0, 1);
559 IEM_MC_LOCAL(uint64_t, u64Tmp);
560 IEM_MC_FETCH_CR0_U64(u64Tmp);
561 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
562 IEM_MC_ADVANCE_RIP();
563 IEM_MC_END();
564 return VINF_SUCCESS;
565
566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
567 }
568 }
569 else
570 {
571 /* Ignore operand size here, memory refs are always 16-bit. */
572 IEM_MC_BEGIN(0, 2);
573 IEM_MC_LOCAL(uint16_t, u16Tmp);
574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
577 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
578 IEM_MC_FETCH_CR0_U16(u16Tmp);
579 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
580 { /* likely */ }
581 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
582 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
583 else
584 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
585 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
586 IEM_MC_ADVANCE_RIP();
587 IEM_MC_END();
588 return VINF_SUCCESS;
589 }
590}
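
/*
 * Sketch of the CPU-dependent upper bits smsw stores above (standalone,
 * names assumed; mirrors the 286/386 behaviour the code emulates): the 286
 * returns bits 4-15 as set, the 386 returns bits 5-15 as set, and 486+
 * return the low word of CR0 unmodified.
 */
#if 0
#include <stdint.h>

static uint16_t SmswResult16(uint64_t uCr0, unsigned uCpuGeneration)
{
    uint16_t u16 = (uint16_t)uCr0;
    if (uCpuGeneration == 386)
        u16 |= 0xffe0;      /* 386: bits 5-15 read as one */
    else if (uCpuGeneration < 386)
        u16 |= 0xfff0;      /* 286: bits 4-15 read as one */
    return u16;             /* 486+: raw low word of CR0  */
}
#endif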
591
592
593/** Opcode 0x0f 0x01 /6. */
594FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
595{
596 /* The operand size is effectively ignored, all is 16-bit and only the
597 lower four bits (PE, MP, EM and TS) are used. */
598 IEMOP_MNEMONIC(lmsw, "lmsw");
599 IEMOP_HLP_MIN_286();
600 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
601 {
602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
603 IEM_MC_BEGIN(1, 0);
604 IEM_MC_ARG(uint16_t, u16Tmp, 0);
605 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
606 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
607 IEM_MC_END();
608 }
609 else
610 {
611 IEM_MC_BEGIN(1, 1);
612 IEM_MC_ARG(uint16_t, u16Tmp, 0);
613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
616 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
617 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
618 IEM_MC_END();
619 }
620 return VINF_SUCCESS;
621}
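
/*
 * Sketch of the lmsw write semantics handled by iemCImpl_lmsw (per the SDM
 * definition, not the actual worker): only CR0 bits 0-3 (PE, MP, EM, TS)
 * can change, and PE can be set but never cleared.
 */
#if 0
#include <stdint.h>

static uint64_t ApplyLmsw(uint64_t uCr0, uint16_t u16Msw)
{
    uint64_t uNew = (uCr0 & ~(uint64_t)0xe) | (u16Msw & 0xe); /* MP, EM, TS  */
    uNew |= (uCr0 | u16Msw) & 1;                              /* PE is sticky */
    return uNew;
}
#endif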
622
623
624/** Opcode 0x0f 0x01 /7. */
625FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
626{
627 IEMOP_MNEMONIC(invlpg, "invlpg");
628 IEMOP_HLP_MIN_486();
629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
630 IEM_MC_BEGIN(1, 1);
631 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
633 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
634 IEM_MC_END();
635 return VINF_SUCCESS;
636}
637
638
639/** Opcode 0x0f 0x01 /7. */
640FNIEMOP_DEF(iemOp_Grp7_swapgs)
641{
642 IEMOP_MNEMONIC(swapgs, "swapgs");
643 IEMOP_HLP_ONLY_64BIT();
644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
646}
647
648
649/** Opcode 0x0f 0x01 /7. */
650FNIEMOP_DEF(iemOp_Grp7_rdtscp)
651{
652 IEMOP_MNEMONIC(rdtscp, "rdtscp");
653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
654 /** @todo Move this SVM intercept from here into the cimpl worker. */
655 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
656 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
657}
658
659
660/**
661 * Group 7 jump table, memory variant.
662 */
663IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
664{
665 iemOp_Grp7_sgdt,
666 iemOp_Grp7_sidt,
667 iemOp_Grp7_lgdt,
668 iemOp_Grp7_lidt,
669 iemOp_Grp7_smsw,
670 iemOp_InvalidWithRM,
671 iemOp_Grp7_lmsw,
672 iemOp_Grp7_invlpg
673};
674
675
676/** Opcode 0x0f 0x01. */
677FNIEMOP_DEF(iemOp_Grp7)
678{
679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
680 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
681 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
682
683 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
684 {
685 case 0:
686 switch (bRm & X86_MODRM_RM_MASK)
687 {
688 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
689 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
690 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
691 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
692 }
693 return IEMOP_RAISE_INVALID_OPCODE();
694
695 case 1:
696 switch (bRm & X86_MODRM_RM_MASK)
697 {
698 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
699 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
700 }
701 return IEMOP_RAISE_INVALID_OPCODE();
702
703 case 2:
704 switch (bRm & X86_MODRM_RM_MASK)
705 {
706 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
707 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
708 }
709 return IEMOP_RAISE_INVALID_OPCODE();
710
711 case 3:
712 switch (bRm & X86_MODRM_RM_MASK)
713 {
714 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
715 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
716 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
717 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
718 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
719 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
720 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
721 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
723 }
724
725 case 4:
726 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
727
728 case 5:
729 return IEMOP_RAISE_INVALID_OPCODE();
730
731 case 6:
732 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
733
734 case 7:
735 switch (bRm & X86_MODRM_RM_MASK)
736 {
737 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
738 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
739 }
740 return IEMOP_RAISE_INVALID_OPCODE();
741
742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
743 }
744}
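
/*
 * Sketch of the two-level decode group 7 uses above (simplified,
 * illustrative names): memory encodings go straight through the 8-entry
 * /reg table, while mod==3 encodings additionally decode the rm field to
 * pick instructions such as monitor/mwait or xgetbv/xsetbv.
 */
#if 0
#include <stdint.h>

static int DecodeGroup7(uint8_t bRm)
{
    unsigned const iReg = (bRm >> 3) & 7;
    unsigned const iRm  = bRm & 7;
    if ((bRm & 0xc0) != 0xc0)
        return (int)iReg;            /* memory form: /reg table lookup    */
    return (int)(iReg * 8 + iRm);    /* register form: /reg + rm sub-case */
}
#endif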
745
746/** Common worker for lar/lsl, opcodes 0x0f 0x02 and 0x0f 0x03. */
747FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
748{
749 IEMOP_HLP_NO_REAL_OR_V86_MODE();
750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
751
752 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
753 {
754 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
755 switch (pVCpu->iem.s.enmEffOpSize)
756 {
757 case IEMMODE_16BIT:
758 {
759 IEM_MC_BEGIN(3, 0);
760 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
761 IEM_MC_ARG(uint16_t, u16Sel, 1);
762 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
763
764 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
765 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
766 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
767
768 IEM_MC_END();
769 return VINF_SUCCESS;
770 }
771
772 case IEMMODE_32BIT:
773 case IEMMODE_64BIT:
774 {
775 IEM_MC_BEGIN(3, 0);
776 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
777 IEM_MC_ARG(uint16_t, u16Sel, 1);
778 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
779
780 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
781 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
782 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
783
784 IEM_MC_END();
785 return VINF_SUCCESS;
786 }
787
788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
789 }
790 }
791 else
792 {
793 switch (pVCpu->iem.s.enmEffOpSize)
794 {
795 case IEMMODE_16BIT:
796 {
797 IEM_MC_BEGIN(3, 1);
798 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
799 IEM_MC_ARG(uint16_t, u16Sel, 1);
800 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
802
803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
804 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
805
806 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
807 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
808 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
809
810 IEM_MC_END();
811 return VINF_SUCCESS;
812 }
813
814 case IEMMODE_32BIT:
815 case IEMMODE_64BIT:
816 {
817 IEM_MC_BEGIN(3, 1);
818 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
819 IEM_MC_ARG(uint16_t, u16Sel, 1);
820 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
822
823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
824 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
825/** @todo testcase: make sure it's a 16-bit read. */
826
827 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
828 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
829 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
830
831 IEM_MC_END();
832 return VINF_SUCCESS;
833 }
834
835 IEM_NOT_REACHED_DEFAULT_CASE_RET();
836 }
837 }
838}
839
840
841
842/** Opcode 0x0f 0x02. */
843FNIEMOP_DEF(iemOp_lar_Gv_Ew)
844{
845 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
846 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
847}
848
849
850/** Opcode 0x0f 0x03. */
851FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
852{
853 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
854 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
855}
856
857
858/** Opcode 0x0f 0x05. */
859FNIEMOP_DEF(iemOp_syscall)
860{
861 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
863 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
864}
865
866
867/** Opcode 0x0f 0x06. */
868FNIEMOP_DEF(iemOp_clts)
869{
870 IEMOP_MNEMONIC(clts, "clts");
871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
872 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
873}
874
875
876/** Opcode 0x0f 0x07. */
877FNIEMOP_DEF(iemOp_sysret)
878{
879 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
881 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
882}
883
884
885/** Opcode 0x0f 0x08. */
886FNIEMOP_DEF(iemOp_invd)
887{
888 IEMOP_MNEMONIC(invd, "invd");
889#ifdef VBOX_WITH_NESTED_HWVIRT
890 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
891 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
892#else
893 RT_NOREF_PV(pVCpu);
894#endif
895 /** @todo implement invd for the regular case (above only handles nested SVM
896 * exits). */
897 IEMOP_BITCH_ABOUT_STUB();
898 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
899}
900
903
904/** Opcode 0x0f 0x09. */
905FNIEMOP_DEF(iemOp_wbinvd)
906{
907 IEMOP_MNEMONIC(wbinvd, "wbinvd");
908 IEMOP_HLP_MIN_486();
909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
910 IEM_MC_BEGIN(0, 0);
911 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
912 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
913 IEM_MC_ADVANCE_RIP();
914 IEM_MC_END();
915 return VINF_SUCCESS; /* ignore for now */
916}
917
918
919/** Opcode 0x0f 0x0b. */
920FNIEMOP_DEF(iemOp_ud2)
921{
922 IEMOP_MNEMONIC(ud2, "ud2");
923 return IEMOP_RAISE_INVALID_OPCODE();
924}
925
926/** Opcode 0x0f 0x0d. */
927FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
928{
929 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
930 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
931 {
932 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
933 return IEMOP_RAISE_INVALID_OPCODE();
934 }
935
936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
938 {
939 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
940 return IEMOP_RAISE_INVALID_OPCODE();
941 }
942
943 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
944 {
945 case 2: /* Aliased to /0 for the time being. */
946 case 4: /* Aliased to /0 for the time being. */
947 case 5: /* Aliased to /0 for the time being. */
948 case 6: /* Aliased to /0 for the time being. */
949 case 7: /* Aliased to /0 for the time being. */
950 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
951 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
952 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
954 }
955
956 IEM_MC_BEGIN(0, 1);
957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
960 /* Currently a NOP. */
961 NOREF(GCPtrEffSrc);
962 IEM_MC_ADVANCE_RIP();
963 IEM_MC_END();
964 return VINF_SUCCESS;
965}
966
967
968/** Opcode 0x0f 0x0e. */
969FNIEMOP_STUB(iemOp_femms);
970
971
972/** Opcode 0x0f 0x0f. */
973FNIEMOP_DEF(iemOp_3Dnow)
974{
975 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
976 {
977 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
978 return IEMOP_RAISE_INVALID_OPCODE();
979 }
980
981#ifdef IEM_WITH_3DNOW
982 /* This is pretty sparse, use switch instead of table. */
983 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
984 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
985#else
986 IEMOP_BITCH_ABOUT_STUB();
987 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
988#endif
989}
990
991
992/**
993 * @opcode 0x10
994 * @oppfx none
995 * @opcpuid sse
996 * @opgroup og_sse_simdfp_datamove
997 * @opxcpttype 4UA
998 * @optest op1=1 op2=2 -> op1=2
999 * @optest op1=0 op2=-22 -> op1=-22
1000 */
1001FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1002{
1003 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1004 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1006 {
1007 /*
1008 * Register, register.
1009 */
1010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1011 IEM_MC_BEGIN(0, 0);
1012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1013 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1014 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1015 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1016 IEM_MC_ADVANCE_RIP();
1017 IEM_MC_END();
1018 }
1019 else
1020 {
1021 /*
1022 * Memory, register.
1023 */
1024 IEM_MC_BEGIN(0, 2);
1025 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1027
1028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1030 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1032
1033 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1034 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1035
1036 IEM_MC_ADVANCE_RIP();
1037 IEM_MC_END();
1038 }
1039 return VINF_SUCCESS;
1041}
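
/*
 * Sketch of the movups data movement above (standalone, not IEM code): an
 * unaligned 16-byte copy with no alignment fault, unlike the aligned
 * movaps forms which raise #GP on a misaligned operand.
 */
#if 0
#include <stdint.h>
#include <string.h>

static void Movups(uint8_t abDst[16], const uint8_t abSrc[16])
{
    memcpy(abDst, abSrc, 16); /* plain byte copy; no 16-byte alignment required */
}
#endif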
1042
1043
1044/**
1045 * @opcode 0x10
1046 * @oppfx 0x66
1047 * @opcpuid sse2
1048 * @opgroup og_sse2_pcksclr_datamove
1049 * @opxcpttype 4UA
1050 * @optest op1=1 op2=2 -> op1=2
1051 * @optest op1=0 op2=-42 -> op1=-42
1052 */
1053FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1054{
1055 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1058 {
1059 /*
1060 * Register, register.
1061 */
1062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1063 IEM_MC_BEGIN(0, 0);
1064 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1066 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1067 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1068 IEM_MC_ADVANCE_RIP();
1069 IEM_MC_END();
1070 }
1071 else
1072 {
1073 /*
1074 * Memory, register.
1075 */
1076 IEM_MC_BEGIN(0, 2);
1077 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1079
1080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1082 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1083 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1084
1085 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1086 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1087
1088 IEM_MC_ADVANCE_RIP();
1089 IEM_MC_END();
1090 }
1091 return VINF_SUCCESS;
1092}
1093
1094
1095/**
1096 * @opcode 0x10
1097 * @oppfx 0xf3
1098 * @opcpuid sse
1099 * @opgroup og_sse_simdfp_datamove
1100 * @opxcpttype 5
1101 * @optest op1=1 op2=2 -> op1=2
1102 * @optest op1=0 op2=-22 -> op1=-22
1103 */
1104FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1105{
1106 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1108 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1109 {
1110 /*
1111 * Register, register.
1112 */
1113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1114 IEM_MC_BEGIN(0, 1);
1115 IEM_MC_LOCAL(uint32_t, uSrc);
1116
1117 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1118 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1119 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1120 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1121
1122 IEM_MC_ADVANCE_RIP();
1123 IEM_MC_END();
1124 }
1125 else
1126 {
1127 /*
1128 * Memory, register.
1129 */
1130 IEM_MC_BEGIN(0, 2);
1131 IEM_MC_LOCAL(uint32_t, uSrc);
1132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1133
1134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1136 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1137 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1138
1139 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1140 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1141
1142 IEM_MC_ADVANCE_RIP();
1143 IEM_MC_END();
1144 }
1145 return VINF_SUCCESS;
1146}
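
/*
 * The asymmetry above, IEM_MC_STORE_XREG_U32 for the register form versus
 * IEM_MC_STORE_XREG_U32_ZX_U128 for the memory form, mirrors the architected
 * movss behaviour. A standalone sketch (the XREG type and helper names are
 * assumptions):
 */
#if 0
#include <stdint.h>
#include <string.h>

typedef struct XREG { uint32_t au32[4]; } XREG;

/* movss xmm,xmm: only the low dword moves; bits 127:32 are preserved. */
static void MovssRegReg(XREG *pDst, const XREG *pSrc)
{
    pDst->au32[0] = pSrc->au32[0];
}

/* movss xmm,m32: the memory form zero-extends through bit 127. */
static void MovssRegMem(XREG *pDst, const uint32_t *pu32Src)
{
    memset(pDst, 0, sizeof(*pDst));
    pDst->au32[0] = *pu32Src;
}
#endif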
1147
1148
1149/**
1150 * @opcode 0x10
1151 * @oppfx 0xf2
1152 * @opcpuid sse2
1153 * @opgroup og_sse2_pcksclr_datamove
1154 * @opxcpttype 5
1155 * @optest op1=1 op2=2 -> op1=2
1156 * @optest op1=0 op2=-42 -> op1=-42
1157 */
1158FNIEMOP_DEF(iemOp_movsd_Vx_Wsd)
1159{
1160 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZxReg, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1162 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1163 {
1164 /*
1165 * Register, register.
1166 */
1167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1168 IEM_MC_BEGIN(0, 1);
1169 IEM_MC_LOCAL(uint64_t, uSrc);
1170
1171 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1172 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1173 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1174 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1175
1176 IEM_MC_ADVANCE_RIP();
1177 IEM_MC_END();
1178 }
1179 else
1180 {
1181 /*
1182 * Memory, register.
1183 */
1184 IEM_MC_BEGIN(0, 2);
1185 IEM_MC_LOCAL(uint64_t, uSrc);
1186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1187
1188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1190 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1191 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1192
1193 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1194 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1195
1196 IEM_MC_ADVANCE_RIP();
1197 IEM_MC_END();
1198 }
1199 return VINF_SUCCESS;
1200}
1201
1202
1203/**
1204 * @opcode 0x11
1205 * @oppfx none
1206 * @opcpuid sse
1207 * @opgroup og_sse_simdfp_datamove
1208 * @opxcpttype 4UA
1209 * @optest op1=1 op2=2 -> op1=2
1210 * @optest op1=0 op2=-42 -> op1=-42
1211 */
1212FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1213{
1214 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1216 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1217 {
1218 /*
1219 * Register, register.
1220 */
1221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1222 IEM_MC_BEGIN(0, 0);
1223 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1224 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1225 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1226 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1227 IEM_MC_ADVANCE_RIP();
1228 IEM_MC_END();
1229 }
1230 else
1231 {
1232 /*
1233 * Memory, register.
1234 */
1235 IEM_MC_BEGIN(0, 2);
1236 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1238
1239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1243
1244 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1245 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1246
1247 IEM_MC_ADVANCE_RIP();
1248 IEM_MC_END();
1249 }
1250 return VINF_SUCCESS;
1251}
1252
1253
1254/**
1255 * @opcode 0x11
1256 * @oppfx 0x66
1257 * @opcpuid sse2
1258 * @opgroup og_sse2_pcksclr_datamove
1259 * @opxcpttype 4UA
1260 * @optest op1=1 op2=2 -> op1=2
1261 * @optest op1=0 op2=-42 -> op1=-42
1262 */
1263FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1264{
1265 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1267 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1268 {
1269 /*
1270 * Register, register.
1271 */
1272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1273 IEM_MC_BEGIN(0, 0);
1274 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1275 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1276 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1277 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1278 IEM_MC_ADVANCE_RIP();
1279 IEM_MC_END();
1280 }
1281 else
1282 {
1283 /*
1284 * Memory, register.
1285 */
1286 IEM_MC_BEGIN(0, 2);
1287 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1289
1290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1292 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1294
1295 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1296 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1297
1298 IEM_MC_ADVANCE_RIP();
1299 IEM_MC_END();
1300 }
1301 return VINF_SUCCESS;
1302}
1303
1304
1305/**
1306 * @opcode 0x11
1307 * @oppfx 0xf3
1308 * @opcpuid sse
1309 * @opgroup og_sse_simdfp_datamove
1310 * @opxcpttype 5
1311 * @optest op1=1 op2=2 -> op1=2
1312 * @optest op1=0 op2=-22 -> op1=-22
1313 */
1314FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1315{
1316 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1319 {
1320 /*
1321 * Register, register.
1322 */
1323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1324 IEM_MC_BEGIN(0, 1);
1325 IEM_MC_LOCAL(uint32_t, uSrc);
1326
1327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1328 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1329 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1330 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1331
1332 IEM_MC_ADVANCE_RIP();
1333 IEM_MC_END();
1334 }
1335 else
1336 {
1337 /*
1338 * Memory, register.
1339 */
1340 IEM_MC_BEGIN(0, 2);
1341 IEM_MC_LOCAL(uint32_t, uSrc);
1342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1343
1344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1346 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1348
1349 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1350 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1351
1352 IEM_MC_ADVANCE_RIP();
1353 IEM_MC_END();
1354 }
1355 return VINF_SUCCESS;
1356}
1357
1358
1359/**
1360 * @opcode 0x11
1361 * @oppfx 0xf2
1362 * @opcpuid sse2
1363 * @opgroup og_sse2_pcksclr_datamove
1364 * @opxcpttype 5
1365 * @optest op1=1 op2=2 -> op1=2
1366 * @optest op1=0 op2=-42 -> op1=-42
1367 */
1368FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1369{
1370 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1373 {
1374 /*
1375 * Register, register.
1376 */
1377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1378 IEM_MC_BEGIN(0, 1);
1379 IEM_MC_LOCAL(uint64_t, uSrc);
1380
1381 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1382 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1383 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1384 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1385
1386 IEM_MC_ADVANCE_RIP();
1387 IEM_MC_END();
1388 }
1389 else
1390 {
1391 /*
1392 * Memory, register.
1393 */
1394 IEM_MC_BEGIN(0, 2);
1395 IEM_MC_LOCAL(uint64_t, uSrc);
1396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1397
1398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1400 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1402
1403 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1404 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1405
1406 IEM_MC_ADVANCE_RIP();
1407 IEM_MC_END();
1408 }
1409 return VINF_SUCCESS;
1410}
1411
1412
1413FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1414{
1415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1416 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1417 {
1418 /**
1419 * @opcode 0x12
1420 * @opcodesub 11 mr/reg
1421 * @oppfx none
1422 * @opcpuid sse
1423 * @opgroup og_sse_simdfp_datamove
1424 * @opxcpttype 5
1425 * @optest op1=1 op2=2 -> op1=2
1426 * @optest op1=0 op2=-42 -> op1=-42
1427 */
1428 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1429
1430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1431 IEM_MC_BEGIN(0, 1);
1432 IEM_MC_LOCAL(uint64_t, uSrc);
1433
1434 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1436 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1437 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1438
1439 IEM_MC_ADVANCE_RIP();
1440 IEM_MC_END();
1441 }
1442 else
1443 {
1444 /**
1445 * @opdone
1446 * @opcode 0x12
1447 * @opcodesub !11 mr/reg
1448 * @oppfx none
1449 * @opcpuid sse
1450 * @opgroup og_sse_simdfp_datamove
1451 * @opxcpttype 5
1452 * @optest op1=1 op2=2 -> op1=2
1453 * @optest op1=0 op2=-42 -> op1=-42
1454 * @opfunction iemOp_movlps_Vq_Mq__movhlps
1455 */
1456 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1457
1458 IEM_MC_BEGIN(0, 2);
1459 IEM_MC_LOCAL(uint64_t, uSrc);
1460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1461
1462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1466
1467 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1468 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1469
1470 IEM_MC_ADVANCE_RIP();
1471 IEM_MC_END();
1472 }
1473 return VINF_SUCCESS;
1474}
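
/*
 * Sketch of the mod-dependent behaviour above (standalone; the XREG type
 * and names are assumptions): with mod==3 this decodes as movhlps, moving
 * the high qword of the source register into the low qword of the
 * destination; otherwise it is movlps, a 64-bit memory load into the low
 * qword. The destination's high qword is preserved in both cases.
 */
#if 0
#include <stdint.h>

typedef struct XREG { uint64_t au64[2]; } XREG;

static void MovhlpsRegReg(XREG *pDst, const XREG *pSrc)
{
    pDst->au64[0] = pSrc->au64[1];  /* high -> low, dst high preserved */
}

static void MovlpsRegMem(XREG *pDst, const uint64_t *pu64Src)
{
    pDst->au64[0] = *pu64Src;       /* low qword only */
}
#endif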
1475
1476
1477/**
1478 * @opcode 0x12
1479 * @opcodesub !11 mr/reg
1480 * @oppfx 0x66
1481 * @opcpuid sse2
1482 * @opgroup og_sse2_pcksclr_datamove
1483 * @opxcpttype 5
1484 * @optest op1=1 op2=2 -> op1=2
1485 * @optest op1=0 op2=-42 -> op1=-42
1486 */
1487FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1488{
1489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1490 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1491 {
1492 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1493
1494 IEM_MC_BEGIN(0, 2);
1495 IEM_MC_LOCAL(uint64_t, uSrc);
1496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1497
1498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1500 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1502
1503 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1504 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1505
1506 IEM_MC_ADVANCE_RIP();
1507 IEM_MC_END();
1508 return VINF_SUCCESS;
1509 }
1510
1511 /**
1512 * @opdone
1513 * @opmnemonic ud660f12m3
1514 * @opcode 0x12
1515 * @opcodesub 11 mr/reg
1516 * @oppfx 0x66
1517 * @opunused immediate
1518 * @opcpuid sse
1519 * @optest ->
1520 */
1521 return IEMOP_RAISE_INVALID_OPCODE();
1522}
1523
1524
1525/**
1526 * @opcode 0x12
1527 * @oppfx 0xf3
1528 * @opcpuid sse3
1529 * @opgroup og_sse3_pcksclr_datamove
1530 * @opxcpttype 4
1531 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1532 * op1=0x00000002000000020000000100000001
1533 */
1534FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1535{
1536 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1538 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1539 {
1540 /*
1541 * Register, register.
1542 */
1543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1544 IEM_MC_BEGIN(2, 0);
1545 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1546 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1547
1548 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1549 IEM_MC_PREPARE_SSE_USAGE();
1550
1551 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1552 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1553 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1554
1555 IEM_MC_ADVANCE_RIP();
1556 IEM_MC_END();
1557 }
1558 else
1559 {
1560 /*
1561 * Register, memory.
1562 */
1563 IEM_MC_BEGIN(2, 2);
1564 IEM_MC_LOCAL(RTUINT128U, uSrc);
1565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1566 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1567 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1568
1569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1571 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1572 IEM_MC_PREPARE_SSE_USAGE();
1573
1574 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1575 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1576 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1577
1578 IEM_MC_ADVANCE_RIP();
1579 IEM_MC_END();
1580 }
1581 return VINF_SUCCESS;
1582}
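
/*
 * Sketch of the shuffle iemAImpl_movsldup performs (per the SSE3
 * definition, not the actual assembly helper): each even-indexed source
 * dword is duplicated into a dword pair, matching the @optest vectors in
 * the comment above.
 */
#if 0
#include <stdint.h>

static void Movsldup(uint32_t au32Dst[4], const uint32_t au32Src[4])
{
    /* Reads of already-written slots still see the same value, so this is
       safe even when au32Dst and au32Src alias. */
    au32Dst[0] = au32Src[0];
    au32Dst[1] = au32Src[0];
    au32Dst[2] = au32Src[2];
    au32Dst[3] = au32Src[2];
}
#endif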
1583
1584
1585/**
1586 * @opcode 0x12
1587 * @oppfx 0xf2
1588 * @opcpuid sse3
1589 * @opgroup og_sse3_pcksclr_datamove
1590 * @opxcpttype 5
1591 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1592 * op1=0x22222222111111112222222211111111
1593 */
1594FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1595{
1596 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1599 {
1600 /*
1601 * Register, register.
1602 */
1603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1604 IEM_MC_BEGIN(2, 0);
1605 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1606 IEM_MC_ARG(uint64_t, uSrc, 1);
1607
1608 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1609 IEM_MC_PREPARE_SSE_USAGE();
1610
1611 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1612 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1613 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1614
1615 IEM_MC_ADVANCE_RIP();
1616 IEM_MC_END();
1617 }
1618 else
1619 {
1620 /*
1621 * Register, memory.
1622 */
1623 IEM_MC_BEGIN(2, 2);
1624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1625 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1626 IEM_MC_ARG(uint64_t, uSrc, 1);
1627
1628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1630 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1631 IEM_MC_PREPARE_SSE_USAGE();
1632
1633 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1634 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1635 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1636
1637 IEM_MC_ADVANCE_RIP();
1638 IEM_MC_END();
1639 }
1640 return VINF_SUCCESS;
1641}
1642
1643
1644/** Opcode 0x0f 0x13 - movlps Mq, Vq */
1645FNIEMOP_STUB(iemOp_movlps_Mq_Vq);
1646
1647/** Opcode 0x66 0x0f 0x13 - movlpd Mq, Vq */
1648FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1649{
1650 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1653 {
1654#if 0
1655 /*
1656 * Register, register.
1657 */
1658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1659 IEM_MC_BEGIN(0, 1);
1660 IEM_MC_LOCAL(uint64_t, uSrc);
1661 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1662 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1663 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1664 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1665 IEM_MC_ADVANCE_RIP();
1666 IEM_MC_END();
1667#else
1668 return IEMOP_RAISE_INVALID_OPCODE();
1669#endif
1670 }
1671 else
1672 {
1673 /*
1674 * Memory, register.
1675 */
1676 IEM_MC_BEGIN(0, 2);
1677 IEM_MC_LOCAL(uint64_t, uSrc);
1678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1679
1680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1682 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1683 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1684
1685 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1686 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1687
1688 IEM_MC_ADVANCE_RIP();
1689 IEM_MC_END();
1690 }
1691 return VINF_SUCCESS;
1692}
1693
1694/* Opcode 0xf3 0x0f 0x13 - invalid */
1695/* Opcode 0xf2 0x0f 0x13 - invalid */
1696
1697/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
1698FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1699/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1700FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1701/* Opcode 0xf3 0x0f 0x14 - invalid */
1702/* Opcode 0xf2 0x0f 0x14 - invalid */
1703/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1704FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1705/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1706FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1707/* Opcode 0xf3 0x0f 0x15 - invalid */
1708/* Opcode 0xf2 0x0f 0x15 - invalid */
1709/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq / movlhps Vdq, Uq */
1710FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
1711/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
1712FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
1713/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
1714FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
1715/* Opcode 0xf2 0x0f 0x16 - invalid */
1716/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
1717FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
1718/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
1719FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT
1720/* Opcode 0xf3 0x0f 0x17 - invalid */
1721/* Opcode 0xf2 0x0f 0x17 - invalid */
1722
1723
1724/** Opcode 0x0f 0x18. */
1725FNIEMOP_DEF(iemOp_prefetch_Grp16)
1726{
1727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1728 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1729 {
1730 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1731 {
1732 case 4: /* Aliased to /0 for the time being according to AMD. */
1733 case 5: /* Aliased to /0 for the time being according to AMD. */
1734 case 6: /* Aliased to /0 for the time being according to AMD. */
1735 case 7: /* Aliased to /0 for the time being according to AMD. */
1736 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1737 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1738 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1739 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1741 }
1742
1743 IEM_MC_BEGIN(0, 1);
1744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1747 /* Currently a NOP. */
1748 NOREF(GCPtrEffSrc);
1749 IEM_MC_ADVANCE_RIP();
1750 IEM_MC_END();
1751 return VINF_SUCCESS;
1752 }
1753
1754 return IEMOP_RAISE_INVALID_OPCODE();
1755}
1756
1757
1758/** Opcode 0x0f 0x19..0x1f. */
1759FNIEMOP_DEF(iemOp_nop_Ev)
1760{
1761 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1763 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1764 {
1765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1766 IEM_MC_BEGIN(0, 0);
1767 IEM_MC_ADVANCE_RIP();
1768 IEM_MC_END();
1769 }
1770 else
1771 {
1772 IEM_MC_BEGIN(0, 1);
1773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1776 /* Currently a NOP. */
1777 NOREF(GCPtrEffSrc);
1778 IEM_MC_ADVANCE_RIP();
1779 IEM_MC_END();
1780 }
1781 return VINF_SUCCESS;
1782}
1783
1784
1785/** Opcode 0x0f 0x20. */
1786FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1787{
1788 /* mod is ignored, as are operand size overrides. */
1789 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1790 IEMOP_HLP_MIN_386();
1791 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1792 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1793 else
1794 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1795
1796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1797 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1798 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1799 {
1800 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1801 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1802 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1803 iCrReg |= 8;
1804 }
1805 switch (iCrReg)
1806 {
1807 case 0: case 2: case 3: case 4: case 8:
1808 break;
1809 default:
1810 return IEMOP_RAISE_INVALID_OPCODE();
1811 }
1812 IEMOP_HLP_DONE_DECODING();
1813
1814 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1815}
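
/*
 * Sketch of the control-register index decode above (simplified,
 * illustrative names): REX.R extends the ModRM reg field, and on CPUs with
 * the AMD alternative encoding a LOCK prefix selects CR8 from 32-bit code.
 */
#if 0
#include <stdint.h>
#include <stdbool.h>

static unsigned DecodeCrIndex(uint8_t bRm, bool fRexR, bool fLockPrefix)
{
    unsigned iCrReg = ((bRm >> 3) & 7) | (fRexR ? 8 : 0);
    if (fLockPrefix)
        iCrReg |= 8;    /* LOCK 0F 20/22 -> CR8 on AMD-style CPUs */
    return iCrReg;      /* only 0, 2, 3, 4 and 8 are valid        */
}
#endif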
1816
1817
1818/** Opcode 0x0f 0x21. */
1819FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1820{
1821 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1822 IEMOP_HLP_MIN_386();
1823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1825 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1826 return IEMOP_RAISE_INVALID_OPCODE();
1827 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1828 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1829 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1830}
1831
1832
1833/** Opcode 0x0f 0x22. */
1834FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1835{
1836 /* mod is ignored, as are operand size overrides. */
1837 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1838 IEMOP_HLP_MIN_386();
1839 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1840 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1841 else
1842 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1843
1844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1845 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1846 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1847 {
1848 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1849 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1850 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1851 iCrReg |= 8;
1852 }
1853 switch (iCrReg)
1854 {
1855 case 0: case 2: case 3: case 4: case 8:
1856 break;
1857 default:
1858 return IEMOP_RAISE_INVALID_OPCODE();
1859 }
1860 IEMOP_HLP_DONE_DECODING();
1861
1862 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1863}
1864
1865
1866/** Opcode 0x0f 0x23. */
1867FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1868{
1869 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1870 IEMOP_HLP_MIN_386();
1871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1873 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1874 return IEMOP_RAISE_INVALID_OPCODE();
1875 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1876 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1877 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1878}
1879
1880
1881/** Opcode 0x0f 0x24. */
1882FNIEMOP_DEF(iemOp_mov_Rd_Td)
1883{
1884 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1885 /** @todo works on 386 and 486. */
1886 /* The RM byte is not considered, see testcase. */
1887 return IEMOP_RAISE_INVALID_OPCODE();
1888}
1889
1890
1891/** Opcode 0x0f 0x26. */
1892FNIEMOP_DEF(iemOp_mov_Td_Rd)
1893{
1894 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1895 /** @todo works on 386 and 486. */
1896 /* The RM byte is not considered, see testcase. */
1897 return IEMOP_RAISE_INVALID_OPCODE();
1898}
1899
1900
1901/** Opcode 0x0f 0x28 - movaps Vps, Wps */
1902FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
1903{
1904 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1906 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1907 {
1908 /*
1909 * Register, register.
1910 */
1911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1912 IEM_MC_BEGIN(0, 0);
1913 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1914 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1915 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1916 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1917 IEM_MC_ADVANCE_RIP();
1918 IEM_MC_END();
1919 }
1920 else
1921 {
1922 /*
1923 * Register, memory.
1924 */
1925 IEM_MC_BEGIN(0, 2);
1926 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1928
1929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1931 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1932 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1933
1934 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1935 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1936
1937 IEM_MC_ADVANCE_RIP();
1938 IEM_MC_END();
1939 }
1940 return VINF_SUCCESS;
1941}
1942
1943/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
1944FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
1945{
1946 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1948 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1949 {
1950 /*
1951 * Register, register.
1952 */
1953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1954 IEM_MC_BEGIN(0, 0);
1955 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1956 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1957 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1958 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1959 IEM_MC_ADVANCE_RIP();
1960 IEM_MC_END();
1961 }
1962 else
1963 {
1964 /*
1965 * Register, memory.
1966 */
1967 IEM_MC_BEGIN(0, 2);
1968 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1970
1971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1973 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1974 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1975
1976 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1977 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1978
1979 IEM_MC_ADVANCE_RIP();
1980 IEM_MC_END();
1981 }
1982 return VINF_SUCCESS;
1983}
1984
1985/* Opcode 0xf3 0x0f 0x28 - invalid */
1986/* Opcode 0xf2 0x0f 0x28 - invalid */
1987
1988/** Opcode 0x0f 0x29 - movaps Wps, Vps */
1989FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
1990{
1991 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1993 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1994 {
1995 /*
1996 * Register, register.
1997 */
1998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1999 IEM_MC_BEGIN(0, 0);
2000 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2001 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2002 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2003 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2004 IEM_MC_ADVANCE_RIP();
2005 IEM_MC_END();
2006 }
2007 else
2008 {
2009 /*
2010 * Memory, register.
2011 */
2012 IEM_MC_BEGIN(0, 2);
2013 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015
2016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2020
2021 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2022 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 }
2027 return VINF_SUCCESS;
2028}
2029
2030/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
2031FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2032{
2033 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2035 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2036 {
2037 /*
2038 * Register, register.
2039 */
2040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2041 IEM_MC_BEGIN(0, 0);
2042 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2043 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2044 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2045 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2046 IEM_MC_ADVANCE_RIP();
2047 IEM_MC_END();
2048 }
2049 else
2050 {
2051 /*
2052 * Memory, register.
2053 */
2054 IEM_MC_BEGIN(0, 2);
2055 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2057
2058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2060 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2061 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2062
2063 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2064 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2065
2066 IEM_MC_ADVANCE_RIP();
2067 IEM_MC_END();
2068 }
2069 return VINF_SUCCESS;
2070}
2071
2072/* Opcode 0xf3 0x0f 0x29 - invalid */
2073/* Opcode 0xf2 0x0f 0x29 - invalid */
2074
2075
2076/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2077FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2078/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2079FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2080/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2081FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2082/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2083FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2084
2085
2086/** Opcode 0x0f 0x2b - movntps Mps, Vps */
2087FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2088{
2089 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2091 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2092 {
2093 /*
2094 * memory, register.
2095 */
2096 IEM_MC_BEGIN(0, 2);
2097 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2104
2105 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2106 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2107
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 /* The register, register encoding is invalid. */
2112 else
2113 return IEMOP_RAISE_INVALID_OPCODE();
2114 return VINF_SUCCESS;
2115}
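/*
 * Note: the non-temporal hint only affects cacheability, so emulating the
 * store as a plain aligned 128-bit write (as above) is architecturally
 * sound; misaligned operands still #GP via the _ALIGN_SSE store.
 */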
2116
2117/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2118FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2119{
2120 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2122 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2123 {
2124 /*
2125 * memory, register.
2126 */
2127 IEM_MC_BEGIN(0, 2);
2128 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2130
2131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2133 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2135
2136 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2137 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2138
2139 IEM_MC_ADVANCE_RIP();
2140 IEM_MC_END();
2141 }
2142 /* The register, register encoding is invalid. */
2143 else
2144 return IEMOP_RAISE_INVALID_OPCODE();
2145 return VINF_SUCCESS;
2146}
2147/* Opcode 0xf3 0x0f 0x2b - invalid */
2148/* Opcode 0xf2 0x0f 0x2b - invalid */
2149
2150
2151/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2152FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2153/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2154FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2155/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2156FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2157/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2158FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2159
2160/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2161FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2162/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2163FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2164/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2165FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2166/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2167FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2168
2169/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2170FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2171/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2172FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2173/* Opcode 0xf3 0x0f 0x2e - invalid */
2174/* Opcode 0xf2 0x0f 0x2e - invalid */
2175
2176/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2177FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2178/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2179FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2180/* Opcode 0xf3 0x0f 0x2f - invalid */
2181/* Opcode 0xf2 0x0f 0x2f - invalid */
2182
2183/** Opcode 0x0f 0x30. */
2184FNIEMOP_DEF(iemOp_wrmsr)
2185{
2186 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2188 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2189}
2190
2191
2192/** Opcode 0x0f 0x31. */
2193FNIEMOP_DEF(iemOp_rdtsc)
2194{
2195 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2197 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2198}
2199
2200
2201/** Opcode 0x0f 0x32. */
2202FNIEMOP_DEF(iemOp_rdmsr)
2203{
2204 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2206 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2207}
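/*
 * Both MSR instructions use the same register convention: the MSR index is
 * taken from ECX, and the value travels in EDX:EAX (rdmsr zeroes the high
 * halves of RAX/RDX in 64-bit mode).  The details live in the deferred C
 * implementations.
 */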
2208
2209
2210/** Opcode 0x0f 0x33. */
2211FNIEMOP_DEF(iemOp_rdpmc)
2212{
2213 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2215 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2216}
2217
2218
2219/** Opcode 0x0f 0x34. */
2220FNIEMOP_STUB(iemOp_sysenter);
2221/** Opcode 0x0f 0x35. */
2222FNIEMOP_STUB(iemOp_sysexit);
2223/** Opcode 0x0f 0x37. */
2224FNIEMOP_STUB(iemOp_getsec);
2225
2226
2227/** Opcode 0x0f 0x38. */
2228FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2229{
2230#ifdef IEM_WITH_THREE_0F_38
2231 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2232 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2233#else
2234 IEMOP_BITCH_ABOUT_STUB();
2235 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2236#endif
2237}
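/*
 * A sketch of the table indexing used by the two escape bytes: four entries
 * per opcode byte, one per mandatory-prefix column, with idxPrefix using the
 * usual IEM order (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2), the same layout
 * as the group jump tables further down.  Example (assumed input bytes):
 *
 *      66 0f 38 00 (pshufb Vx,Wx)  =>  g_apfnThreeByte0f38[0x00 * 4 + 1]
 */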
2238
2239
2240/** Opcode 0x0f 0x3a. */
2241FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2242{
2243#ifdef IEM_WITH_THREE_0F_3A
2244 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2245 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2246#else
2247 IEMOP_BITCH_ABOUT_STUB();
2248 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2249#endif
2250}
2251
2252
2253/**
2254 * Implements a conditional move.
2255 *
2256 * Wish there were an obvious way to do this that would let us share code
2257 * and reduce bloat.
2258 *
2259 * @param a_Cnd The conditional "microcode" operation.
2260 */
2261#define CMOV_X(a_Cnd) \
2262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2263 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2264 { \
2265 switch (pVCpu->iem.s.enmEffOpSize) \
2266 { \
2267 case IEMMODE_16BIT: \
2268 IEM_MC_BEGIN(0, 1); \
2269 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2270 a_Cnd { \
2271 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2272 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2273 } IEM_MC_ENDIF(); \
2274 IEM_MC_ADVANCE_RIP(); \
2275 IEM_MC_END(); \
2276 return VINF_SUCCESS; \
2277 \
2278 case IEMMODE_32BIT: \
2279 IEM_MC_BEGIN(0, 1); \
2280 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2281 a_Cnd { \
2282 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2283 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2284 } IEM_MC_ELSE() { \
2285 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2286 } IEM_MC_ENDIF(); \
2287 IEM_MC_ADVANCE_RIP(); \
2288 IEM_MC_END(); \
2289 return VINF_SUCCESS; \
2290 \
2291 case IEMMODE_64BIT: \
2292 IEM_MC_BEGIN(0, 1); \
2293 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2294 a_Cnd { \
2295 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2296 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2297 } IEM_MC_ENDIF(); \
2298 IEM_MC_ADVANCE_RIP(); \
2299 IEM_MC_END(); \
2300 return VINF_SUCCESS; \
2301 \
2302 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2303 } \
2304 } \
2305 else \
2306 { \
2307 switch (pVCpu->iem.s.enmEffOpSize) \
2308 { \
2309 case IEMMODE_16BIT: \
2310 IEM_MC_BEGIN(0, 2); \
2311 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2312 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2314 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2315 a_Cnd { \
2316 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2317 } IEM_MC_ENDIF(); \
2318 IEM_MC_ADVANCE_RIP(); \
2319 IEM_MC_END(); \
2320 return VINF_SUCCESS; \
2321 \
2322 case IEMMODE_32BIT: \
2323 IEM_MC_BEGIN(0, 2); \
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2325 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2327 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2328 a_Cnd { \
2329 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2330 } IEM_MC_ELSE() { \
2331 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2332 } IEM_MC_ENDIF(); \
2333 IEM_MC_ADVANCE_RIP(); \
2334 IEM_MC_END(); \
2335 return VINF_SUCCESS; \
2336 \
2337 case IEMMODE_64BIT: \
2338 IEM_MC_BEGIN(0, 2); \
2339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2340 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2342 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2343 a_Cnd { \
2344 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2345 } IEM_MC_ENDIF(); \
2346 IEM_MC_ADVANCE_RIP(); \
2347 IEM_MC_END(); \
2348 return VINF_SUCCESS; \
2349 \
2350 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2351 } \
2352 } do {} while (0)
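/*
 * For the 32-bit operand size the expansion behaves roughly like this C
 * sketch (register form; the names are illustrative, not real IEM state):
 *
 *      uint32_t const uSrc = *puGregSrc;
 *      if (CONDITION(fEFlags))
 *          *puGregDst = uSrc;              // implicitly zero-extended
 *      else
 *          *puGregDst &= UINT32_MAX;       // high dword cleared even when
 *                                          // the condition is false
 *
 * Note also that the memory operand is always fetched before the condition
 * is evaluated, so an inaccessible operand faults either way.
 */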
2353
2354
2355
2356/** Opcode 0x0f 0x40. */
2357FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2358{
2359 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2360 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2361}
2362
2363
2364/** Opcode 0x0f 0x41. */
2365FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2366{
2367 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2368 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2369}
2370
2371
2372/** Opcode 0x0f 0x42. */
2373FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2374{
2375 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2376 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2377}
2378
2379
2380/** Opcode 0x0f 0x43. */
2381FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2382{
2383 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2384 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2385}
2386
2387
2388/** Opcode 0x0f 0x44. */
2389FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2390{
2391 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2392 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2393}
2394
2395
2396/** Opcode 0x0f 0x45. */
2397FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2398{
2399 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2400 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2401}
2402
2403
2404/** Opcode 0x0f 0x46. */
2405FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2406{
2407 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2408 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2409}
2410
2411
2412/** Opcode 0x0f 0x47. */
2413FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2414{
2415 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2416 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2417}
2418
2419
2420/** Opcode 0x0f 0x48. */
2421FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2422{
2423 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2424 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2425}
2426
2427
2428/** Opcode 0x0f 0x49. */
2429FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2430{
2431 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2432 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2433}
2434
2435
2436/** Opcode 0x0f 0x4a. */
2437FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2438{
2439 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2440 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2441}
2442
2443
2444/** Opcode 0x0f 0x4b. */
2445FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2446{
2447 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2448 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2449}
2450
2451
2452/** Opcode 0x0f 0x4c. */
2453FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2454{
2455 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2456 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2457}
2458
2459
2460/** Opcode 0x0f 0x4d. */
2461FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2462{
2463 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2464 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2465}
2466
2467
2468/** Opcode 0x0f 0x4e. */
2469FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2470{
2471 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2472 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2473}
2474
2475
2476/** Opcode 0x0f 0x4f. */
2477FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2478{
2479 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2480 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2481}
2482
2483#undef CMOV_X
2484
2485/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2486FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2487/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2488FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2489/* Opcode 0xf3 0x0f 0x50 - invalid */
2490/* Opcode 0xf2 0x0f 0x50 - invalid */
2491
2492/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2493FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2494/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2495FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2496/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2497FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2498/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2499FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2500
2501/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2502FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2503/* Opcode 0x66 0x0f 0x52 - invalid */
2504/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2505FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2506/* Opcode 0xf2 0x0f 0x52 - invalid */
2507
2508/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2509FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2510/* Opcode 0x66 0x0f 0x53 - invalid */
2511/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2512FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2513/* Opcode 0xf2 0x0f 0x53 - invalid */
2514
2515/** Opcode 0x0f 0x54 - andps Vps, Wps */
2516FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2517/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2518FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2519/* Opcode 0xf3 0x0f 0x54 - invalid */
2520/* Opcode 0xf2 0x0f 0x54 - invalid */
2521
2522/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2523FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2524/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2525FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2526/* Opcode 0xf3 0x0f 0x55 - invalid */
2527/* Opcode 0xf2 0x0f 0x55 - invalid */
2528
2529/** Opcode 0x0f 0x56 - orps Vps, Wps */
2530FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2531/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2532FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2533/* Opcode 0xf3 0x0f 0x56 - invalid */
2534/* Opcode 0xf2 0x0f 0x56 - invalid */
2535
2536/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2537FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2538/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2539FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2540/* Opcode 0xf3 0x0f 0x57 - invalid */
2541/* Opcode 0xf2 0x0f 0x57 - invalid */
2542
2543/** Opcode 0x0f 0x58 - addps Vps, Wps */
2544FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2545/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2546FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2547/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2548FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2549/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2550FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2551
2552/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2553FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2554/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2555FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2556/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2557FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2558/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2559FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2560
2561/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2562FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2563/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2564FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2565/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2566FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2567/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2568FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2569
2570/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2571FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2572/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2573FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2574/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2575FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2576/* Opcode 0xf2 0x0f 0x5b - invalid */
2577
2578/** Opcode 0x0f 0x5c - subps Vps, Wps */
2579FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2580/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2581FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2582/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2583FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2584/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2585FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2586
2587/** Opcode 0x0f 0x5d - minps Vps, Wps */
2588FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2589/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2590FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2591/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2592FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2593/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2594FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2595
2596/** Opcode 0x0f 0x5e - divps Vps, Wps */
2597FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2598/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2599FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2600/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2601FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2602/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2603FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2604
2605/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2606FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2607/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2608FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2609/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2610FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2611/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2612FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2613
2614/**
2615 * Common worker for MMX instructions on the forms:
2616 *     pxxxx mm1, mm2/mem32
2617 *
2618 * The 2nd operand is the first half of a register, which in the memory case
2619 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned 64-bit
2620 * or 128-bit memory access.
2621 *
2622 * Exceptions type 4.
2623 */
2624FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2625{
2626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2627 if (!pImpl->pfnU64)
2628 return IEMOP_RAISE_INVALID_OPCODE();
2629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2630 {
2631 /*
2632 * Register, register.
2633 */
2634 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2635 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2637 IEM_MC_BEGIN(2, 0);
2638 IEM_MC_ARG(uint64_t *, pDst, 0);
2639 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2640 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2641 IEM_MC_PREPARE_FPU_USAGE();
2642 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2643 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2644 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2645 IEM_MC_ADVANCE_RIP();
2646 IEM_MC_END();
2647 }
2648 else
2649 {
2650 /*
2651 * Register, memory.
2652 */
2653 IEM_MC_BEGIN(2, 2);
2654 IEM_MC_ARG(uint64_t *, pDst, 0);
2655 IEM_MC_LOCAL(uint32_t, uSrc);
2656 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2658
2659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2661 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2662 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2663
2664 IEM_MC_PREPARE_FPU_USAGE();
2665 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2666 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2667
2668 IEM_MC_ADVANCE_RIP();
2669 IEM_MC_END();
2670 }
2671 return VINF_SUCCESS;
2672}
2673
2674
2675/**
2676 * Common worker for SSE2 instructions on the forms:
2677 *     pxxxx xmm1, xmm2/mem128
2678 *
2679 * The 2nd operand is the first half of a register, which in the memory case
2680 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned 64-bit
2681 * or 128-bit memory access.
2682 *
2683 * Exceptions type 4.
2684 */
2685FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2686{
2687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2689 {
2690 /*
2691 * Register, register.
2692 */
2693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2694 IEM_MC_BEGIN(2, 0);
2695 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2696 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2697 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2698 IEM_MC_PREPARE_SSE_USAGE();
2699 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2700 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2701 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2702 IEM_MC_ADVANCE_RIP();
2703 IEM_MC_END();
2704 }
2705 else
2706 {
2707 /*
2708 * Register, memory.
2709 */
2710 IEM_MC_BEGIN(2, 2);
2711 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2712 IEM_MC_LOCAL(uint64_t, uSrc);
2713 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2715
2716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2718 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2719 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2720
2721 IEM_MC_PREPARE_SSE_USAGE();
2722 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2723 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2724
2725 IEM_MC_ADVANCE_RIP();
2726 IEM_MC_END();
2727 }
2728 return VINF_SUCCESS;
2729}
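/*
 * To visualize the low-low interleave these two workers implement, take
 * punpcklbw on MMX as an example (byte values are illustrative):
 *
 *      dst = xx xx xx xx d3 d2 d1 d0    (only the low dword is used)
 *      src = xx xx xx xx s3 s2 s1 s0
 *       =>   s3 d3 s2 d2 s1 d1 s0 d0    (64-bit result)
 */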
2730
2731
2732/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2733FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2734{
2735 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2736 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2737}
2738
2739/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
2740FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
2741{
2742 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
2743 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2744}
2745
2746/* Opcode 0xf3 0x0f 0x60 - invalid */
2747
2748
2749/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2750FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2751{
2752 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires plain MMX (CPUID). */
2753 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2754}
2755
2756/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2757FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2758{
2759 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2760 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2761}
2762
2763/* Opcode 0xf3 0x0f 0x61 - invalid */
2764
2765
2766/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2767FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2768{
2769 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2770 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2771}
2772
2773/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2774FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2775{
2776 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2777 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2778}
2779
2780/* Opcode 0xf3 0x0f 0x62 - invalid */
2781
2782
2783
2784/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2785FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2786/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2787FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2788/* Opcode 0xf3 0x0f 0x63 - invalid */
2789
2790/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2791FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2792/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2793FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2794/* Opcode 0xf3 0x0f 0x64 - invalid */
2795
2796/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2797FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2798/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2799FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2800/* Opcode 0xf3 0x0f 0x65 - invalid */
2801
2802/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2803FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2804/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
2805FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
2806/* Opcode 0xf3 0x0f 0x66 - invalid */
2807
2808/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2809FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2810/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
2811FNIEMOP_STUB(iemOp_packuswb_Vx_W);
2812/* Opcode 0xf3 0x0f 0x67 - invalid */
2813
2814
2815/**
2816 * Common worker for MMX instructions on the form:
2817 * pxxxx mm1, mm2/mem64
2818 *
2819 * The 2nd operand is the second half of a register, which in the memory case
2820 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2821 * where it may read the full 128 bits or only the upper 64 bits.
2822 *
2823 * Exceptions type 4.
2824 */
2825FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2826{
2827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2828 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2829 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2830 {
2831 /*
2832 * Register, register.
2833 */
2834 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2835 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2837 IEM_MC_BEGIN(2, 0);
2838 IEM_MC_ARG(uint64_t *, pDst, 0);
2839 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2840 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2841 IEM_MC_PREPARE_FPU_USAGE();
2842 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2843 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2844 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2845 IEM_MC_ADVANCE_RIP();
2846 IEM_MC_END();
2847 }
2848 else
2849 {
2850 /*
2851 * Register, memory.
2852 */
2853 IEM_MC_BEGIN(2, 2);
2854 IEM_MC_ARG(uint64_t *, pDst, 0);
2855 IEM_MC_LOCAL(uint64_t, uSrc);
2856 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2858
2859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2861 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2862 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2863
2864 IEM_MC_PREPARE_FPU_USAGE();
2865 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2866 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2867
2868 IEM_MC_ADVANCE_RIP();
2869 IEM_MC_END();
2870 }
2871 return VINF_SUCCESS;
2872}
2873
2874
2875/**
2876 * Common worker for SSE2 instructions on the form:
2877 * pxxxx xmm1, xmm2/mem128
2878 *
2879 * The 2nd operand is the second half of a register, which in the memory case
2880 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2881 * where it may read the full 128 bits or only the upper 64 bits.
2882 *
2883 * Exceptions type 4.
2884 */
2885FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2886{
2887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2888 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2889 {
2890 /*
2891 * Register, register.
2892 */
2893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2894 IEM_MC_BEGIN(2, 0);
2895 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2896 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2897 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2898 IEM_MC_PREPARE_SSE_USAGE();
2899 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2900 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2901 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2902 IEM_MC_ADVANCE_RIP();
2903 IEM_MC_END();
2904 }
2905 else
2906 {
2907 /*
2908 * Register, memory.
2909 */
2910 IEM_MC_BEGIN(2, 2);
2911 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2912 IEM_MC_LOCAL(RTUINT128U, uSrc);
2913 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2915
2916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2918 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2919 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2920
2921 IEM_MC_PREPARE_SSE_USAGE();
2922 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2923 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2924
2925 IEM_MC_ADVANCE_RIP();
2926 IEM_MC_END();
2927 }
2928 return VINF_SUCCESS;
2929}
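/*
 * The high-high counterpart, again with punpckhbw on MMX as the example
 * (byte values illustrative); only the upper halves contribute:
 *
 *      dst = d7 d6 d5 d4 xx xx xx xx
 *      src = s7 s6 s5 s4 xx xx xx xx
 *       =>   s7 d7 s6 d6 s5 d5 s4 d4
 */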
2930
2931
2932/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2933FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2934{
2935 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2936 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2937}
2938
2939/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
2940FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
2941{
2942 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
2943 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2944}
2945/* Opcode 0xf3 0x0f 0x68 - invalid */
2946
2947
2948/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2949FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2950{
2951 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2952 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2953}
2954
2955/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
2956FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
2957{
2958 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
2959 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2960}
2961
2962/* Opcode 0xf3 0x0f 0x69 - invalid */
2963
2964
2965/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2966FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2967{
2968 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2969 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2970}
2971
2972/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
2973FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
2974{
2975 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
2976 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2977}
2978/* Opcode 0xf3 0x0f 0x6a - invalid */
2979
2980
2981/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2982FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2983/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
2984FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
2985/* Opcode 0xf3 0x0f 0x6b - invalid */
2986
2987
2988/* Opcode 0x0f 0x6c - invalid */
2989
2990/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
2991FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
2992{
2993 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
2994 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2995}
2996
2997/* Opcode 0xf3 0x0f 0x6c - invalid */
2998/* Opcode 0xf2 0x0f 0x6c - invalid */
2999
3000
3001/* Opcode 0x0f 0x6d - invalid */
3002
3003/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3004FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3005{
3006 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,Wx");
3007 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3008}
3009
3010/* Opcode 0xf3 0x0f 0x6d - invalid */
3011
3012
3013/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3014FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3015{
3016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3017 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3018 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3019 else
3020 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3021 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3022 {
3023 /* MMX, greg */
3024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3025 IEM_MC_BEGIN(0, 1);
3026 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3027 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3028 IEM_MC_LOCAL(uint64_t, u64Tmp);
3029 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3030 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3031 else
3032 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3033 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3034 IEM_MC_ADVANCE_RIP();
3035 IEM_MC_END();
3036 }
3037 else
3038 {
3039 /* MMX, [mem] */
3040 IEM_MC_BEGIN(0, 2);
3041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3042 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3045 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3046 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3047 {
3048 IEM_MC_LOCAL(uint64_t, u64Tmp);
3049 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3050 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3051 }
3052 else
3053 {
3054 IEM_MC_LOCAL(uint32_t, u32Tmp);
3055 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3056 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3057 }
3058 IEM_MC_ADVANCE_RIP();
3059 IEM_MC_END();
3060 }
3061 return VINF_SUCCESS;
3062}
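/*
 * Illustrative encodings for the two shapes decoded above; REX.W selects
 * the 64-bit form (example bytes only):
 *
 *      0f 6e c0                movd mm0, eax   ; 32 bits, zero-extended to 64
 *      48 0f 6e c0             movq mm0, rax   ; REX.W form
 */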
3063
3064/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3065FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3066{
3067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3068 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3069 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Vq,Eq");
3070 else
3071 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Vd,Ed");
3072 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3073 {
3074 /* XMM, greg*/
3075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3076 IEM_MC_BEGIN(0, 1);
3077 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3078 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3079 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3080 {
3081 IEM_MC_LOCAL(uint64_t, u64Tmp);
3082 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3083 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3084 }
3085 else
3086 {
3087 IEM_MC_LOCAL(uint32_t, u32Tmp);
3088 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3089 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3090 }
3091 IEM_MC_ADVANCE_RIP();
3092 IEM_MC_END();
3093 }
3094 else
3095 {
3096 /* XMM, [mem] */
3097 IEM_MC_BEGIN(0, 2);
3098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3099 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3102 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3103 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3104 {
3105 IEM_MC_LOCAL(uint64_t, u64Tmp);
3106 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3107 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3108 }
3109 else
3110 {
3111 IEM_MC_LOCAL(uint32_t, u32Tmp);
3112 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3113 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3114 }
3115 IEM_MC_ADVANCE_RIP();
3116 IEM_MC_END();
3117 }
3118 return VINF_SUCCESS;
3119}
3120
3121/* Opcode 0xf3 0x0f 0x6e - invalid */
3122
3123
3124/** Opcode 0x0f 0x6f - movq Pq, Qq */
3125FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3126{
3127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3128 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3129 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3130 {
3131 /*
3132 * Register, register.
3133 */
3134 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3135 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3137 IEM_MC_BEGIN(0, 1);
3138 IEM_MC_LOCAL(uint64_t, u64Tmp);
3139 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3140 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3141 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3142 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3143 IEM_MC_ADVANCE_RIP();
3144 IEM_MC_END();
3145 }
3146 else
3147 {
3148 /*
3149 * Register, memory.
3150 */
3151 IEM_MC_BEGIN(0, 2);
3152 IEM_MC_LOCAL(uint64_t, u64Tmp);
3153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3154
3155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3158 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3159 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3160 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3161
3162 IEM_MC_ADVANCE_RIP();
3163 IEM_MC_END();
3164 }
3165 return VINF_SUCCESS;
3166}
3167
3168/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3169FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3170{
3171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3172 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3173 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3174 {
3175 /*
3176 * Register, register.
3177 */
3178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3179 IEM_MC_BEGIN(0, 0);
3180 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3181 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3182 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3183 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3184 IEM_MC_ADVANCE_RIP();
3185 IEM_MC_END();
3186 }
3187 else
3188 {
3189 /*
3190 * Register, memory.
3191 */
3192 IEM_MC_BEGIN(0, 2);
3193 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3195
3196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3198 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3199 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3200 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3201 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3202
3203 IEM_MC_ADVANCE_RIP();
3204 IEM_MC_END();
3205 }
3206 return VINF_SUCCESS;
3207}
3208
3209/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3210FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3211{
3212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3213 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3214 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3215 {
3216 /*
3217 * Register, register.
3218 */
3219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3220 IEM_MC_BEGIN(0, 0);
3221 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3222 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3223 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3224 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3225 IEM_MC_ADVANCE_RIP();
3226 IEM_MC_END();
3227 }
3228 else
3229 {
3230 /*
3231 * Register, memory.
3232 */
3233 IEM_MC_BEGIN(0, 2);
3234 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3236
3237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3239 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3240 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3241 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3242 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3243
3244 IEM_MC_ADVANCE_RIP();
3245 IEM_MC_END();
3246 }
3247 return VINF_SUCCESS;
3248}
3249
3250
3251/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3252FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3253{
3254 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3256 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3257 {
3258 /*
3259 * Register, register.
3260 */
3261 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3263
3264 IEM_MC_BEGIN(3, 0);
3265 IEM_MC_ARG(uint64_t *, pDst, 0);
3266 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3267 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3268 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3269 IEM_MC_PREPARE_FPU_USAGE();
3270 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3271 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3272 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3273 IEM_MC_ADVANCE_RIP();
3274 IEM_MC_END();
3275 }
3276 else
3277 {
3278 /*
3279 * Register, memory.
3280 */
3281 IEM_MC_BEGIN(3, 2);
3282 IEM_MC_ARG(uint64_t *, pDst, 0);
3283 IEM_MC_LOCAL(uint64_t, uSrc);
3284 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3286
3287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3288 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3289 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3292
3293 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3294 IEM_MC_PREPARE_FPU_USAGE();
3295 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3296 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3297
3298 IEM_MC_ADVANCE_RIP();
3299 IEM_MC_END();
3300 }
3301 return VINF_SUCCESS;
3302}
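/*
 * The immediate picks one source word per destination word, two bits each;
 * a C sketch of the pshufw semantics (the same scheme, on different word
 * lanes, applies to the pshufd/pshufhw/pshuflw variants below):
 *
 *      for (unsigned i = 0; i < 4; i++)
 *          Dst.au16[i] = Src.au16[(bEvil >> (i * 2)) & 3];
 */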
3303
3304/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3305FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3306{
3307 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3310 {
3311 /*
3312 * Register, register.
3313 */
3314 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3316
3317 IEM_MC_BEGIN(3, 0);
3318 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3319 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3320 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3321 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3322 IEM_MC_PREPARE_SSE_USAGE();
3323 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3324 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3325 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3326 IEM_MC_ADVANCE_RIP();
3327 IEM_MC_END();
3328 }
3329 else
3330 {
3331 /*
3332 * Register, memory.
3333 */
3334 IEM_MC_BEGIN(3, 2);
3335 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3336 IEM_MC_LOCAL(RTUINT128U, uSrc);
3337 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3339
3340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3341 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3342 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3344 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3345
3346 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3347 IEM_MC_PREPARE_SSE_USAGE();
3348 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3349 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3350
3351 IEM_MC_ADVANCE_RIP();
3352 IEM_MC_END();
3353 }
3354 return VINF_SUCCESS;
3355}
3356
3357/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3358FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3359{
3360 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3362 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3363 {
3364 /*
3365 * Register, register.
3366 */
3367 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3369
3370 IEM_MC_BEGIN(3, 0);
3371 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3372 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3373 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3374 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3375 IEM_MC_PREPARE_SSE_USAGE();
3376 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3377 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3378 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3379 IEM_MC_ADVANCE_RIP();
3380 IEM_MC_END();
3381 }
3382 else
3383 {
3384 /*
3385 * Register, memory.
3386 */
3387 IEM_MC_BEGIN(3, 2);
3388 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3389 IEM_MC_LOCAL(RTUINT128U, uSrc);
3390 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3392
3393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3394 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3395 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3397 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3398
3399 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3400 IEM_MC_PREPARE_SSE_USAGE();
3401 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3402 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3403
3404 IEM_MC_ADVANCE_RIP();
3405 IEM_MC_END();
3406 }
3407 return VINF_SUCCESS;
3408}
3409
3410/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3411FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3412{
3413 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3415 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3416 {
3417 /*
3418 * Register, register.
3419 */
3420 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3422
3423 IEM_MC_BEGIN(3, 0);
3424 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3425 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3426 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3427 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3428 IEM_MC_PREPARE_SSE_USAGE();
3429 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3430 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3431 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3432 IEM_MC_ADVANCE_RIP();
3433 IEM_MC_END();
3434 }
3435 else
3436 {
3437 /*
3438 * Register, memory.
3439 */
3440 IEM_MC_BEGIN(3, 2);
3441 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3442 IEM_MC_LOCAL(RTUINT128U, uSrc);
3443 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3445
3446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3447 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3448 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3450 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3451
3452 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3453 IEM_MC_PREPARE_SSE_USAGE();
3454 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3455 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3456
3457 IEM_MC_ADVANCE_RIP();
3458 IEM_MC_END();
3459 }
3460 return VINF_SUCCESS;
3461}
3462
3463
3464/** Opcode 0x0f 0x71 11/2. */
3465FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3466
3467/** Opcode 0x66 0x0f 0x71 11/2. */
3468FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3469
3470/** Opcode 0x0f 0x71 11/4. */
3471FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3472
3473/** Opcode 0x66 0x0f 0x71 11/4. */
3474FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3475
3476/** Opcode 0x0f 0x71 11/6. */
3477FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3478
3479/** Opcode 0x66 0x0f 0x71 11/6. */
3480FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3481
3482
3483/**
3484 * Group 12 jump table for register variant.
3485 */
3486IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3487{
3488 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3489 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3490 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3491 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3492 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3493 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3494 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3495 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3496};
3497AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3498
3499
3500/** Opcode 0x0f 0x71. */
3501FNIEMOP_DEF(iemOp_Grp12)
3502{
3503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3505 /* register, register */
3506 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3507 + pVCpu->iem.s.idxPrefix], bRm);
3508 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3509}
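/*
 * Worked example of the group dispatch above (assumed input bytes):
 *
 *      66 0f 71 d0 04          psrlw xmm0, 4
 *
 * ModRM 0xd0 gives mod=3, reg=/2, rm=0, and the 0x66 prefix selects column
 * 1, so this lands on g_apfnGroup12RegReg[2 * 4 + 1], iemOp_Grp12_psrlw_Ux_Ib.
 */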
3510
3511
3512/** Opcode 0x0f 0x72 11/2. */
3513FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3514
3515/** Opcode 0x66 0x0f 0x72 11/2. */
3516FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3517
3518/** Opcode 0x0f 0x72 11/4. */
3519FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3520
3521/** Opcode 0x66 0x0f 0x72 11/4. */
3522FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3523
3524/** Opcode 0x0f 0x72 11/6. */
3525FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3526
3527/** Opcode 0x66 0x0f 0x72 11/6. */
3528FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3529
3530
3531/**
3532 * Group 13 jump table for register variant.
3533 */
3534IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3535{
3536 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3537 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3538 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3539 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3540 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3541 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3542 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3543 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3544};
3545AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3546
3547/** Opcode 0x0f 0x72. */
3548FNIEMOP_DEF(iemOp_Grp13)
3549{
3550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3551 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3552 /* register, register */
3553 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3554 + pVCpu->iem.s.idxPrefix], bRm);
3555 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3556}
3557
3558
3559/** Opcode 0x0f 0x73 11/2. */
3560FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3561
3562/** Opcode 0x66 0x0f 0x73 11/2. */
3563FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3564
3565/** Opcode 0x66 0x0f 0x73 11/3. */
3566FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3567
3568/** Opcode 0x0f 0x73 11/6. */
3569FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3570
3571/** Opcode 0x66 0x0f 0x73 11/6. */
3572FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3573
3574/** Opcode 0x66 0x0f 0x73 11/7. */
3575FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3576
3577/**
3578 * Group 14 jump table for register variant.
3579 */
3580IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3581{
3582 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3583 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3584 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3585 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3586 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3587 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3588 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3589 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3590};
3591AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
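
/* Note: unlike groups 12 and 13, rows /3 (psrldq) and /7 (pslldq) only have
 * an entry in the 0x66 column; those whole-register byte shifts have no MMX
 * form, so the no-prefix column is invalid for them. */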


/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
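
/* Note: in the memory form above the source qword is fetched into a local
 * before IEM_MC_PREPARE_FPU_USAGE, so a #PF on the operand is raised before
 * any FPU/MMX state is touched.  Also note that only the low three ModRM bits
 * index the MMX register; no REX extension is applied (see the testcase todos
 * above). */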


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
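
/* Note: the SSE2 worker differs from the MMX one in two ways: it uses
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE, which enforces the natural 16-byte
 * alignment of the 128-bit operand ("exceptions type 4" above), and the REX
 * bits do participate in the XMM register indexing via uRexReg / uRexB. */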


/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}

/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}

/* Opcode 0xf3 0x0f 0x74 - invalid */
/* Opcode 0xf2 0x0f 0x74 - invalid */


/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}

/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}

/* Opcode 0xf3 0x0f 0x75 - invalid */
/* Opcode 0xf2 0x0f 0x75 - invalid */


/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}

/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}

/* Opcode 0xf3 0x0f 0x76 - invalid */
/* Opcode 0xf2 0x0f 0x76 - invalid */


/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_STUB(iemOp_emms);
/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_STUB(iemOp_haddps_Vps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);


/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
    else
        IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* greg, MMX */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* [mem], MMX */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate byte follows, so cbImm is 0 */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
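
/* Note: REX.W selects between the two mnemonics above: with REX.W this is a
 * full 64-bit move ('movq rax, mm0' style), without it only the low
 * doubleword is transferred, e.g. 'movd eax, mm0'. */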

/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
    else
        IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* greg, XMM */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* [mem], XMM */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate byte follows, so cbImm is 0 */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
FNIEMOP_STUB(iemOp_movq_Vq_Wq);
/* Opcode 0xf2 0x0f 0x7e - invalid */


/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
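
/* Note: movdqa and movdqu differ only in the store macro used for the memory
 * form; IEM_MC_STORE_MEM_U128_ALIGN_SSE enforces the 16-byte alignment of the
 * operand (misalignment faults per the aligned-SSE rules), while the plain
 * IEM_MC_STORE_MEM_U128 accepts any alignment. */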

/* Opcode 0xf2 0x0f 0x7f - invalid */



/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
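
/* Note: all the Jcc Jv handlers below follow the pattern above: the effective
 * operand size picks a rel16 or rel32 immediate, and since
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE forces a 64-bit default operand size in
 * long mode, the 32-bit branch also covers long mode, where the rel32 is
 * sign-extended when applied to RIP. */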


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
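
/* Note: the setcc handlers below mirror the Jcc handlers flag-wise but store
 * a 1/0 byte to the r/m8 destination instead of branching.  The ModRM reg
 * field is not decoded at all here (see the encoding-test todos). */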


/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common 'push segment-register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* only FS/GS may come here in 64-bit mode */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
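
/* Note: the 32-bit case uses IEM_MC_PUSH_U32_SREG rather than a plain U32
 * push; judging by the macro name this presumably models CPUs that do a
 * 16-bit write for segment pushes and leave the upper half of the stack slot
 * untouched -- an assumption on the editor's part, not verified here. */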


/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}


/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}


/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;
5460
5461 /** @todo test negative bit offsets! */
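        /* Worked example for the address adjustment below (16-bit case): the
         * bit offset in Gv is signed and may address memory outside the
         * modrm operand.  u16Src = 35 (0x0023) gives i16AddrAdj = (35 >> 4) * 2
         * = +4 bytes and a residual bit index of 35 & 15 = 3; u16Src = -1
         * (0xffff) gives -2 bytes and bit index 15.  The 32-bit and 64-bit
         * cases use (>> 5, << 2) and (>> 6, << 3) respectively. */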
5462 switch (pVCpu->iem.s.enmEffOpSize)
5463 {
5464 case IEMMODE_16BIT:
5465 IEM_MC_BEGIN(3, 2);
5466 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5467 IEM_MC_ARG(uint16_t, u16Src, 1);
5468 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5470 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5471
5472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5473 if (pImpl->pfnLockedU16)
5474 IEMOP_HLP_DONE_DECODING();
5475 else
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5478 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5479 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5480 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5481 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5482 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5483 IEM_MC_FETCH_EFLAGS(EFlags);
5484
5485 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5486 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5487 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5488 else
5489 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5490 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5491
5492 IEM_MC_COMMIT_EFLAGS(EFlags);
5493 IEM_MC_ADVANCE_RIP();
5494 IEM_MC_END();
5495 return VINF_SUCCESS;
5496
5497 case IEMMODE_32BIT:
5498 IEM_MC_BEGIN(3, 2);
5499 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5500 IEM_MC_ARG(uint32_t, u32Src, 1);
5501 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5503 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5504
5505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5506 if (pImpl->pfnLockedU16)
5507 IEMOP_HLP_DONE_DECODING();
5508 else
5509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5510 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5511 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5512 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5513 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5514 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5515 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5516 IEM_MC_FETCH_EFLAGS(EFlags);
5517
5518 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5519 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5520 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5521 else
5522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5523 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5524
5525 IEM_MC_COMMIT_EFLAGS(EFlags);
5526 IEM_MC_ADVANCE_RIP();
5527 IEM_MC_END();
5528 return VINF_SUCCESS;
5529
5530 case IEMMODE_64BIT:
5531 IEM_MC_BEGIN(3, 2);
5532 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5533 IEM_MC_ARG(uint64_t, u64Src, 1);
5534 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5536 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5537
5538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5539 if (pImpl->pfnLockedU16)
5540 IEMOP_HLP_DONE_DECODING();
5541 else
5542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5543 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5544 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5545 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5546 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5547 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5548 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5549 IEM_MC_FETCH_EFLAGS(EFlags);
5550
5551 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5552 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5553 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5554 else
5555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5556 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5557
5558 IEM_MC_COMMIT_EFLAGS(EFlags);
5559 IEM_MC_ADVANCE_RIP();
5560 IEM_MC_END();
5561 return VINF_SUCCESS;
5562
5563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5564 }
5565 }
5566}
5567
5568
5569/** Opcode 0x0f 0xa3. */
5570FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5571{
5572 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5573 IEMOP_HLP_MIN_386();
5574 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5575}
5576
5577
5578/**
5579 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5580 */
5581FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5582{
5583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5584 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5585
5586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5587 {
5588 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5590
5591 switch (pVCpu->iem.s.enmEffOpSize)
5592 {
5593 case IEMMODE_16BIT:
5594 IEM_MC_BEGIN(4, 0);
5595 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5596 IEM_MC_ARG(uint16_t, u16Src, 1);
5597 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5598 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5599
5600 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5601 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5602 IEM_MC_REF_EFLAGS(pEFlags);
5603 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5604
5605 IEM_MC_ADVANCE_RIP();
5606 IEM_MC_END();
5607 return VINF_SUCCESS;
5608
5609 case IEMMODE_32BIT:
5610 IEM_MC_BEGIN(4, 0);
5611 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5612 IEM_MC_ARG(uint32_t, u32Src, 1);
5613 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5614 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5615
5616 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5617 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5618 IEM_MC_REF_EFLAGS(pEFlags);
5619 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5620
5621 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5622 IEM_MC_ADVANCE_RIP();
5623 IEM_MC_END();
5624 return VINF_SUCCESS;
5625
5626 case IEMMODE_64BIT:
5627 IEM_MC_BEGIN(4, 0);
5628 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5629 IEM_MC_ARG(uint64_t, u64Src, 1);
5630 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5631 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5632
5633 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5634 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5635 IEM_MC_REF_EFLAGS(pEFlags);
5636 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5637
5638 IEM_MC_ADVANCE_RIP();
5639 IEM_MC_END();
5640 return VINF_SUCCESS;
5641
5642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5643 }
5644 }
5645 else
5646 {
5647 switch (pVCpu->iem.s.enmEffOpSize)
5648 {
5649 case IEMMODE_16BIT:
5650 IEM_MC_BEGIN(4, 2);
5651 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5652 IEM_MC_ARG(uint16_t, u16Src, 1);
5653 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5654 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5656
5657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5658 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5659 IEM_MC_ASSIGN(cShiftArg, cShift);
5660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5661 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5662 IEM_MC_FETCH_EFLAGS(EFlags);
5663 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5664 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5665
5666 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5667 IEM_MC_COMMIT_EFLAGS(EFlags);
5668 IEM_MC_ADVANCE_RIP();
5669 IEM_MC_END();
5670 return VINF_SUCCESS;
5671
5672 case IEMMODE_32BIT:
5673 IEM_MC_BEGIN(4, 2);
5674 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5675 IEM_MC_ARG(uint32_t, u32Src, 1);
5676 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5677 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5679
5680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5681 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5682 IEM_MC_ASSIGN(cShiftArg, cShift);
5683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5684 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5685 IEM_MC_FETCH_EFLAGS(EFlags);
5686 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5687 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5688
5689 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5690 IEM_MC_COMMIT_EFLAGS(EFlags);
5691 IEM_MC_ADVANCE_RIP();
5692 IEM_MC_END();
5693 return VINF_SUCCESS;
5694
5695 case IEMMODE_64BIT:
5696 IEM_MC_BEGIN(4, 2);
5697 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5698 IEM_MC_ARG(uint64_t, u64Src, 1);
5699 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5700 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5702
5703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5704 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5705 IEM_MC_ASSIGN(cShiftArg, cShift);
5706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5707 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5708 IEM_MC_FETCH_EFLAGS(EFlags);
5709 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5710 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5711
5712 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5713 IEM_MC_COMMIT_EFLAGS(EFlags);
5714 IEM_MC_ADVANCE_RIP();
5715 IEM_MC_END();
5716 return VINF_SUCCESS;
5717
5718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5719 }
5720 }
5721}
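
/* Recap of the double precision shift the workers above implement
   (architectural behaviour, worked example only): shld dst,src,c computes
   dst = (dst << c) | (src >> (width - c)), shrd shifts the other way.  E.g.
   a 16-bit shld with dst=0x1234, src=0xabcd, c=4 yields 0x234a.  The count
   is masked modulo 32 (modulo 64 for 64-bit operands); exactly where that
   masking happens is up to the g_iemAImpl_shld/shrd implementations. */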
5722
5723
5724/**
5725 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5726 */
5727FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5728{
5729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5730 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5731
5732 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5733 {
5734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5735
5736 switch (pVCpu->iem.s.enmEffOpSize)
5737 {
5738 case IEMMODE_16BIT:
5739 IEM_MC_BEGIN(4, 0);
5740 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5741 IEM_MC_ARG(uint16_t, u16Src, 1);
5742 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5743 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5744
5745 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5746 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5747 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5748 IEM_MC_REF_EFLAGS(pEFlags);
5749 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5750
5751 IEM_MC_ADVANCE_RIP();
5752 IEM_MC_END();
5753 return VINF_SUCCESS;
5754
5755 case IEMMODE_32BIT:
5756 IEM_MC_BEGIN(4, 0);
5757 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5758 IEM_MC_ARG(uint32_t, u32Src, 1);
5759 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5760 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5761
5762 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5763 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5764 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5765 IEM_MC_REF_EFLAGS(pEFlags);
5766 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5767
5768 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5769 IEM_MC_ADVANCE_RIP();
5770 IEM_MC_END();
5771 return VINF_SUCCESS;
5772
5773 case IEMMODE_64BIT:
5774 IEM_MC_BEGIN(4, 0);
5775 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5776 IEM_MC_ARG(uint64_t, u64Src, 1);
5777 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5778 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5779
5780 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5781 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5782 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5783 IEM_MC_REF_EFLAGS(pEFlags);
5784 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5785
5786 IEM_MC_ADVANCE_RIP();
5787 IEM_MC_END();
5788 return VINF_SUCCESS;
5789
5790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5791 }
5792 }
5793 else
5794 {
5795 switch (pVCpu->iem.s.enmEffOpSize)
5796 {
5797 case IEMMODE_16BIT:
5798 IEM_MC_BEGIN(4, 2);
5799 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5800 IEM_MC_ARG(uint16_t, u16Src, 1);
5801 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5802 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5803 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5804
5805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5807 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5808 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5809 IEM_MC_FETCH_EFLAGS(EFlags);
5810 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5811 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5812
5813 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5814 IEM_MC_COMMIT_EFLAGS(EFlags);
5815 IEM_MC_ADVANCE_RIP();
5816 IEM_MC_END();
5817 return VINF_SUCCESS;
5818
5819 case IEMMODE_32BIT:
5820 IEM_MC_BEGIN(4, 2);
5821 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5822 IEM_MC_ARG(uint32_t, u32Src, 1);
5823 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5824 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5826
5827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5829 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5830 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5831 IEM_MC_FETCH_EFLAGS(EFlags);
5832 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5833 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5834
5835 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5836 IEM_MC_COMMIT_EFLAGS(EFlags);
5837 IEM_MC_ADVANCE_RIP();
5838 IEM_MC_END();
5839 return VINF_SUCCESS;
5840
5841 case IEMMODE_64BIT:
5842 IEM_MC_BEGIN(4, 2);
5843 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5844 IEM_MC_ARG(uint64_t, u64Src, 1);
5845 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5846 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5848
5849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5851 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5852 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5853 IEM_MC_FETCH_EFLAGS(EFlags);
5854 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5855 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5856
5857 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5858 IEM_MC_COMMIT_EFLAGS(EFlags);
5859 IEM_MC_ADVANCE_RIP();
5860 IEM_MC_END();
5861 return VINF_SUCCESS;
5862
5863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5864 }
5865 }
5866}
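
/* The CL variant reads the shift count from CL at execution time; the same
   modulo-32/64 masking applies.  Note that for 16-bit operands a masked
   count of 16..31 is still possible, and the result is architecturally
   undefined in that case. */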
5867
5868
5869
5870/** Opcode 0x0f 0xa4. */
5871FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5872{
5873 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5874 IEMOP_HLP_MIN_386();
5875 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5876}
5877
5878
5879/** Opcode 0x0f 0xa5. */
5880FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5881{
5882 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5883 IEMOP_HLP_MIN_386();
5884 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5885}
5886
5887
5888/** Opcode 0x0f 0xa8. */
5889FNIEMOP_DEF(iemOp_push_gs)
5890{
5891 IEMOP_MNEMONIC(push_gs, "push gs");
5892 IEMOP_HLP_MIN_386();
5893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5894 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5895}
5896
5897
5898/** Opcode 0x0f 0xa9. */
5899FNIEMOP_DEF(iemOp_pop_gs)
5900{
5901 IEMOP_MNEMONIC(pop_gs, "pop gs");
5902 IEMOP_HLP_MIN_386();
5903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5904 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5905}
5906
5907
5908/** Opcode 0x0f 0xaa. */
5909FNIEMOP_DEF(iemOp_rsm)
5910{
5911 IEMOP_MNEMONIC(rsm, "rsm");
5912 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
5913 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
5914 * intercept). */
5915 IEMOP_BITCH_ABOUT_STUB();
5916 return IEMOP_RAISE_INVALID_OPCODE();
5917}
5918
5921
5922/** Opcode 0x0f 0xab. */
5923FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5924{
5925 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5926 IEMOP_HLP_MIN_386();
5927 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5928}
5929
5930
5931/** Opcode 0x0f 0xac. */
5932FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5933{
5934 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5935 IEMOP_HLP_MIN_386();
5936 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5937}
5938
5939
5940/** Opcode 0x0f 0xad. */
5941FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5942{
5943 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5944 IEMOP_HLP_MIN_386();
5945 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5946}
5947
5948
5949/** Opcode 0x0f 0xae mem/0. */
5950FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5951{
5952 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5953 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5954 return IEMOP_RAISE_INVALID_OPCODE();
5955
5956 IEM_MC_BEGIN(3, 1);
5957 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5958 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5959 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5962 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5963 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5964 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5965 IEM_MC_END();
5966 return VINF_SUCCESS;
5967}
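
/* fxsave/fxrstor operate on the 512-byte FXSAVE area, hence the "m512" in
   the mnemonics.  Architecturally the memory operand must be 16-byte aligned
   or #GP is raised; that check, like the CR0.TS/CR0.EM gating, presumably
   lives in the iemCImpl_fxsave/fxrstor workers defined elsewhere rather than
   in the decoder functions above. */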
5968
5969
5970/** Opcode 0x0f 0xae mem/1. */
5971FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5972{
5973 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5974 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5975 return IEMOP_RAISE_INVALID_OPCODE();
5976
5977 IEM_MC_BEGIN(3, 1);
5978 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5979 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5980 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5983 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5984 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5985 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5986 IEM_MC_END();
5987 return VINF_SUCCESS;
5988}
5989
5990
5991/**
5992 * @opmaps grp15
5993 * @opcode !11/2
5994 * @oppfx none
5995 * @opcpuid sse
5996 * @opgroup og_sse_mxcsrsm
5997 * @opxcpttype 5
5998 * @optest op1=0 -> mxcsr=0
5999 * @optest op1=0x2083 -> mxcsr=0x2083
6000 * @optest op1=0xfffffffe -> value.xcpt=0xd
6001 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6002 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6003 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6004 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6005 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6006 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6007 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6008 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6009 */
6010FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6011{
6012 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6013 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6014 return IEMOP_RAISE_INVALID_OPCODE();
6015
6016 IEM_MC_BEGIN(2, 0);
6017 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6018 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6021 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6022 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6023 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6024 IEM_MC_END();
6025 return VINF_SUCCESS;
6026}
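
/* The @optest rows above spell out the ldmxcsr fault rules: setting reserved
   MXCSR bits yields #GP (xcpt 0xd), CR0.EM=1 or CR4.OSFXSR=0 yields #UD
   (xcpt 0x6), CR0.TS=1 yields #NM (xcpt 0x7), and #UD takes precedence over
   #NM when both conditions apply. */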
6027
6028
6029/**
6030 * @opmaps grp15
6031 * @opcode !11/3
6032 * @oppfx none
6033 * @opcpuid sse
6034 * @opgroup og_sse_mxcsrsm
6035 * @opxcpttype 5
6036 * @optest mxcsr=0 -> op1=0
6037 * @optest mxcsr=0x2083 -> op1=0x2083
6038 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6039 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6040 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6041 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6042 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6043 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6044 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6045 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6046 */
6047FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6048{
6049 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6050 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6051 return IEMOP_RAISE_INVALID_OPCODE();
6052
6053 IEM_MC_BEGIN(2, 0);
6054 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6055 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6058 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6059 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6060 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6061 IEM_MC_END();
6062 return VINF_SUCCESS;
6063}
6064
6065
6066/**
6067 * @opmaps grp15
6068 * @opcode !11/4
6069 * @oppfx none
6070 * @opcpuid xsave
6071 * @opgroup og_system
6072 * @opxcpttype none
6073 */
6074FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6075{
6076 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
6077 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6078 return IEMOP_RAISE_INVALID_OPCODE();
6079
6080 IEM_MC_BEGIN(3, 0);
6081 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6082 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6083 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6086 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6087 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6088 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6089 IEM_MC_END();
6090 return VINF_SUCCESS;
6091}
6092
6093
6094/**
6095 * @opmaps grp15
6096 * @opcode !11/5
6097 * @oppfx none
6098 * @opcpuid xsave
6099 * @opgroup og_system
6100 * @opxcpttype none
6101 */
6102FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6103{
6104 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6105 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6106 return IEMOP_RAISE_INVALID_OPCODE();
6107
6108 IEM_MC_BEGIN(3, 0);
6109 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6110 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6111 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 6114    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, like fxrstor above */
6115 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6116 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6117 IEM_MC_END();
6118 return VINF_SUCCESS;
6119}
6120
6121/** Opcode 0x0f 0xae mem/6. */
6122FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6123
6124/**
6125 * @opmaps grp15
6126 * @opcode !11/7
6127 * @oppfx none
6128 * @opcpuid clfsh
6129 * @opgroup og_cachectl
6130 * @optest op1=1 ->
6131 */
6132FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6133{
6134 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6135 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6136 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6137
6138 IEM_MC_BEGIN(2, 0);
6139 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6140 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6143 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6144 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6145 IEM_MC_END();
6146 return VINF_SUCCESS;
6147}
6148
6149/**
6150 * @opmaps grp15
6151 * @opcode !11/7
6152 * @oppfx 0x66
6153 * @opcpuid clflushopt
6154 * @opgroup og_cachectl
6155 * @optest op1=1 ->
6156 */
6157FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6158{
6159 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6160 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6161 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6162
6163 IEM_MC_BEGIN(2, 0);
6164 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6165 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6168 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6169 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6170 IEM_MC_END();
6171 return VINF_SUCCESS;
6172}
6173
6174
6175/** Opcode 0x0f 0xae 11b/5. */
6176FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6177{
6178 RT_NOREF_PV(bRm);
6179 IEMOP_MNEMONIC(lfence, "lfence");
6180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6181 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6182 return IEMOP_RAISE_INVALID_OPCODE();
6183
6184 IEM_MC_BEGIN(0, 0);
6185 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6186 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6187 else
6188 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6189 IEM_MC_ADVANCE_RIP();
6190 IEM_MC_END();
6191 return VINF_SUCCESS;
6192}
6193
6194
6195/** Opcode 0x0f 0xae 11b/6. */
6196FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6197{
6198 RT_NOREF_PV(bRm);
6199 IEMOP_MNEMONIC(mfence, "mfence");
6200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6201 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6202 return IEMOP_RAISE_INVALID_OPCODE();
6203
6204 IEM_MC_BEGIN(0, 0);
6205 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6206 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6207 else
6208 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6209 IEM_MC_ADVANCE_RIP();
6210 IEM_MC_END();
6211 return VINF_SUCCESS;
6212}
6213
6214
6215/** Opcode 0x0f 0xae 11b/7. */
6216FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6217{
6218 RT_NOREF_PV(bRm);
6219 IEMOP_MNEMONIC(sfence, "sfence");
6220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6221 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6222 return IEMOP_RAISE_INVALID_OPCODE();
6223
6224 IEM_MC_BEGIN(0, 0);
6225 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6226 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6227 else
6228 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6229 IEM_MC_ADVANCE_RIP();
6230 IEM_MC_END();
6231 return VINF_SUCCESS;
6232}
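
/* All three fences follow the same pattern: if the host CPU itself has SSE2
   the real lfence/mfence/sfence instruction is executed, otherwise
   iemAImpl_alt_mem_fence stands in.  Presumably the fallback is a locked
   read-modify-write (something along the lines of 'lock add dword [esp], 0'),
   which acts as a full fence on any x86 host - the exact instruction is an
   assumption here, the helper lives in the assembly implementation files. */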
6233
6234
6235/** Opcode 0xf3 0x0f 0xae 11b/0. */
6236FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6237
6238/** Opcode 0xf3 0x0f 0xae 11b/1. */
6239FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6240
6241/** Opcode 0xf3 0x0f 0xae 11b/2. */
6242FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6243
6244/** Opcode 0xf3 0x0f 0xae 11b/3. */
6245FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6246
6247
6248/**
6249 * Group 15 jump table for register variant.
6250 */
6251IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6252{ /* pfx: none, 066h, 0f3h, 0f2h */
6253 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6254 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6255 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6256 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6257 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6258 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6259 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6260 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6261};
6262AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6263
6264
6265/**
6266 * Group 15 jump table for memory variant.
6267 */
6268IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6269{ /* pfx: none, 066h, 0f3h, 0f2h */
6270 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6271 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6272 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6273 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6274 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6275 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6276 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6277 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6278};
6279AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6280
6281
6282/** Opcode 0x0f 0xae. */
6283FNIEMOP_DEF(iemOp_Grp15)
6284{
6285 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6287 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6288 /* register, register */
6289 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6290 + pVCpu->iem.s.idxPrefix], bRm);
6291 /* memory, register */
6292 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6293 + pVCpu->iem.s.idxPrefix], bRm);
6294}
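
/* Dispatch example for the tables above: bRm=0xe8 (mod=3, reg=5) with no
   prefix indexes g_apfnGroup15RegReg[5*4 + 0] = iemOp_Grp15_lfence, whereas
   bRm=0x38 (mod=0, reg=7) with a 0x66 prefix (idxPrefix=1) indexes
   g_apfnGroup15MemReg[7*4 + 1] = iemOp_Grp15_clflushopt. */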
6295
6296
6297/** Opcode 0x0f 0xaf. */
6298FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6299{
6300 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6301 IEMOP_HLP_MIN_386();
6302 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6303 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6304}
6305
6306
6307/** Opcode 0x0f 0xb0. */
6308FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6309{
6310 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6311 IEMOP_HLP_MIN_486();
6312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6313
6314 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6315 {
6316 IEMOP_HLP_DONE_DECODING();
6317 IEM_MC_BEGIN(4, 0);
6318 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6319 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6320 IEM_MC_ARG(uint8_t, u8Src, 2);
6321 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6322
6323 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6324 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6325 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6326 IEM_MC_REF_EFLAGS(pEFlags);
6327 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6328 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6329 else
6330 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6331
6332 IEM_MC_ADVANCE_RIP();
6333 IEM_MC_END();
6334 }
6335 else
6336 {
6337 IEM_MC_BEGIN(4, 3);
6338 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6339 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6340 IEM_MC_ARG(uint8_t, u8Src, 2);
6341 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6343 IEM_MC_LOCAL(uint8_t, u8Al);
6344
6345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6346 IEMOP_HLP_DONE_DECODING();
6347 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6348 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6349 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6350 IEM_MC_FETCH_EFLAGS(EFlags);
6351 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6352 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6353 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6354 else
6355 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6356
6357 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6358 IEM_MC_COMMIT_EFLAGS(EFlags);
6359 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6360 IEM_MC_ADVANCE_RIP();
6361 IEM_MC_END();
6362 }
6363 return VINF_SUCCESS;
6364}
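
/* cmpxchg recap (worked example): with AL=0x10, the byte at [mem]=0x10 and
   Gb=0x22 the compare succeeds, so ZF=1 and [mem] becomes 0x22 while AL
   keeps its value.  With [mem]=0x33 the compare fails, so ZF=0 and AL
   becomes 0x33.  The memory path above stores u8Al back into AL
   unconditionally; on success that merely rewrites the old value, so the
   observable result matches the conditional write-back the SDM describes. */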
6365
6366/** Opcode 0x0f 0xb1. */
6367FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6368{
6369 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6370 IEMOP_HLP_MIN_486();
6371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6372
6373 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6374 {
6375 IEMOP_HLP_DONE_DECODING();
6376 switch (pVCpu->iem.s.enmEffOpSize)
6377 {
6378 case IEMMODE_16BIT:
6379 IEM_MC_BEGIN(4, 0);
6380 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6381 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6382 IEM_MC_ARG(uint16_t, u16Src, 2);
6383 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6384
6385 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6386 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6387 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6388 IEM_MC_REF_EFLAGS(pEFlags);
6389 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6390 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6391 else
6392 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6393
6394 IEM_MC_ADVANCE_RIP();
6395 IEM_MC_END();
6396 return VINF_SUCCESS;
6397
6398 case IEMMODE_32BIT:
6399 IEM_MC_BEGIN(4, 0);
6400 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6401 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6402 IEM_MC_ARG(uint32_t, u32Src, 2);
6403 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6404
6405 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6406 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6407 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6408 IEM_MC_REF_EFLAGS(pEFlags);
6409 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6410 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6411 else
6412 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6413
6414 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6415 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6416 IEM_MC_ADVANCE_RIP();
6417 IEM_MC_END();
6418 return VINF_SUCCESS;
6419
6420 case IEMMODE_64BIT:
6421 IEM_MC_BEGIN(4, 0);
6422 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6423 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6424#ifdef RT_ARCH_X86
6425 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6426#else
6427 IEM_MC_ARG(uint64_t, u64Src, 2);
6428#endif
6429 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6430
6431 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6432 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6433 IEM_MC_REF_EFLAGS(pEFlags);
6434#ifdef RT_ARCH_X86
6435 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6436 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6437 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6438 else
6439 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6440#else
6441 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6442 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6443 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6444 else
6445 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6446#endif
6447
6448 IEM_MC_ADVANCE_RIP();
6449 IEM_MC_END();
6450 return VINF_SUCCESS;
6451
6452 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6453 }
6454 }
6455 else
6456 {
6457 switch (pVCpu->iem.s.enmEffOpSize)
6458 {
6459 case IEMMODE_16BIT:
6460 IEM_MC_BEGIN(4, 3);
6461 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6462 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6463 IEM_MC_ARG(uint16_t, u16Src, 2);
6464 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6466 IEM_MC_LOCAL(uint16_t, u16Ax);
6467
6468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6469 IEMOP_HLP_DONE_DECODING();
6470 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6471 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6472 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6473 IEM_MC_FETCH_EFLAGS(EFlags);
6474 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6475 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6476 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6477 else
6478 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6479
6480 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6481 IEM_MC_COMMIT_EFLAGS(EFlags);
6482 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6483 IEM_MC_ADVANCE_RIP();
6484 IEM_MC_END();
6485 return VINF_SUCCESS;
6486
6487 case IEMMODE_32BIT:
6488 IEM_MC_BEGIN(4, 3);
6489 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6490 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6491 IEM_MC_ARG(uint32_t, u32Src, 2);
6492 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6494 IEM_MC_LOCAL(uint32_t, u32Eax);
6495
6496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6497 IEMOP_HLP_DONE_DECODING();
6498 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6499 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6500 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6501 IEM_MC_FETCH_EFLAGS(EFlags);
6502 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6503 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6504 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6505 else
6506 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6507
6508 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6509 IEM_MC_COMMIT_EFLAGS(EFlags);
6510 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6511 IEM_MC_ADVANCE_RIP();
6512 IEM_MC_END();
6513 return VINF_SUCCESS;
6514
6515 case IEMMODE_64BIT:
6516 IEM_MC_BEGIN(4, 3);
6517 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6518 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6519#ifdef RT_ARCH_X86
6520 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6521#else
6522 IEM_MC_ARG(uint64_t, u64Src, 2);
6523#endif
6524 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6526 IEM_MC_LOCAL(uint64_t, u64Rax);
6527
6528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6529 IEMOP_HLP_DONE_DECODING();
6530 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6531 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6532 IEM_MC_FETCH_EFLAGS(EFlags);
6533 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6534#ifdef RT_ARCH_X86
6535 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6536 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6537 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6538 else
6539 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6540#else
6541 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6542 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6543 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6544 else
6545 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6546#endif
6547
6548 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6549 IEM_MC_COMMIT_EFLAGS(EFlags);
6550 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6551 IEM_MC_ADVANCE_RIP();
6552 IEM_MC_END();
6553 return VINF_SUCCESS;
6554
6555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6556 }
6557 }
6558}
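
/* The RT_ARCH_X86 special case in the 64-bit paths above passes the source
   operand by reference (pu64Src) instead of by value.  Presumably this suits
   the 32-bit host assembly helpers, which cannot receive a 64-bit value in a
   single register and take a pointer instead - an inference from this code,
   the helper signatures are declared elsewhere. */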
6559
6560
6561FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6562{
6563 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6564 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6565
6566 switch (pVCpu->iem.s.enmEffOpSize)
6567 {
6568 case IEMMODE_16BIT:
6569 IEM_MC_BEGIN(5, 1);
6570 IEM_MC_ARG(uint16_t, uSel, 0);
6571 IEM_MC_ARG(uint16_t, offSeg, 1);
6572 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6573 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6574 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6575 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6578 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6579 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6580 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6581 IEM_MC_END();
6582 return VINF_SUCCESS;
6583
6584 case IEMMODE_32BIT:
6585 IEM_MC_BEGIN(5, 1);
6586 IEM_MC_ARG(uint16_t, uSel, 0);
6587 IEM_MC_ARG(uint32_t, offSeg, 1);
6588 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6589 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6590 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6591 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6594 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6595 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6596 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6597 IEM_MC_END();
6598 return VINF_SUCCESS;
6599
6600 case IEMMODE_64BIT:
6601 IEM_MC_BEGIN(5, 1);
6602 IEM_MC_ARG(uint16_t, uSel, 0);
6603 IEM_MC_ARG(uint64_t, offSeg, 1);
6604 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6605 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6606 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6607 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6610 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6611 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6612 else
6613 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6614 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6615 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6616 IEM_MC_END();
6617 return VINF_SUCCESS;
6618
6619 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6620 }
6621}
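
/* Memory layout consumed by the worker above (lss/lfs/lgs Gv,Mp): the
   offset comes first and the 16-bit selector follows it.  So m16:16 reads
   the offset word at [GCPtrEff] and the selector at [GCPtrEff+2], m16:32 at
   +4 and m16:64 at +8, matching the IEM_MC_FETCH_MEM_U16_DISP
   displacements. */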
6622
6623
6624/** Opcode 0x0f 0xb2. */
6625FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6626{
6627 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6628 IEMOP_HLP_MIN_386();
6629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6630 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6631 return IEMOP_RAISE_INVALID_OPCODE();
6632 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6633}
6634
6635
6636/** Opcode 0x0f 0xb3. */
6637FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6638{
6639 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6640 IEMOP_HLP_MIN_386();
6641 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6642}
6643
6644
6645/** Opcode 0x0f 0xb4. */
6646FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6647{
6648 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6649 IEMOP_HLP_MIN_386();
6650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6651 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6652 return IEMOP_RAISE_INVALID_OPCODE();
6653 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6654}
6655
6656
6657/** Opcode 0x0f 0xb5. */
6658FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6659{
6660 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6661 IEMOP_HLP_MIN_386();
6662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6663 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6664 return IEMOP_RAISE_INVALID_OPCODE();
6665 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6666}
6667
6668
6669/** Opcode 0x0f 0xb6. */
6670FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6671{
6672 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6673 IEMOP_HLP_MIN_386();
6674
6675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6676
6677 /*
6678 * If rm is denoting a register, no more instruction bytes.
6679 */
6680 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6681 {
6682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6683 switch (pVCpu->iem.s.enmEffOpSize)
6684 {
6685 case IEMMODE_16BIT:
6686 IEM_MC_BEGIN(0, 1);
6687 IEM_MC_LOCAL(uint16_t, u16Value);
6688 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6689 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6690 IEM_MC_ADVANCE_RIP();
6691 IEM_MC_END();
6692 return VINF_SUCCESS;
6693
6694 case IEMMODE_32BIT:
6695 IEM_MC_BEGIN(0, 1);
6696 IEM_MC_LOCAL(uint32_t, u32Value);
6697 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6698 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6699 IEM_MC_ADVANCE_RIP();
6700 IEM_MC_END();
6701 return VINF_SUCCESS;
6702
6703 case IEMMODE_64BIT:
6704 IEM_MC_BEGIN(0, 1);
6705 IEM_MC_LOCAL(uint64_t, u64Value);
6706 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6707 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6708 IEM_MC_ADVANCE_RIP();
6709 IEM_MC_END();
6710 return VINF_SUCCESS;
6711
6712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6713 }
6714 }
6715 else
6716 {
6717 /*
6718 * We're loading a register from memory.
6719 */
6720 switch (pVCpu->iem.s.enmEffOpSize)
6721 {
6722 case IEMMODE_16BIT:
6723 IEM_MC_BEGIN(0, 2);
6724 IEM_MC_LOCAL(uint16_t, u16Value);
6725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6728 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6729 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6730 IEM_MC_ADVANCE_RIP();
6731 IEM_MC_END();
6732 return VINF_SUCCESS;
6733
6734 case IEMMODE_32BIT:
6735 IEM_MC_BEGIN(0, 2);
6736 IEM_MC_LOCAL(uint32_t, u32Value);
6737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6740 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6741 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6742 IEM_MC_ADVANCE_RIP();
6743 IEM_MC_END();
6744 return VINF_SUCCESS;
6745
6746 case IEMMODE_64BIT:
6747 IEM_MC_BEGIN(0, 2);
6748 IEM_MC_LOCAL(uint64_t, u64Value);
6749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6752 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6753 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6754 IEM_MC_ADVANCE_RIP();
6755 IEM_MC_END();
6756 return VINF_SUCCESS;
6757
6758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6759 }
6760 }
6761}
6762
6763
6764/** Opcode 0x0f 0xb7. */
6765FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6766{
6767 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6768 IEMOP_HLP_MIN_386();
6769
6770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6771
 6772    /** @todo Not entirely sure how the operand size prefix is handled here,
 6773     *        assuming that it will be ignored. Would be nice to have a few
 6774     *        tests for this. */
6775 /*
6776 * If rm is denoting a register, no more instruction bytes.
6777 */
6778 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6779 {
6780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6781 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6782 {
6783 IEM_MC_BEGIN(0, 1);
6784 IEM_MC_LOCAL(uint32_t, u32Value);
6785 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6786 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6787 IEM_MC_ADVANCE_RIP();
6788 IEM_MC_END();
6789 }
6790 else
6791 {
6792 IEM_MC_BEGIN(0, 1);
6793 IEM_MC_LOCAL(uint64_t, u64Value);
6794 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6795 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6796 IEM_MC_ADVANCE_RIP();
6797 IEM_MC_END();
6798 }
6799 }
6800 else
6801 {
6802 /*
6803 * We're loading a register from memory.
6804 */
6805 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6806 {
6807 IEM_MC_BEGIN(0, 2);
6808 IEM_MC_LOCAL(uint32_t, u32Value);
6809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6812 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6813 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6814 IEM_MC_ADVANCE_RIP();
6815 IEM_MC_END();
6816 }
6817 else
6818 {
6819 IEM_MC_BEGIN(0, 2);
6820 IEM_MC_LOCAL(uint64_t, u64Value);
6821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6824 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6825 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6826 IEM_MC_ADVANCE_RIP();
6827 IEM_MC_END();
6828 }
6829 }
6830 return VINF_SUCCESS;
6831}
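
/* Regarding the @todo above: as the code stands, a 0x66 prefix (16-bit
   effective operand size) falls into the !IEMMODE_64BIT branch and stores a
   full 32-bit result, i.e. the prefix is effectively treated as a 32-bit
   operand size.  Whether real CPUs agree is precisely what the missing
   tests would tell us. */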
6832
6833
6834/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6835FNIEMOP_UD_STUB(iemOp_jmpe);
6836/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6837FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6838
6839
6840/**
6841 * @opcode 0xb9
6842 * @opinvalid intel-modrm
6843 * @optest ->
6844 */
6845FNIEMOP_DEF(iemOp_Grp10)
6846{
6847 /*
 6848     * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
 6849     * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6850 */
6851 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6852 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6853 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6854}
6855
6856
6857/** Opcode 0x0f 0xba. */
6858FNIEMOP_DEF(iemOp_Grp8)
6859{
6860 IEMOP_HLP_MIN_386();
6861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6862 PCIEMOPBINSIZES pImpl;
6863 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6864 {
6865 case 0: case 1: case 2: case 3:
6866 /* Both AMD and Intel want full modr/m decoding and imm8. */
6867 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6868 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6869 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6870 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6871 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6873 }
6874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6875
6876 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6877 {
6878 /* register destination. */
6879 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6881
6882 switch (pVCpu->iem.s.enmEffOpSize)
6883 {
6884 case IEMMODE_16BIT:
6885 IEM_MC_BEGIN(3, 0);
6886 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6887 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6889
6890 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6891 IEM_MC_REF_EFLAGS(pEFlags);
6892 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6893
6894 IEM_MC_ADVANCE_RIP();
6895 IEM_MC_END();
6896 return VINF_SUCCESS;
6897
6898 case IEMMODE_32BIT:
6899 IEM_MC_BEGIN(3, 0);
6900 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6901 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6902 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6903
6904 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6905 IEM_MC_REF_EFLAGS(pEFlags);
6906 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6907
6908 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6909 IEM_MC_ADVANCE_RIP();
6910 IEM_MC_END();
6911 return VINF_SUCCESS;
6912
6913 case IEMMODE_64BIT:
6914 IEM_MC_BEGIN(3, 0);
6915 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6916 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6917 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6918
6919 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6920 IEM_MC_REF_EFLAGS(pEFlags);
6921 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6922
6923 IEM_MC_ADVANCE_RIP();
6924 IEM_MC_END();
6925 return VINF_SUCCESS;
6926
6927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6928 }
6929 }
6930 else
6931 {
6932 /* memory destination. */
6933
6934 uint32_t fAccess;
6935 if (pImpl->pfnLockedU16)
6936 fAccess = IEM_ACCESS_DATA_RW;
6937 else /* BT */
6938 fAccess = IEM_ACCESS_DATA_R;
6939
6940 /** @todo test negative bit offsets! */
6941 switch (pVCpu->iem.s.enmEffOpSize)
6942 {
6943 case IEMMODE_16BIT:
6944 IEM_MC_BEGIN(3, 1);
6945 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6946 IEM_MC_ARG(uint16_t, u16Src, 1);
6947 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6949
6950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6951 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6952 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6953 if (pImpl->pfnLockedU16)
6954 IEMOP_HLP_DONE_DECODING();
6955 else
6956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6957 IEM_MC_FETCH_EFLAGS(EFlags);
6958 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6959 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6960 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6961 else
6962 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6963 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6964
6965 IEM_MC_COMMIT_EFLAGS(EFlags);
6966 IEM_MC_ADVANCE_RIP();
6967 IEM_MC_END();
6968 return VINF_SUCCESS;
6969
6970 case IEMMODE_32BIT:
6971 IEM_MC_BEGIN(3, 1);
6972 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6973 IEM_MC_ARG(uint32_t, u32Src, 1);
6974 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6976
6977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6978 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6979 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6980 if (pImpl->pfnLockedU16)
6981 IEMOP_HLP_DONE_DECODING();
6982 else
6983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6984 IEM_MC_FETCH_EFLAGS(EFlags);
6985 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6986 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6987 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6988 else
6989 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6990 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6991
6992 IEM_MC_COMMIT_EFLAGS(EFlags);
6993 IEM_MC_ADVANCE_RIP();
6994 IEM_MC_END();
6995 return VINF_SUCCESS;
6996
6997 case IEMMODE_64BIT:
6998 IEM_MC_BEGIN(3, 1);
6999 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7000 IEM_MC_ARG(uint64_t, u64Src, 1);
7001 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7003
7004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7005 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7006 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7007 if (pImpl->pfnLockedU16)
7008 IEMOP_HLP_DONE_DECODING();
7009 else
7010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7011 IEM_MC_FETCH_EFLAGS(EFlags);
7012 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7013 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7014 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7015 else
7016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7017 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7018
7019 IEM_MC_COMMIT_EFLAGS(EFlags);
7020 IEM_MC_ADVANCE_RIP();
7021 IEM_MC_END();
7022 return VINF_SUCCESS;
7023
7024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7025 }
7026 }
7027}
7028
7029
7030/** Opcode 0x0f 0xbb. */
7031FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7032{
7033 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7034 IEMOP_HLP_MIN_386();
7035 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7036}
7037
7038
7039/** Opcode 0x0f 0xbc. */
7040FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7041{
7042 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7043 IEMOP_HLP_MIN_386();
7044 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7045 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7046}
7047
7048
7049/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7050FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7051
7052
7053/** Opcode 0x0f 0xbd. */
7054FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7055{
7056 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7057 IEMOP_HLP_MIN_386();
7058 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7059 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7060}
7061
7062
7063/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7064FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7065
7066
7067/** Opcode 0x0f 0xbe. */
7068FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7069{
7070 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7071 IEMOP_HLP_MIN_386();
7072
7073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7074
7075 /*
7076 * If rm is denoting a register, no more instruction bytes.
7077 */
7078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7079 {
7080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7081 switch (pVCpu->iem.s.enmEffOpSize)
7082 {
7083 case IEMMODE_16BIT:
7084 IEM_MC_BEGIN(0, 1);
7085 IEM_MC_LOCAL(uint16_t, u16Value);
7086 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7087 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7088 IEM_MC_ADVANCE_RIP();
7089 IEM_MC_END();
7090 return VINF_SUCCESS;
7091
7092 case IEMMODE_32BIT:
7093 IEM_MC_BEGIN(0, 1);
7094 IEM_MC_LOCAL(uint32_t, u32Value);
7095 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7096 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7097 IEM_MC_ADVANCE_RIP();
7098 IEM_MC_END();
7099 return VINF_SUCCESS;
7100
7101 case IEMMODE_64BIT:
7102 IEM_MC_BEGIN(0, 1);
7103 IEM_MC_LOCAL(uint64_t, u64Value);
7104 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7105 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7106 IEM_MC_ADVANCE_RIP();
7107 IEM_MC_END();
7108 return VINF_SUCCESS;
7109
7110 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7111 }
7112 }
7113 else
7114 {
7115 /*
7116 * We're loading a register from memory.
7117 */
7118 switch (pVCpu->iem.s.enmEffOpSize)
7119 {
7120 case IEMMODE_16BIT:
7121 IEM_MC_BEGIN(0, 2);
7122 IEM_MC_LOCAL(uint16_t, u16Value);
7123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7126 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7127 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7128 IEM_MC_ADVANCE_RIP();
7129 IEM_MC_END();
7130 return VINF_SUCCESS;
7131
7132 case IEMMODE_32BIT:
7133 IEM_MC_BEGIN(0, 2);
7134 IEM_MC_LOCAL(uint32_t, u32Value);
7135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7138 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7139 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7140 IEM_MC_ADVANCE_RIP();
7141 IEM_MC_END();
7142 return VINF_SUCCESS;
7143
7144 case IEMMODE_64BIT:
7145 IEM_MC_BEGIN(0, 2);
7146 IEM_MC_LOCAL(uint64_t, u64Value);
7147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7150 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7151 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7152 IEM_MC_ADVANCE_RIP();
7153 IEM_MC_END();
7154 return VINF_SUCCESS;
7155
7156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7157 }
7158 }
7159}
7160
7161
7162/** Opcode 0x0f 0xbf. */
7163FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7164{
7165 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7166 IEMOP_HLP_MIN_386();
7167
7168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7169
7170 /** @todo Not entirely sure how the operand size prefix is handled here,
7171 * assuming that it will be ignored. Would be nice to have a few
7172 * tests for this. */
7173 /*
7174 * If rm is denoting a register, no more instruction bytes.
7175 */
7176 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7177 {
7178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7179 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7180 {
7181 IEM_MC_BEGIN(0, 1);
7182 IEM_MC_LOCAL(uint32_t, u32Value);
7183 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7184 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7185 IEM_MC_ADVANCE_RIP();
7186 IEM_MC_END();
7187 }
7188 else
7189 {
7190 IEM_MC_BEGIN(0, 1);
7191 IEM_MC_LOCAL(uint64_t, u64Value);
7192 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7193 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7194 IEM_MC_ADVANCE_RIP();
7195 IEM_MC_END();
7196 }
7197 }
7198 else
7199 {
7200 /*
7201 * We're loading a register from memory.
7202 */
7203 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7204 {
7205 IEM_MC_BEGIN(0, 2);
7206 IEM_MC_LOCAL(uint32_t, u32Value);
7207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7210 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7211 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7212 IEM_MC_ADVANCE_RIP();
7213 IEM_MC_END();
7214 }
7215 else
7216 {
7217 IEM_MC_BEGIN(0, 2);
7218 IEM_MC_LOCAL(uint64_t, u64Value);
7219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7222 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7223 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7224 IEM_MC_ADVANCE_RIP();
7225 IEM_MC_END();
7226 }
7227 }
7228 return VINF_SUCCESS;
7229}
7230
7231
7232/** Opcode 0x0f 0xc0. */
7233FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7234{
7235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7236 IEMOP_HLP_MIN_486();
7237 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7238
7239 /*
7240 * If rm is denoting a register, no more instruction bytes.
7241 */
7242 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7243 {
7244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7245
7246 IEM_MC_BEGIN(3, 0);
7247 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7248 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7249 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7250
7251 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7252 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7253 IEM_MC_REF_EFLAGS(pEFlags);
7254 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7255
7256 IEM_MC_ADVANCE_RIP();
7257 IEM_MC_END();
7258 }
7259 else
7260 {
7261 /*
7262 * We're accessing memory.
7263 */
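        /* Note: xadd exchanges the register operand with the original memory
           value, so the helper works on a local copy of the register which
           is written back once the memory operand has been committed. */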
7264 IEM_MC_BEGIN(3, 3);
7265 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7266 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7267 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7268 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7270
7271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7272 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7273 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7274 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7275 IEM_MC_FETCH_EFLAGS(EFlags);
7276 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7277 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7278 else
7279 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7280
7281 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7282 IEM_MC_COMMIT_EFLAGS(EFlags);
7283 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7284 IEM_MC_ADVANCE_RIP();
7285 IEM_MC_END();
7286 return VINF_SUCCESS;
7287 }
7288 return VINF_SUCCESS;
7289}
7290
7291
7292/** Opcode 0x0f 0xc1. */
7293FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7294{
7295 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7296 IEMOP_HLP_MIN_486();
7297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7298
7299 /*
7300 * If rm is denoting a register, no more instruction bytes.
7301 */
7302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7303 {
7304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7305
7306 switch (pVCpu->iem.s.enmEffOpSize)
7307 {
7308 case IEMMODE_16BIT:
7309 IEM_MC_BEGIN(3, 0);
7310 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7311 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7312 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7313
7314 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7315 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7316 IEM_MC_REF_EFLAGS(pEFlags);
7317 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7318
7319 IEM_MC_ADVANCE_RIP();
7320 IEM_MC_END();
7321 return VINF_SUCCESS;
7322
7323 case IEMMODE_32BIT:
7324 IEM_MC_BEGIN(3, 0);
7325 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7326 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7327 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7328
7329 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7330 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7331 IEM_MC_REF_EFLAGS(pEFlags);
7332 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7333
7334 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7335 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7336 IEM_MC_ADVANCE_RIP();
7337 IEM_MC_END();
7338 return VINF_SUCCESS;
7339
7340 case IEMMODE_64BIT:
7341 IEM_MC_BEGIN(3, 0);
7342 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7343 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7344 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7345
7346 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7347 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7348 IEM_MC_REF_EFLAGS(pEFlags);
7349 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7350
7351 IEM_MC_ADVANCE_RIP();
7352 IEM_MC_END();
7353 return VINF_SUCCESS;
7354
7355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7356 }
7357 }
7358 else
7359 {
7360 /*
7361 * We're accessing memory.
7362 */
7363 switch (pVCpu->iem.s.enmEffOpSize)
7364 {
7365 case IEMMODE_16BIT:
7366 IEM_MC_BEGIN(3, 3);
7367 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7368 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7369 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7370 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7372
7373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7374 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7375 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7376 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7377 IEM_MC_FETCH_EFLAGS(EFlags);
7378 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7379 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7380 else
7381 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7382
7383 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7384 IEM_MC_COMMIT_EFLAGS(EFlags);
7385 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7386 IEM_MC_ADVANCE_RIP();
7387 IEM_MC_END();
7388 return VINF_SUCCESS;
7389
7390 case IEMMODE_32BIT:
7391 IEM_MC_BEGIN(3, 3);
7392 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7393 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7394 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7395 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7397
7398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7399 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7400 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7401 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7402 IEM_MC_FETCH_EFLAGS(EFlags);
7403 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7404 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7405 else
7406 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7407
7408 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7409 IEM_MC_COMMIT_EFLAGS(EFlags);
7410 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7411 IEM_MC_ADVANCE_RIP();
7412 IEM_MC_END();
7413 return VINF_SUCCESS;
7414
7415 case IEMMODE_64BIT:
7416 IEM_MC_BEGIN(3, 3);
7417 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7418 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7419 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7420 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7422
7423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7424 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7425 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7426 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7427 IEM_MC_FETCH_EFLAGS(EFlags);
7428 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7429 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7430 else
7431 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7432
7433 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7434 IEM_MC_COMMIT_EFLAGS(EFlags);
7435 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7436 IEM_MC_ADVANCE_RIP();
7437 IEM_MC_END();
7438 return VINF_SUCCESS;
7439
7440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7441 }
7442 }
7443}
7444
7445
7446/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7447FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7448/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7449FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7450/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7451FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7452/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7453FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7454
7455
7456/** Opcode 0x0f 0xc3. */
7457FNIEMOP_DEF(iemOp_movnti_My_Gy)
7458{
7459 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7460
7461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7462
7463 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
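    /* Note: the non-temporal hint isn't modelled here; the store is done as
       an ordinary memory write. */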
7464 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7465 {
7466 switch (pVCpu->iem.s.enmEffOpSize)
7467 {
7468 case IEMMODE_32BIT:
7469 IEM_MC_BEGIN(0, 2);
7470 IEM_MC_LOCAL(uint32_t, u32Value);
7471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7472
7473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7475 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7476 return IEMOP_RAISE_INVALID_OPCODE();
7477
7478 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7479 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7480 IEM_MC_ADVANCE_RIP();
7481 IEM_MC_END();
7482 break;
7483
7484 case IEMMODE_64BIT:
7485 IEM_MC_BEGIN(0, 2);
7486 IEM_MC_LOCAL(uint64_t, u64Value);
7487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7488
7489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7491 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7492 return IEMOP_RAISE_INVALID_OPCODE();
7493
7494 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7495 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7496 IEM_MC_ADVANCE_RIP();
7497 IEM_MC_END();
7498 break;
7499
7500 case IEMMODE_16BIT:
7501 /** @todo check this form. */
7502 return IEMOP_RAISE_INVALID_OPCODE();
7503 }
7504 }
7505 else
7506 return IEMOP_RAISE_INVALID_OPCODE();
7507 return VINF_SUCCESS;
7508}
7509/* Opcode 0x66 0x0f 0xc3 - invalid */
7510/* Opcode 0xf3 0x0f 0xc3 - invalid */
7511/* Opcode 0xf2 0x0f 0xc3 - invalid */
7512
7513/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7514FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7515/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7516FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7517/* Opcode 0xf3 0x0f 0xc4 - invalid */
7518/* Opcode 0xf2 0x0f 0xc4 - invalid */
7519
7520/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7521FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7522/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7523FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7524/* Opcode 0xf3 0x0f 0xc5 - invalid */
7525/* Opcode 0xf2 0x0f 0xc5 - invalid */
7526
7527/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7528FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7529/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7530FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7531/* Opcode 0xf3 0x0f 0xc6 - invalid */
7532/* Opcode 0xf2 0x0f 0xc6 - invalid */
7533
7534
7535/** Opcode 0x0f 0xc7 !11/1. */
7536FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7537{
7538 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7539
7540 IEM_MC_BEGIN(4, 3);
7541 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7542 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7543 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7544 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7545 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7546 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7548
7549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7550 IEMOP_HLP_DONE_DECODING();
7551 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7552
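    /* cmpxchg8b compares EDX:EAX with the memory operand and stores ECX:EBX
       on a match, so assemble both register pairs into 64-bit locals. */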
7553 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7554 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7555 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7556
7557 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7558 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7559 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7560
7561 IEM_MC_FETCH_EFLAGS(EFlags);
7562 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7563 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7564 else
7565 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7566
7567 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7568 IEM_MC_COMMIT_EFLAGS(EFlags);
7569 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7570 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7571 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7572 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7573 IEM_MC_ENDIF();
7574 IEM_MC_ADVANCE_RIP();
7575
7576 IEM_MC_END();
7577 return VINF_SUCCESS;
7578}
7579
7580
7581/** Opcode REX.W 0x0f 0xc7 !11/1. */
7582FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7583{
7584 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7585 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7586 {
7587#if 0
7588 RT_NOREF(bRm);
7589 IEMOP_BITCH_ABOUT_STUB();
7590 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7591#else
7592 IEM_MC_BEGIN(4, 3);
7593 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7594 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7595 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7596 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7597 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7598 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7600
7601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7602 IEMOP_HLP_DONE_DECODING();
7603 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7604 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7605
7606 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7607 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7608 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7609
7610 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7611 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7612 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7613
7614 IEM_MC_FETCH_EFLAGS(EFlags);
7615# ifdef RT_ARCH_AMD64
7616 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7617 {
7618 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7619 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7620 else
7621 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7622 }
7623 else
7624# endif
7625 {
7626 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7627 accesses that are not at all atomic, which works fine in a uni-CPU guest
7628 configuration (ignoring DMA). If guest SMP is active we have no choice
7629 but to use a rendezvous callback here. Sigh. */
7630 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7631 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7632 else
7633 {
7634 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7635 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7636 }
7637 }
7638
7639 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7640 IEM_MC_COMMIT_EFLAGS(EFlags);
7641 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7642 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7643 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7644 IEM_MC_ENDIF();
7645 IEM_MC_ADVANCE_RIP();
7646
7647 IEM_MC_END();
7648 return VINF_SUCCESS;
7649#endif
7650 }
7651 Log(("cmpxchg16b -> #UD\n"));
7652 return IEMOP_RAISE_INVALID_OPCODE();
7653}
7654
7655FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7656{
7657 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7658 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7659 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7660}
7661
7662/** Opcode 0x0f 0xc7 11/6. */
7663FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7664
7665/** Opcode 0x0f 0xc7 !11/6. */
7666FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7667
7668/** Opcode 0x66 0x0f 0xc7 !11/6. */
7669FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7670
7671/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7672FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7673
7674/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7675FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7676
7677/** Opcode 0x0f 0xc7 11/7. */
7678FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7679
7680
7681/**
7682 * Group 9 jump table for register variant.
7683 */
7684IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7685{ /* pfx: none, 066h, 0f3h, 0f2h */
7686 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7687 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7688 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7689 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7690 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7691 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7692 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7693 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7694};
7695AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7696
7697
7698/**
7699 * Group 9 jump table for memory variant.
7700 */
7701IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7702{ /* pfx: none, 066h, 0f3h, 0f2h */
7703 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7704 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7705 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7706 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7707 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7708 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7709 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7710 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7711};
7712AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7713
7714
7715/** Opcode 0x0f 0xc7. */
7716FNIEMOP_DEF(iemOp_Grp9)
7717{
7718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
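    /* Both tables are indexed by ModR/M.reg * 4 + the operand prefix index
       (none, 066h, 0f3h, 0f2h), matching the column layout above. */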
7719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7720 /* register, register */
7721 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7722 + pVCpu->iem.s.idxPrefix], bRm);
7723 /* memory, register */
7724 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7725 + pVCpu->iem.s.idxPrefix], bRm);
7726}
7727
7728
7729/**
7730 * Common 'bswap register' helper.
7731 */
7732FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7733{
7734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7735 switch (pVCpu->iem.s.enmEffOpSize)
7736 {
7737 case IEMMODE_16BIT:
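            /* Note: the architecture leaves the result of the 16-bit form
               undefined, which is why the helper is handed a 32-bit register
               reference below. */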
7738 IEM_MC_BEGIN(1, 0);
7739 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7740 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7741 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7742 IEM_MC_ADVANCE_RIP();
7743 IEM_MC_END();
7744 return VINF_SUCCESS;
7745
7746 case IEMMODE_32BIT:
7747 IEM_MC_BEGIN(1, 0);
7748 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7749 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7750 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7751 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7752 IEM_MC_ADVANCE_RIP();
7753 IEM_MC_END();
7754 return VINF_SUCCESS;
7755
7756 case IEMMODE_64BIT:
7757 IEM_MC_BEGIN(1, 0);
7758 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7759 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7760 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7761 IEM_MC_ADVANCE_RIP();
7762 IEM_MC_END();
7763 return VINF_SUCCESS;
7764
7765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7766 }
7767}
7768
7769
7770/** Opcode 0x0f 0xc8. */
7771FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7772{
7773 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7774 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7775 prefix, but it appears REX.B is actually the correct prefix. For a
7776 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7777 IEMOP_HLP_MIN_486();
7778 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7779}
7780
7781
7782/** Opcode 0x0f 0xc9. */
7783FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7784{
7785 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7786 IEMOP_HLP_MIN_486();
7787 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7788}
7789
7790
7791/** Opcode 0x0f 0xca. */
7792FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7793{
7794 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7795 IEMOP_HLP_MIN_486();
7796 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7797}
7798
7799
7800/** Opcode 0x0f 0xcb. */
7801FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7802{
7803 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7804 IEMOP_HLP_MIN_486();
7805 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7806}
7807
7808
7809/** Opcode 0x0f 0xcc. */
7810FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7811{
7812 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7813 IEMOP_HLP_MIN_486();
7814 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7815}
7816
7817
7818/** Opcode 0x0f 0xcd. */
7819FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7820{
7821 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7822 IEMOP_HLP_MIN_486();
7823 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7824}
7825
7826
7827/** Opcode 0x0f 0xce. */
7828FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7829{
7830 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7831 IEMOP_HLP_MIN_486();
7832 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7833}
7834
7835
7836/** Opcode 0x0f 0xcf. */
7837FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7838{
7839 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7840 IEMOP_HLP_MIN_486();
7841 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7842}
7843
7844
7845/* Opcode 0x0f 0xd0 - invalid */
7846/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7847FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7848/* Opcode 0xf3 0x0f 0xd0 - invalid */
7849/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7850FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7851
7852/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7853FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7854/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
7855FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7856/* Opcode 0xf3 0x0f 0xd1 - invalid */
7857/* Opcode 0xf2 0x0f 0xd1 - invalid */
7858
7859/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7860FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7861/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7862FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7863/* Opcode 0xf3 0x0f 0xd2 - invalid */
7864/* Opcode 0xf2 0x0f 0xd2 - invalid */
7865
7866/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7867FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7868/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
7869FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
7870/* Opcode 0xf3 0x0f 0xd3 - invalid */
7871/* Opcode 0xf2 0x0f 0xd3 - invalid */
7872
7873/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7874FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7875/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
7876FNIEMOP_STUB(iemOp_paddq_Vx_W);
7877/* Opcode 0xf3 0x0f 0xd4 - invalid */
7878/* Opcode 0xf2 0x0f 0xd4 - invalid */
7879
7880/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7881FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7882/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
7883FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
7884/* Opcode 0xf3 0x0f 0xd5 - invalid */
7885/* Opcode 0xf2 0x0f 0xd5 - invalid */
7886
7887/* Opcode 0x0f 0xd6 - invalid */
7888
7889/**
7890 * @opcode 0xd6
7891 * @oppfx 0x66
7892 * @opcpuid sse2
7893 * @opgroup og_sse2_pcksclr_datamove
7894 * @opxcpttype none
7895 * @optest op1=-1 op2=2 -> op1=2
7896 * @optest op1=0 op2=-42 -> op1=-42
7897 */
7898FNIEMOP_DEF(iemOp_movq_Wq_Vq)
7899{
7900 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7902 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7903 {
7904 /*
7905 * Register, register.
7906 */
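        /* The register form zero-extends the qword into the full 128-bit
           destination register, as per the WqZxReg operand form above. */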
7907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7908 IEM_MC_BEGIN(0, 2);
7909 IEM_MC_LOCAL(uint64_t, uSrc);
7910
7911 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7912 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7913
7914 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7915 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7916
7917 IEM_MC_ADVANCE_RIP();
7918 IEM_MC_END();
7919 }
7920 else
7921 {
7922 /*
7923 * Memory, register.
7924 */
7925 IEM_MC_BEGIN(0, 2);
7926 IEM_MC_LOCAL(uint64_t, uSrc);
7927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7928
7929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7931 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7932 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7933
7934 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7935 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7936
7937 IEM_MC_ADVANCE_RIP();
7938 IEM_MC_END();
7939 }
7940 return VINF_SUCCESS;
7941}
7942
7943
7944/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7945FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7946/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7947FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7948#if 0
7949FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7950{
7951 /* Docs say register only. */
7952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7953
7954 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7955 {
7956 case IEM_OP_PRF_SIZE_OP: /* SSE */
7957 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7958 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7959 IEM_MC_BEGIN(2, 0);
7960 IEM_MC_ARG(uint64_t *, pDst, 0);
7961 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7962 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7963 IEM_MC_PREPARE_SSE_USAGE();
7964 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7965 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7966 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7967 IEM_MC_ADVANCE_RIP();
7968 IEM_MC_END();
7969 return VINF_SUCCESS;
7970
7971 case 0: /* MMX */
7972 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7973 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7974 IEM_MC_BEGIN(2, 0);
7975 IEM_MC_ARG(uint64_t *, pDst, 0);
7976 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7977 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7978 IEM_MC_PREPARE_FPU_USAGE();
7979 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7980 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7981 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7982 IEM_MC_ADVANCE_RIP();
7983 IEM_MC_END();
7984 return VINF_SUCCESS;
7985
7986 default:
7987 return IEMOP_RAISE_INVALID_OPCODE();
7988 }
7989}
7990#endif
7991
7992
7993/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7994FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7995{
7996 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7997 /** @todo testcase: Check that the instruction implicitly clears the high
7998 * bits in 64-bit mode. The REX.W only becomes necessary when VLMAX > 256
7999 * and opcode modifications are made to work with the whole width (not
8000 * just 128). */
8001 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8002 /* Docs say register only. */
8003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8005 {
8006 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8007 IEM_MC_BEGIN(2, 0);
8008 IEM_MC_ARG(uint64_t *, pDst, 0);
8009 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8010 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8011 IEM_MC_PREPARE_FPU_USAGE();
8012 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8013 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8014 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8015 IEM_MC_ADVANCE_RIP();
8016 IEM_MC_END();
8017 return VINF_SUCCESS;
8018 }
8019 return IEMOP_RAISE_INVALID_OPCODE();
8020}
8021
8022 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8023FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8024{
8025 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8026 /** @todo testcase: Check that the instruction implicitly clears the high
8027 * bits in 64-bit mode. The REX.W only becomes necessary when VLMAX > 256
8028 * and opcode modifications are made to work with the whole width (not
8029 * just 128). */
8030 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8031 /* Docs say register only. */
8032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8033 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8034 {
8035 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8036 IEM_MC_BEGIN(2, 0);
8037 IEM_MC_ARG(uint64_t *, pDst, 0);
8038 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8039 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8040 IEM_MC_PREPARE_SSE_USAGE();
8041 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8042 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8043 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8044 IEM_MC_ADVANCE_RIP();
8045 IEM_MC_END();
8046 return VINF_SUCCESS;
8047 }
8048 return IEMOP_RAISE_INVALID_OPCODE();
8049}
8050
8051/* Opcode 0xf3 0x0f 0xd7 - invalid */
8052/* Opcode 0xf2 0x0f 0xd7 - invalid */
8053
8054
8055/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8056FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8057/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8058FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8059/* Opcode 0xf3 0x0f 0xd8 - invalid */
8060/* Opcode 0xf2 0x0f 0xd8 - invalid */
8061
8062/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8063FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8064/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8065FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8066/* Opcode 0xf3 0x0f 0xd9 - invalid */
8067/* Opcode 0xf2 0x0f 0xd9 - invalid */
8068
8069/** Opcode 0x0f 0xda - pminub Pq, Qq */
8070FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8071/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8072FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8073/* Opcode 0xf3 0x0f 0xda - invalid */
8074/* Opcode 0xf2 0x0f 0xda - invalid */
8075
8076/** Opcode 0x0f 0xdb - pand Pq, Qq */
8077FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8078/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8079FNIEMOP_STUB(iemOp_pand_Vx_W);
8080/* Opcode 0xf3 0x0f 0xdb - invalid */
8081/* Opcode 0xf2 0x0f 0xdb - invalid */
8082
8083/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8084FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8085/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8086FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8087/* Opcode 0xf3 0x0f 0xdc - invalid */
8088/* Opcode 0xf2 0x0f 0xdc - invalid */
8089
8090/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8091FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8092/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8093FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8094/* Opcode 0xf3 0x0f 0xdd - invalid */
8095/* Opcode 0xf2 0x0f 0xdd - invalid */
8096
8097/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8098FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8099/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8100FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8101/* Opcode 0xf3 0x0f 0xde - invalid */
8102/* Opcode 0xf2 0x0f 0xde - invalid */
8103
8104/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8105FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8106/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8107FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8108/* Opcode 0xf3 0x0f 0xdf - invalid */
8109/* Opcode 0xf2 0x0f 0xdf - invalid */
8110
8111/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8112FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8113/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8114FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8115/* Opcode 0xf3 0x0f 0xe0 - invalid */
8116/* Opcode 0xf2 0x0f 0xe0 - invalid */
8117
8118/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8119FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8120/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8121FNIEMOP_STUB(iemOp_psraw_Vx_W);
8122/* Opcode 0xf3 0x0f 0xe1 - invalid */
8123/* Opcode 0xf2 0x0f 0xe1 - invalid */
8124
8125/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8126FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8127/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8128FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8129/* Opcode 0xf3 0x0f 0xe2 - invalid */
8130/* Opcode 0xf2 0x0f 0xe2 - invalid */
8131
8132/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8133FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8134/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8135FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8136/* Opcode 0xf3 0x0f 0xe3 - invalid */
8137/* Opcode 0xf2 0x0f 0xe3 - invalid */
8138
8139/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8140FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8141/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8142FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8143/* Opcode 0xf3 0x0f 0xe4 - invalid */
8144/* Opcode 0xf2 0x0f 0xe4 - invalid */
8145
8146/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8147FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8148/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8149FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8150/* Opcode 0xf3 0x0f 0xe5 - invalid */
8151/* Opcode 0xf2 0x0f 0xe5 - invalid */
8152
8153/* Opcode 0x0f 0xe6 - invalid */
8154/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8155FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8156/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8157FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8158/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8159FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8160
8161
8162/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8163FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8164{
8165 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8167 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8168 {
8169 /* Register, memory. */
8170 IEM_MC_BEGIN(0, 2);
8171 IEM_MC_LOCAL(uint64_t, uSrc);
8172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8173
8174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8176 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8177 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8178
8179 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8180 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8181
8182 IEM_MC_ADVANCE_RIP();
8183 IEM_MC_END();
8184 return VINF_SUCCESS;
8185 }
8186 /* The register, register encoding is invalid. */
8187 return IEMOP_RAISE_INVALID_OPCODE();
8188}
8189
8190/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8191FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8192{
8193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8194 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8195 {
8196 /* Register, memory. */
8197 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8198 IEM_MC_BEGIN(0, 2);
8199 IEM_MC_LOCAL(RTUINT128U, uSrc);
8200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8201
8202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8204 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8205 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8206
8207 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8208 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8209
8210 IEM_MC_ADVANCE_RIP();
8211 IEM_MC_END();
8212 return VINF_SUCCESS;
8213 }
8214
8215 /* The register, register encoding is invalid. */
8216 return IEMOP_RAISE_INVALID_OPCODE();
8217}
8218
8219/* Opcode 0xf3 0x0f 0xe7 - invalid */
8220/* Opcode 0xf2 0x0f 0xe7 - invalid */
8221
8222
8223/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8224FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8225/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8226FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8227/* Opcode 0xf3 0x0f 0xe8 - invalid */
8228/* Opcode 0xf2 0x0f 0xe8 - invalid */
8229
8230/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8231FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8232/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8233FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8234/* Opcode 0xf3 0x0f 0xe9 - invalid */
8235/* Opcode 0xf2 0x0f 0xe9 - invalid */
8236
8237/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8238FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8239/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8240FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8241/* Opcode 0xf3 0x0f 0xea - invalid */
8242/* Opcode 0xf2 0x0f 0xea - invalid */
8243
8244/** Opcode 0x0f 0xeb - por Pq, Qq */
8245FNIEMOP_STUB(iemOp_por_Pq_Qq);
8246/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8247FNIEMOP_STUB(iemOp_por_Vx_W);
8248/* Opcode 0xf3 0x0f 0xeb - invalid */
8249/* Opcode 0xf2 0x0f 0xeb - invalid */
8250
8251/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8252FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8253/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8254FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8255/* Opcode 0xf3 0x0f 0xec - invalid */
8256/* Opcode 0xf2 0x0f 0xec - invalid */
8257
8258/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8259FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8260/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8261FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8262/* Opcode 0xf3 0x0f 0xed - invalid */
8263/* Opcode 0xf2 0x0f 0xed - invalid */
8264
8265/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8266FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8267/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8268FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8269/* Opcode 0xf3 0x0f 0xee - invalid */
8270/* Opcode 0xf2 0x0f 0xee - invalid */
8271
8272
8273/** Opcode 0x0f 0xef - pxor Pq, Qq */
8274FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8275{
8276 IEMOP_MNEMONIC(pxor, "pxor");
8277 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8278}
8279
8280/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8281FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8282{
8283 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8284 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8285}
8286
8287/* Opcode 0xf3 0x0f 0xef - invalid */
8288/* Opcode 0xf2 0x0f 0xef - invalid */
8289
8290/* Opcode 0x0f 0xf0 - invalid */
8291/* Opcode 0x66 0x0f 0xf0 - invalid */
8292/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8293FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8294
8295/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8296FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8297/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8298FNIEMOP_STUB(iemOp_psllw_Vx_W);
8299/* Opcode 0xf2 0x0f 0xf1 - invalid */
8300
8301/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8302FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8303/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8304FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8305/* Opcode 0xf2 0x0f 0xf2 - invalid */
8306
8307/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8308FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8309/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8310FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8311/* Opcode 0xf2 0x0f 0xf3 - invalid */
8312
8313/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8314FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8315/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8316FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8317/* Opcode 0xf2 0x0f 0xf4 - invalid */
8318
8319/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8320FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8321/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8322FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8323/* Opcode 0xf2 0x0f 0xf5 - invalid */
8324
8325/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8326FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8327/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8328FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8329/* Opcode 0xf2 0x0f 0xf6 - invalid */
8330
8331/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8332FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8333/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8334FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8335/* Opcode 0xf2 0x0f 0xf7 - invalid */
8336
8337/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8338FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8339/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8340FNIEMOP_STUB(iemOp_psubb_Vx_W);
8341/* Opcode 0xf2 0x0f 0xf8 - invalid */
8342
8343/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8344FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8345/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8346FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8347/* Opcode 0xf2 0x0f 0xf9 - invalid */
8348
8349/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8350FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8351/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8352FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8353/* Opcode 0xf2 0x0f 0xfa - invalid */
8354
8355/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8356FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8357/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8358FNIEMOP_STUB(iemOp_psubq_Vx_W);
8359/* Opcode 0xf2 0x0f 0xfb - invalid */
8360
8361/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8362FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8363/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8364FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8365/* Opcode 0xf2 0x0f 0xfc - invalid */
8366
8367/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8368FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8369/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8370FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8371/* Opcode 0xf2 0x0f 0xfd - invalid */
8372
8373/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8374FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8375/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8376FNIEMOP_STUB(iemOp_paddd_Vx_W);
8377/* Opcode 0xf2 0x0f 0xfe - invalid */
8378
8379
8380/** Opcode **** 0x0f 0xff - UD0 */
8381FNIEMOP_DEF(iemOp_ud0)
8382{
8383 IEMOP_MNEMONIC(ud0, "ud0");
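    /* On Intel CPUs ud0 consumes a ModR/M byte, so decode it and calculate
       the effective address it implies before raising #UD. */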
8384 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8385 {
8386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8387#ifndef TST_IEM_CHECK_MC
8388 RTGCPTR GCPtrEff;
8389 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8390 if (rcStrict != VINF_SUCCESS)
8391 return rcStrict;
8392#endif
8393 IEMOP_HLP_DONE_DECODING();
8394 }
8395 return IEMOP_RAISE_INVALID_OPCODE();
8396}
8397
8398
8399
8400/**
8401 * Two byte opcode map, first byte 0x0f.
8402 *
8403 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8404 * check if it needs updating as well when making changes.
8405 */
8406IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8407{
8408 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8409 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8410 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8411 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8412 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8413 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8414 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8415 /* 0x06 */ IEMOP_X4(iemOp_clts),
8416 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8417 /* 0x08 */ IEMOP_X4(iemOp_invd),
8418 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8419 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8420 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8421 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8422 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8423 /* 0x0e */ IEMOP_X4(iemOp_femms),
8424 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8425
8426 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vx_Wsd,
8427 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8428 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8429 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8430 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8431 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8432 /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
8433 /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8434 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8435 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8436 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8437 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8438 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8439 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8440 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8441 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8442
8443 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8444 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8445 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8446 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8447 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8448 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8449 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8450 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8451 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
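
/*
 * A minimal sketch of how this table's layout is meant to be consumed,
 * illustrating why the assertion above checks for 1024 entries: each of
 * the 256 two-byte opcodes owns four consecutive slots, one per
 * mandatory-prefix column (none, 0x66, 0xF3, 0xF2), and rows declared
 * with IEMOP_X4() simply repeat one handler across all four columns.
 * This is an illustration only, not the real decoder loop; it assumes
 * the decoder keeps the current mandatory-prefix column index in
 * pVCpu->iem.s.idxPrefix using the column order above.
 */
#if 0 /* illustration only */
FNIEMOP_DEF(iemOpSketch_TwoByteDispatch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);  /* the opcode byte following 0x0f */
    /* Four entries per opcode: index = opcode * 4 + mandatory-prefix column. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif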

/** @} */
