VirtualBox: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h (r66991)

Last change r66991 by vboxsync: IEM: Docs and test for movntps & movntpd.
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66991 2017-05-19 22:01:15Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

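/*
 * Note (editor, summarizing the pattern used throughout this file): the
 * ModR/M byte (bRm) is laid out as mod(7:6), reg(5:3), rm(2:0).  The test
 *      (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
 * checks for the register form (mod == 3); any other mod value is a memory
 * operand whose effective address is produced by IEM_MC_CALC_RM_EFF_ADDR.
 * For group opcodes the reg field selects the /0../7 sub-opcode.
 */
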
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for verr and verw, 0x0f 0x00 /4 and /5. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

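/*
 * Note: group 6 dispatches purely on the ModR/M reg field (/0../7); the two
 * undefined encodings (/6 and /7) point at iemOp_InvalidWithRM so the table
 * can be indexed below without further checks.
 */
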
/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */
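
/*
 * Note: without VBOX_WITH_NESTED_HWVIRT the AMD SVM instructions above are
 * plain FNIEMOP_UD_STUBs, i.e. they raise \#UD just as on a CPU without SVM.
 */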

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
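
/*
 * Note on the 16-bit smsw paths above: for 286/386 target CPUs the reserved
 * MSW bits read as ones, hence the OR of 0xfff0 (286, bits 4..15) resp.
 * 0xffe0 (386, bits 5..15, leaving ET/bit 4 intact); 486 and later targets
 * return CR0 unmodified.
 */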


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
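
/*
 * Note: for 0x0f 0x01 the memory forms (mod != 3) dispatch on the reg field
 * via g_apfnGroup7Mem, while the register forms encode individual
 * instructions in the rm field: vmcall/vmlaunch/vmresume/vmxoff under /0,
 * monitor/mwait under /1, xgetbv/xsetbv under /2, the AMD SVM instructions
 * under /3, and swapgs/rdtscp under /7.
 */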

/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
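
/*
 * Note: movups/movupd are the unaligned-friendly moves (exception type 4,
 * unaligned access allowed), which is why the memory paths above use the
 * plain IEM_MC_FETCH_MEM_U128 rather than the 16-byte aligned SSE variant
 * used elsewhere in this file.
 */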


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
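
/*
 * Note: the three SSE3 duplicating moves differ only in element selection:
 * movsldup (f3 0f 12, above) replicates dwords 0 and 2, movddup (f2 0f 12,
 * above) replicates the low qword, and movshdup (f3 0f 16, further down)
 * replicates dwords 1 and 3.
 */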
1642
1643
1644/**
1645 * @opcode 0x13
1646 * @opcodesub !11 mr/reg
1647 * @oppfx none
1648 * @opcpuid sse
1649 * @opgroup og_sse_simdfp_datamove
1650 * @opxcpttype 5
1651 * @optest op1=1 op2=2 -> op1=2
1652 * @optest op1=0 op2=-42 -> op1=-42
1653 */
1654FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1655{
1656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1657 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1658 {
1659 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1660
1661 IEM_MC_BEGIN(0, 2);
1662 IEM_MC_LOCAL(uint64_t, uSrc);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664
1665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1669
1670 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1671 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1672
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 return VINF_SUCCESS;
1676 }
1677
1678 /**
1679 * @opdone
1680 * @opmnemonic ud0f13m3
1681 * @opcode 0x13
1682 * @opcodesub 11 mr/reg
1683 * @oppfx none
1684 * @opunused immediate
1685 * @opcpuid sse
1686 * @optest ->
1687 */
1688 return IEMOP_RAISE_INVALID_OPCODE();
1689}
1690
1691
1692/**
1693 * @opcode 0x13
1694 * @opcodesub !11 mr/reg
1695 * @oppfx 0x66
1696 * @opcpuid sse2
1697 * @opgroup og_sse2_pcksclr_datamove
1698 * @opxcpttype 5
1699 * @optest op1=1 op2=2 -> op1=2
1700 * @optest op1=0 op2=-42 -> op1=-42
1701 */
1702FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1703{
1704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1705 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1706 {
1707 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1708 IEM_MC_BEGIN(0, 2);
1709 IEM_MC_LOCAL(uint64_t, uSrc);
1710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1711
1712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1716
1717 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1718 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1719
1720 IEM_MC_ADVANCE_RIP();
1721 IEM_MC_END();
1722 return VINF_SUCCESS;
1723 }
1724
1725 /**
1726 * @opdone
1727 * @opmnemonic ud660f13m3
1728 * @opcode 0x13
1729 * @opcodesub 11 mr/reg
1730 * @oppfx 0x66
1731 * @opunused immediate
1732 * @opcpuid sse
1733 * @optest ->
1734 */
1735 return IEMOP_RAISE_INVALID_OPCODE();
1736}
1737
1738
1739/**
1740 * @opmnemonic udf30f13
1741 * @opcode 0x13
1742 * @oppfx 0xf3
1743 * @opunused intel-modrm
1744 * @opcpuid sse
1745 * @optest ->
1746 * @opdone
1747 */
1748
1749/**
1750 * @opmnemonic udf20f13
1751 * @opcode 0x13
1752 * @oppfx 0xf2
1753 * @opunused intel-modrm
1754 * @opcpuid sse
1755 * @optest ->
1756 * @opdone
1757 */
1758
1759/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1760FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1761/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1762FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1763
1764/**
1765 * @opdone
1766 * @opmnemonic udf30f14
1767 * @opcode 0x14
1768 * @oppfx 0xf3
1769 * @opunused intel-modrm
1770 * @opcpuid sse
1771 * @optest ->
1772 * @opdone
1773 */
1774
1775/**
1776 * @opmnemonic udf20f14
1777 * @opcode 0x14
1778 * @oppfx 0xf2
1779 * @opunused intel-modrm
1780 * @opcpuid sse
1781 * @optest ->
1782 * @opdone
1783 */
1784
1785/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1786FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1787/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1788FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1789/* Opcode 0xf3 0x0f 0x15 - invalid */
1790/* Opcode 0xf2 0x0f 0x15 - invalid */
1791
1792/**
1793 * @opdone
1794 * @opmnemonic udf30f15
1795 * @opcode 0x15
1796 * @oppfx 0xf3
1797 * @opunused intel-modrm
1798 * @opcpuid sse
1799 * @optest ->
1800 * @opdone
1801 */
1802
1803/**
1804 * @opmnemonic udf20f15
1805 * @opcode 0x15
1806 * @oppfx 0xf2
1807 * @opunused intel-modrm
1808 * @opcpuid sse
1809 * @optest ->
1810 * @opdone
1811 */
1812
1813FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1814{
1815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1817 {
1818 /**
1819 * @opcode 0x16
1820 * @opcodesub 11 mr/reg
1821 * @oppfx none
1822 * @opcpuid sse
1823 * @opgroup og_sse_simdfp_datamove
1824 * @opxcpttype 5
1825 * @optest op1=1 op2=2 -> op1=2
1826 * @optest op1=0 op2=-42 -> op1=-42
1827 */
1828 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1829
1830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1831 IEM_MC_BEGIN(0, 1);
1832 IEM_MC_LOCAL(uint64_t, uSrc);
1833
1834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1836 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1837 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 /**
1845 * @opdone
1846 * @opcode 0x16
1847 * @opcodesub !11 mr/reg
1848 * @oppfx none
1849 * @opcpuid sse
1850 * @opgroup og_sse_simdfp_datamove
1851 * @opxcpttype 5
1852 * @optest op1=1 op2=2 -> op1=2
1853 * @optest op1=0 op2=-42 -> op1=-42
1854 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1855 */
1856 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1857
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(uint64_t, uSrc);
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}
1875
1876
1877/**
1878 * @opcode 0x16
1879 * @opcodesub !11 mr/reg
1880 * @oppfx 0x66
1881 * @opcpuid sse2
1882 * @opgroup og_sse2_pcksclr_datamove
1883 * @opxcpttype 5
1884 * @optest op1=1 op2=2 -> op1=2
1885 * @optest op1=0 op2=-42 -> op1=-42
1886 */
1887FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1888{
1889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1890 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1891 {
1892 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1893 IEM_MC_BEGIN(0, 2);
1894 IEM_MC_LOCAL(uint64_t, uSrc);
1895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1896
1897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1899 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1900 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1901
1902 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1903 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1904
1905 IEM_MC_ADVANCE_RIP();
1906 IEM_MC_END();
1907 return VINF_SUCCESS;
1908 }
1909
1910 /**
1911 * @opdone
1912 * @opmnemonic ud660f16m3
1913 * @opcode 0x16
1914 * @opcodesub 11 mr/reg
1915 * @oppfx 0x66
1916 * @opunused immediate
1917 * @opcpuid sse
1918 * @optest ->
1919 */
1920 return IEMOP_RAISE_INVALID_OPCODE();
1921}
1922
1923
1924/**
1925 * @opcode 0x16
1926 * @oppfx 0xf3
1927 * @opcpuid sse3
1928 * @opgroup og_sse3_pcksclr_datamove
1929 * @opxcpttype 4
1930 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1931 * op1=0x00000002000000020000000100000001
1932 */
1933FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1934{
1935 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1938 {
1939 /*
1940 * Register, register.
1941 */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_BEGIN(2, 0);
1944 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1945 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1946
1947 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1948 IEM_MC_PREPARE_SSE_USAGE();
1949
1950 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1951 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1952 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1953
1954 IEM_MC_ADVANCE_RIP();
1955 IEM_MC_END();
1956 }
1957 else
1958 {
1959 /*
1960 * Register, memory.
1961 */
1962 IEM_MC_BEGIN(2, 2);
1963 IEM_MC_LOCAL(RTUINT128U, uSrc);
1964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1965 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1966 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1967
1968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1971 IEM_MC_PREPARE_SSE_USAGE();
1972
1973 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1974 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1975 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1976
1977 IEM_MC_ADVANCE_RIP();
1978 IEM_MC_END();
1979 }
1980 return VINF_SUCCESS;
1981}
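/* Worked example matching the @optest values in the function comment:
 * movshdup replicates the odd dwords (1 and 3) into the even positions:
 *      src = 0x00000002dddddddd00000001eeeeeeee
 *      dst = 0x00000002000000020000000100000001
 */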
1982
1983/**
1984 * @opdone
1985 * @opmnemonic udf30f16
1986 * @opcode 0x16
1987 * @oppfx 0xf2
1988 * @opunused intel-modrm
1989 * @opcpuid sse
1990 * @optest ->
1991 * @opdone
1992 */
1993
1994
1995/**
1996 * @opcode 0x17
1997 * @opcodesub !11 mr/reg
1998 * @oppfx none
1999 * @opcpuid sse
2000 * @opgroup og_sse_simdfp_datamove
2001 * @opxcpttype 5
2002 * @optest op1=1 op2=2 -> op1=2
2003 * @optest op1=0 op2=-42 -> op1=-42
2004 */
2005FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2006{
2007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2008 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2009 {
2010 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2011
2012 IEM_MC_BEGIN(0, 2);
2013 IEM_MC_LOCAL(uint64_t, uSrc);
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015
2016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2020
2021 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2022 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 return VINF_SUCCESS;
2027 }
2028
2029 /**
2030 * @opdone
2031 * @opmnemonic ud0f17m3
2032 * @opcode 0x17
2033 * @opcodesub 11 mr/reg
2034 * @oppfx none
2035 * @opunused immediate
2036 * @opcpuid sse
2037 * @optest ->
2038 */
2039 return IEMOP_RAISE_INVALID_OPCODE();
2040}
2041
2042
2043/**
2044 * @opcode 0x17
2045 * @opcodesub !11 mr/reg
2046 * @oppfx 0x66
2047 * @opcpuid sse2
2048 * @opgroup og_sse2_pcksclr_datamove
2049 * @opxcpttype 5
2050 * @optest op1=1 op2=2 -> op1=2
2051 * @optest op1=0 op2=-42 -> op1=-42
2052 */
2053FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2054{
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2059
2060 IEM_MC_BEGIN(0, 2);
2061 IEM_MC_LOCAL(uint64_t, uSrc);
2062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2063
2064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2068
2069 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2070 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2071
2072 IEM_MC_ADVANCE_RIP();
2073 IEM_MC_END();
2074 return VINF_SUCCESS;
2075 }
2076
2077 /**
2078 * @opdone
2079 * @opmnemonic ud660f17m3
2080 * @opcode 0x17
2081 * @opcodesub 11 mr/reg
2082 * @oppfx 0x66
2083 * @opunused immediate
2084 * @opcpuid sse
2085 * @optest ->
2086 */
2087 return IEMOP_RAISE_INVALID_OPCODE();
2088}
2089
2090
2091/**
2092 * @opdone
2093 * @opmnemonic udf30f17
2094 * @opcode 0x17
2095 * @oppfx 0xf3
2096 * @opunused intel-modrm
2097 * @opcpuid sse
2098 * @optest ->
2099 * @opdone
2100 */
2101
2102/**
2103 * @opmnemonic udf20f17
2104 * @opcode 0x17
2105 * @oppfx 0xf2
2106 * @opunused intel-modrm
2107 * @opcpuid sse
2108 * @optest ->
2109 * @opdone
2110 */
2111
2112
2113/** Opcode 0x0f 0x18. */
2114FNIEMOP_DEF(iemOp_prefetch_Grp16)
2115{
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2118 {
2119 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2120 {
2121 case 4: /* Aliased to /0 for the time being according to AMD. */
2122 case 5: /* Aliased to /0 for the time being according to AMD. */
2123 case 6: /* Aliased to /0 for the time being according to AMD. */
2124 case 7: /* Aliased to /0 for the time being according to AMD. */
2125 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2126 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2127 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2128 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2130 }
2131
2132 IEM_MC_BEGIN(0, 1);
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2136 /* Currently a NOP. */
2137 NOREF(GCPtrEffSrc);
2138 IEM_MC_ADVANCE_RIP();
2139 IEM_MC_END();
2140 return VINF_SUCCESS;
2141 }
2142
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144}
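/* For reference, the hint is encoded in the reg field of the ModR/M byte
 * (illustrative encodings, 32-bit address size):
 *      0f 18 06            prefetchnta byte ptr [esi]      ; /0
 *      0f 18 0e            prefetcht0  byte ptr [esi]      ; /1
 *      0f 18 16            prefetcht1  byte ptr [esi]      ; /2
 *      0f 18 1e            prefetcht2  byte ptr [esi]      ; /3
 */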
2145
2146
2147/** Opcode 0x0f 0x19..0x1f. */
2148FNIEMOP_DEF(iemOp_nop_Ev)
2149{
2150 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2153 {
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155 IEM_MC_BEGIN(0, 0);
2156 IEM_MC_ADVANCE_RIP();
2157 IEM_MC_END();
2158 }
2159 else
2160 {
2161 IEM_MC_BEGIN(0, 1);
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 /* Currently a NOP. */
2166 NOREF(GCPtrEffSrc);
2167 IEM_MC_ADVANCE_RIP();
2168 IEM_MC_END();
2169 }
2170 return VINF_SUCCESS;
2171}
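/* These encodings are what compilers emit as multi-byte NOP padding, e.g.
 * (illustrative):
 *      0f 1f 00            nop dword ptr [eax]             ; 3 bytes
 *      0f 1f 44 00 00      nop dword ptr [eax+eax*1+0]     ; 5 bytes
 */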
2172
2173
2174/** Opcode 0x0f 0x20. */
2175FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2176{
2177    /* mod is ignored, as are operand size overrides. */
2178 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2179 IEMOP_HLP_MIN_386();
2180 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2181 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2182 else
2183 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2184
2185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2186 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2187 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2188 {
2189 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2190 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2191 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2192 iCrReg |= 8;
2193 }
2194 switch (iCrReg)
2195 {
2196 case 0: case 2: case 3: case 4: case 8:
2197 break;
2198 default:
2199 return IEMOP_RAISE_INVALID_OPCODE();
2200 }
2201 IEMOP_HLP_DONE_DECODING();
2202
2203 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2204}
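/* Illustrative encoding of the CR8 alias handled above (AMD's alternative
 * encoding for 32-bit mode):
 *      f0 0f 20 c0         lock mov eax, cr8   ; #UD unless fMovCr8In32Bit
 */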
2205
2206
2207/** Opcode 0x0f 0x21. */
2208FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2209{
2210 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2211 IEMOP_HLP_MIN_386();
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2215 return IEMOP_RAISE_INVALID_OPCODE();
2216 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2217 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2218 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2219}
2220
2221
2222/** Opcode 0x0f 0x22. */
2223FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2224{
2225    /* mod is ignored, as are operand size overrides. */
2226 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2227 IEMOP_HLP_MIN_386();
2228 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2229 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2230 else
2231 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2232
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2235 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2236 {
2237 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2238 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2239 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2240 iCrReg |= 8;
2241 }
2242 switch (iCrReg)
2243 {
2244 case 0: case 2: case 3: case 4: case 8:
2245 break;
2246 default:
2247 return IEMOP_RAISE_INVALID_OPCODE();
2248 }
2249 IEMOP_HLP_DONE_DECODING();
2250
2251 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2252}
2253
2254
2255/** Opcode 0x0f 0x23. */
2256FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2257{
2258 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2259 IEMOP_HLP_MIN_386();
2260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2263 return IEMOP_RAISE_INVALID_OPCODE();
2264 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2265 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2266 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2267}
2268
2269
2270/** Opcode 0x0f 0x24. */
2271FNIEMOP_DEF(iemOp_mov_Rd_Td)
2272{
2273 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2274 /** @todo works on 386 and 486. */
2275 /* The RM byte is not considered, see testcase. */
2276 return IEMOP_RAISE_INVALID_OPCODE();
2277}
2278
2279
2280/** Opcode 0x0f 0x26. */
2281FNIEMOP_DEF(iemOp_mov_Td_Rd)
2282{
2283 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2284 /** @todo works on 386 and 486. */
2285 /* The RM byte is not considered, see testcase. */
2286 return IEMOP_RAISE_INVALID_OPCODE();
2287}
2288
2289
2290/**
2291 * @opcode 0x28
2292 * @oppfx none
2293 * @opcpuid sse
2294 * @opgroup og_sse_simdfp_datamove
2295 * @opxcpttype 1
2296 * @optest op1=1 op2=2 -> op1=2
2297 * @optest op1=0 op2=-42 -> op1=-42
2298 */
2299FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2300{
2301 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2304 {
2305 /*
2306 * Register, register.
2307 */
2308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2309 IEM_MC_BEGIN(0, 0);
2310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2313 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2314 IEM_MC_ADVANCE_RIP();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 IEM_MC_BEGIN(0, 2);
2323 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2328 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2330
2331 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2332 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2333
2334 IEM_MC_ADVANCE_RIP();
2335 IEM_MC_END();
2336 }
2337 return VINF_SUCCESS;
2338}
2339
2340/**
2341 * @opcode 0x28
2342 * @oppfx      0x66
2343 * @opcpuid sse2
2344 * @opgroup og_sse2_pcksclr_datamove
2345 * @opxcpttype 1
2346 * @optest op1=1 op2=2 -> op1=2
2347 * @optest op1=0 op2=-42 -> op1=-42
2348 */
2349FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2350{
2351 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2354 {
2355 /*
2356 * Register, register.
2357 */
2358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2359 IEM_MC_BEGIN(0, 0);
2360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2361 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2362 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2363 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2364 IEM_MC_ADVANCE_RIP();
2365 IEM_MC_END();
2366 }
2367 else
2368 {
2369 /*
2370 * Register, memory.
2371 */
2372 IEM_MC_BEGIN(0, 2);
2373 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2375
2376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2378 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2380
2381 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2382 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2383
2384 IEM_MC_ADVANCE_RIP();
2385 IEM_MC_END();
2386 }
2387 return VINF_SUCCESS;
2388}
2389
2390/* Opcode 0xf3 0x0f 0x28 - invalid */
2391/* Opcode 0xf2 0x0f 0x28 - invalid */
2392
2393/**
2394 * @opcode 0x29
2395 * @oppfx none
2396 * @opcpuid sse
2397 * @opgroup og_sse_simdfp_datamove
2398 * @opxcpttype 1
2399 * @optest op1=1 op2=2 -> op1=2
2400 * @optest op1=0 op2=-42 -> op1=-42
2401 */
2402FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2403{
2404 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2407 {
2408 /*
2409 * Register, register.
2410 */
2411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2412 IEM_MC_BEGIN(0, 0);
2413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2415 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2416 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2417 IEM_MC_ADVANCE_RIP();
2418 IEM_MC_END();
2419 }
2420 else
2421 {
2422 /*
2423 * Memory, register.
2424 */
2425 IEM_MC_BEGIN(0, 2);
2426 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428
2429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2431 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2433
2434 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2435 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 return VINF_SUCCESS;
2441}
2442
2443/**
2444 * @opcode 0x29
2445 * @oppfx      0x66
2446 * @opcpuid sse2
2447 * @opgroup og_sse2_pcksclr_datamove
2448 * @opxcpttype 1
2449 * @optest op1=1 op2=2 -> op1=2
2450 * @optest op1=0 op2=-42 -> op1=-42
2451 */
2452FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2453{
2454 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2457 {
2458 /*
2459 * Register, register.
2460 */
2461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2462 IEM_MC_BEGIN(0, 0);
2463 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2464 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2465 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2466 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2467 IEM_MC_ADVANCE_RIP();
2468 IEM_MC_END();
2469 }
2470 else
2471 {
2472 /*
2473 * Memory, register.
2474 */
2475 IEM_MC_BEGIN(0, 2);
2476 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2478
2479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2483
2484 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2485 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP();
2488 IEM_MC_END();
2489 }
2490 return VINF_SUCCESS;
2491}
2492
2493/* Opcode 0xf3 0x0f 0x29 - invalid */
2494/* Opcode 0xf2 0x0f 0x29 - invalid */
2495
2496
2497/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2498FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2499/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2500FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2501/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2502FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2503/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2504FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2505
2506
2507/**
2508 * @opcode 0x2b
2509 * @opcodesub !11 mr/reg
2510 * @oppfx none
2511 * @opcpuid sse
2512 * @opgroup og_sse1_cachect
2513 * @opxcpttype 1
2514 * @optest op1=1 op2=2 -> op1=2
2515 * @optest op1=0 op2=-42 -> op1=-42
2516 */
2517FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2518{
2519 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2522 {
2523 /*
2524 * memory, register.
2525 */
2526 IEM_MC_BEGIN(0, 2);
2527 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529
2530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2534
2535 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2536 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2537
2538 IEM_MC_ADVANCE_RIP();
2539 IEM_MC_END();
2540 }
2541 /* The register, register encoding is invalid. */
2542 else
2543 return IEMOP_RAISE_INVALID_OPCODE();
2544 return VINF_SUCCESS;
2545}
2546
2547/**
2548 * @opcode 0x2b
2549 * @opcodesub !11 mr/reg
2550 * @oppfx 0x66
2551 * @opcpuid sse2
2552 * @opgroup og_sse2_cachect
2553 * @opxcpttype 1
2554 * @optest op1=1 op2=2 -> op1=2
2555 * @optest op1=0 op2=-42 -> op1=-42
2556 */
2557FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2558{
2559 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2561 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2562 {
2563 /*
2564 * memory, register.
2565 */
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2576 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 }
2581 /* The register, register encoding is invalid. */
2582 else
2583 return IEMOP_RAISE_INVALID_OPCODE();
2584 return VINF_SUCCESS;
2585}
2586/* Opcode 0xf3 0x0f 0x2b - invalid */
2587/* Opcode 0xf2 0x0f 0x2b - invalid */
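/* Note: the non-temporal hint of movntps/movntpd only affects caching and
 * write-combining behaviour on real hardware; the architectural result is the
 * same as an ordinary store, which is why both handlers above can implement
 * it as a plain aligned 128-bit store. */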
2588
2589
2590/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2591FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2592/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2593FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2594/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2595FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2596/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2597FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2598
2599/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2600FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2601/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2602FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2603/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2604FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2605/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2606FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2607
2608/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2609FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2610/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2611FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2612/* Opcode 0xf3 0x0f 0x2e - invalid */
2613/* Opcode 0xf2 0x0f 0x2e - invalid */
2614
2615/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2616FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2617/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2618FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2619/* Opcode 0xf3 0x0f 0x2f - invalid */
2620/* Opcode 0xf2 0x0f 0x2f - invalid */
2621
2622/** Opcode 0x0f 0x30. */
2623FNIEMOP_DEF(iemOp_wrmsr)
2624{
2625 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2627 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2628}
2629
2630
2631/** Opcode 0x0f 0x31. */
2632FNIEMOP_DEF(iemOp_rdtsc)
2633{
2634 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2636 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2637}
2638
2639
2640/** Opcode 0x0f 0x33. */
2641FNIEMOP_DEF(iemOp_rdmsr)
2642{
2643 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2646}
2647
2648
2649/** Opcode 0x0f 0x34. */
2650FNIEMOP_DEF(iemOp_rdpmc)
2651{
2652 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2655}
2656
2657
2658/** Opcode 0x0f 0x34. */
2659FNIEMOP_STUB(iemOp_sysenter);
2660/** Opcode 0x0f 0x35. */
2661FNIEMOP_STUB(iemOp_sysexit);
2662/** Opcode 0x0f 0x37. */
2663FNIEMOP_STUB(iemOp_getsec);
2664
2665
2666/** Opcode 0x0f 0x38. */
2667FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2668{
2669#ifdef IEM_WITH_THREE_0F_38
2670 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2671 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2672#else
2673 IEMOP_BITCH_ABOUT_STUB();
2674 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2675#endif
2676}
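/* Illustration of the indexing used above, assuming the usual IEM table
 * layout of four entries per opcode byte, one per mandatory prefix
 * (none, 0x66, 0xf3, 0xf2):
 *      66 0f 38 00 c1      pshufb xmm0, xmm1
 *                          -> g_apfnThreeByte0f38[0x00 * 4 + 1]
 */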
2677
2678
2679/** Opcode 0x0f 0x3a. */
2680FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2681{
2682#ifdef IEM_WITH_THREE_0F_3A
2683 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2684 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2685#else
2686 IEMOP_BITCH_ABOUT_STUB();
2687 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2688#endif
2689}
2690
2691
2692/**
2693 * Implements a conditional move.
2694 *
2695 * Wish there was an obvious way to do this where we could share and reduce
2696 * code bloat.
2697 *
2698 * @param a_Cnd The conditional "microcode" operation.
2699 */
2700#define CMOV_X(a_Cnd) \
2701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2702 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2703 { \
2704 switch (pVCpu->iem.s.enmEffOpSize) \
2705 { \
2706 case IEMMODE_16BIT: \
2707 IEM_MC_BEGIN(0, 1); \
2708 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2709 a_Cnd { \
2710 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2711 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2712 } IEM_MC_ENDIF(); \
2713 IEM_MC_ADVANCE_RIP(); \
2714 IEM_MC_END(); \
2715 return VINF_SUCCESS; \
2716 \
2717 case IEMMODE_32BIT: \
2718 IEM_MC_BEGIN(0, 1); \
2719 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2720 a_Cnd { \
2721 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2722 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2723 } IEM_MC_ELSE() { \
2724 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2725 } IEM_MC_ENDIF(); \
2726 IEM_MC_ADVANCE_RIP(); \
2727 IEM_MC_END(); \
2728 return VINF_SUCCESS; \
2729 \
2730 case IEMMODE_64BIT: \
2731 IEM_MC_BEGIN(0, 1); \
2732 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2733 a_Cnd { \
2734 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2735 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2736 } IEM_MC_ENDIF(); \
2737 IEM_MC_ADVANCE_RIP(); \
2738 IEM_MC_END(); \
2739 return VINF_SUCCESS; \
2740 \
2741 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2742 } \
2743 } \
2744 else \
2745 { \
2746 switch (pVCpu->iem.s.enmEffOpSize) \
2747 { \
2748 case IEMMODE_16BIT: \
2749 IEM_MC_BEGIN(0, 2); \
2750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2751 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2753 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2754 a_Cnd { \
2755 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2756 } IEM_MC_ENDIF(); \
2757 IEM_MC_ADVANCE_RIP(); \
2758 IEM_MC_END(); \
2759 return VINF_SUCCESS; \
2760 \
2761 case IEMMODE_32BIT: \
2762 IEM_MC_BEGIN(0, 2); \
2763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2764 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2766 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2767 a_Cnd { \
2768 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2769 } IEM_MC_ELSE() { \
2770 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2771 } IEM_MC_ENDIF(); \
2772 IEM_MC_ADVANCE_RIP(); \
2773 IEM_MC_END(); \
2774 return VINF_SUCCESS; \
2775 \
2776 case IEMMODE_64BIT: \
2777 IEM_MC_BEGIN(0, 2); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2781 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2782 a_Cnd { \
2783 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2784 } IEM_MC_ENDIF(); \
2785 IEM_MC_ADVANCE_RIP(); \
2786 IEM_MC_END(); \
2787 return VINF_SUCCESS; \
2788 \
2789 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2790 } \
2791 } do {} while (0)
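/* Note: the IEM_MC_ELSE branches in the 32-bit cases above implement an
 * easily missed detail: a 32-bit cmov always writes its destination, zeroing
 * bits 63:32 even when the condition is false, e.g. (illustrative):
 *      0f 44 c1            cmove eax, ecx      ; clears rax[63:32] either way
 */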
2792
2793
2794
2795/** Opcode 0x0f 0x40. */
2796FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2797{
2798 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2799 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2800}
2801
2802
2803/** Opcode 0x0f 0x41. */
2804FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2805{
2806 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2807 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2808}
2809
2810
2811/** Opcode 0x0f 0x42. */
2812FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2813{
2814 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2815 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2816}
2817
2818
2819/** Opcode 0x0f 0x43. */
2820FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2821{
2822 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2823 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2824}
2825
2826
2827/** Opcode 0x0f 0x44. */
2828FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2829{
2830 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2831 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2832}
2833
2834
2835/** Opcode 0x0f 0x45. */
2836FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2837{
2838 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2839 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2840}
2841
2842
2843/** Opcode 0x0f 0x46. */
2844FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2845{
2846 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2847 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2848}
2849
2850
2851/** Opcode 0x0f 0x47. */
2852FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2853{
2854 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2855 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2856}
2857
2858
2859/** Opcode 0x0f 0x48. */
2860FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2861{
2862 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2863 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2864}
2865
2866
2867/** Opcode 0x0f 0x49. */
2868FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2869{
2870 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2871 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2872}
2873
2874
2875/** Opcode 0x0f 0x4a. */
2876FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2877{
2878 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2879 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2880}
2881
2882
2883/** Opcode 0x0f 0x4b. */
2884FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2885{
2886 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2887 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2888}
2889
2890
2891/** Opcode 0x0f 0x4c. */
2892FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2893{
2894 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2895 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2896}
2897
2898
2899/** Opcode 0x0f 0x4d. */
2900FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2901{
2902 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2903 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2904}
2905
2906
2907/** Opcode 0x0f 0x4e. */
2908FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2909{
2910 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2911 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2912}
2913
2914
2915/** Opcode 0x0f 0x4f. */
2916FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2917{
2918 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2919 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2920}
2921
2922#undef CMOV_X
2923
2924/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2925FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2926/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2927FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2928/* Opcode 0xf3 0x0f 0x50 - invalid */
2929/* Opcode 0xf2 0x0f 0x50 - invalid */
2930
2931/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2932FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2933/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2934FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2935/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2936FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2937/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2938FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2939
2940/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2941FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2942/* Opcode 0x66 0x0f 0x52 - invalid */
2943/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2944FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2945/* Opcode 0xf2 0x0f 0x52 - invalid */
2946
2947/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2948FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2949/* Opcode 0x66 0x0f 0x53 - invalid */
2950/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2951FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2952/* Opcode 0xf2 0x0f 0x53 - invalid */
2953
2954/** Opcode 0x0f 0x54 - andps Vps, Wps */
2955FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2958/* Opcode 0xf3 0x0f 0x54 - invalid */
2959/* Opcode 0xf2 0x0f 0x54 - invalid */
2960
2961/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2962FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2963/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2964FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2965/* Opcode 0xf3 0x0f 0x55 - invalid */
2966/* Opcode 0xf2 0x0f 0x55 - invalid */
2967
2968/** Opcode 0x0f 0x56 - orps Vps, Wps */
2969FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2970/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2971FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2972/* Opcode 0xf3 0x0f 0x56 - invalid */
2973/* Opcode 0xf2 0x0f 0x56 - invalid */
2974
2975/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2976FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2977/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2978FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2979/* Opcode 0xf3 0x0f 0x57 - invalid */
2980/* Opcode 0xf2 0x0f 0x57 - invalid */
2981
2982/** Opcode 0x0f 0x58 - addps Vps, Wps */
2983FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2984/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2985FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2986/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2987FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2988/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2989FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2990
2991/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2992FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2993/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2994FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2995/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2996FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2997/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2998FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2999
3000/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3001FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3002/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3003FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3004/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3005FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3006/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3007FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3008
3009/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3010FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3011/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3012FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3013/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3014FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3015/* Opcode 0xf2 0x0f 0x5b - invalid */
3016
3017/** Opcode 0x0f 0x5c - subps Vps, Wps */
3018FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3019/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3020FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3021/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3022FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3023/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3024FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3025
3026/** Opcode 0x0f 0x5d - minps Vps, Wps */
3027FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3028/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3029FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3030/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3031FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3032/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3033FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3034
3035/** Opcode 0x0f 0x5e - divps Vps, Wps */
3036FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3037/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3038FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3039/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3040FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3041/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3042FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3043
3044/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3045FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3046/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3047FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3048/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3049FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3050/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3051FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3052
3053/**
3054 * Common worker for MMX instructions on the forms:
3055 *      pxxxx mm1, mm2/mem32
3056 *
3057 * The 2nd operand is the first half of a register, which in the memory case
3058 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned access
3059 * of which only the lower 64 bits are used.
3060 *
3061 * Exceptions type 4.
3062 */
3063FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3064{
3065    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3066    if (!pImpl->pfnU64)
3067        return IEMOP_RAISE_INVALID_OPCODE();
3068    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3069    {
3070        /*
3071         * Register, register.
3072         */
3073        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3074        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3075        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3076        IEM_MC_BEGIN(2, 0);
3077        IEM_MC_ARG(uint64_t *, pDst, 0);
3078        IEM_MC_ARG(uint32_t const *, pSrc, 1);
3079        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3080        IEM_MC_PREPARE_FPU_USAGE();
3081        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3082        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3083        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3084        IEM_MC_ADVANCE_RIP();
3085        IEM_MC_END();
3086    }
3087    else
3088    {
3089        /*
3090         * Register, memory.
3091         */
3092        IEM_MC_BEGIN(2, 2);
3093        IEM_MC_ARG(uint64_t *, pDst, 0);
3094        IEM_MC_LOCAL(uint32_t, uSrc);
3095        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3096        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3097
3098        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3099        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3100        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3101        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3102
3103        IEM_MC_PREPARE_FPU_USAGE();
3104        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3105        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3106
3107        IEM_MC_ADVANCE_RIP();
3108        IEM_MC_END();
3109    }
3110    return VINF_SUCCESS;
3111}
3112
3113
3114/**
3115 * Common worker for SSE2 instructions on the forms:
3116 *      pxxxx xmm1, xmm2/mem128
3117 *
3118 * The 2nd operand is the first half of a register, which in the memory case
3119 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned access
3120 * of which only the lower 64 bits are used.
3121 *
3122 * Exceptions type 4.
3123 */
3124FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3125{
3126    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3127    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3128    {
3129        /*
3130         * Register, register.
3131         */
3132        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3133        IEM_MC_BEGIN(2, 0);
3134        IEM_MC_ARG(PRTUINT128U, pDst, 0);
3135        IEM_MC_ARG(uint64_t const *, pSrc, 1);
3136        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3137        IEM_MC_PREPARE_SSE_USAGE();
3138        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3139        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3140        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3141        IEM_MC_ADVANCE_RIP();
3142        IEM_MC_END();
3143    }
3144    else
3145    {
3146        /*
3147         * Register, memory.
3148         */
3149        IEM_MC_BEGIN(2, 2);
3150        IEM_MC_ARG(PRTUINT128U, pDst, 0);
3151        IEM_MC_LOCAL(uint64_t, uSrc);
3152        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3153        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3154
3155        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3156        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3158        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3159
3160        IEM_MC_PREPARE_SSE_USAGE();
3161        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3162        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3163
3164        IEM_MC_ADVANCE_RIP();
3165        IEM_MC_END();
3166    }
3167    return VINF_SUCCESS;
3168}
3169
3170
3171/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3172FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3173{
3174 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3175 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3176}
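/* Worked example of the low-half byte interleave performed above
 * (illustrative values):
 *      mm0 = 0x0706050403020100, mm1 = 0x0f0e0d0c0b0a0908
 *      punpcklbw mm0, mm1  ->  mm0 = 0x0b030a0209010800
 */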
3177
3178/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3179FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3180{
3181    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3182 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3183}
3184
3185/* Opcode 0xf3 0x0f 0x60 - invalid */
3186
3187
3188/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3189FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3190{
3191 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
3192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3193}
3194
3195/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3196FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3197{
3198    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3199 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3200}
3201
3202/* Opcode 0xf3 0x0f 0x61 - invalid */
3203
3204
3205/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3206FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3207{
3208 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3210}
3211
3212/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3213FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3214{
3215 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3217}
3218
3219/* Opcode 0xf3 0x0f 0x62 - invalid */
3220
3221
3222
3223/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3224FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3225/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3226FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3227/* Opcode 0xf3 0x0f 0x63 - invalid */
3228
3229/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3230FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3231/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3232FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3233/* Opcode 0xf3 0x0f 0x64 - invalid */
3234
3235/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3236FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3237/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3238FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3239/* Opcode 0xf3 0x0f 0x65 - invalid */
3240
3241/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3242FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3243/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3244FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3245/* Opcode 0xf3 0x0f 0x66 - invalid */
3246
3247/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3248FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3249/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3250FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3251/* Opcode 0xf3 0x0f 0x67 - invalid */
3252
3253
3254/**
3255 * Common worker for MMX instructions on the form:
3256 * pxxxx mm1, mm2/mem64
3257 *
3258 * The 2nd operand is the second half of a register, which in the memory case
3259 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3260 * where it may read the full 128 bits or only the upper 64 bits.
3261 *
3262 * Exceptions type 4.
3263 */
3264FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3265{
3266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3267 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3269 {
3270 /*
3271 * Register, register.
3272 */
3273 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3274 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_BEGIN(2, 0);
3277 IEM_MC_ARG(uint64_t *, pDst, 0);
3278 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3279 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3280 IEM_MC_PREPARE_FPU_USAGE();
3281 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3282 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3283 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3284 IEM_MC_ADVANCE_RIP();
3285 IEM_MC_END();
3286 }
3287 else
3288 {
3289 /*
3290 * Register, memory.
3291 */
3292 IEM_MC_BEGIN(2, 2);
3293 IEM_MC_ARG(uint64_t *, pDst, 0);
3294 IEM_MC_LOCAL(uint64_t, uSrc);
3295 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3300 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3301 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3302
3303 IEM_MC_PREPARE_FPU_USAGE();
3304 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3305 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3306
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 return VINF_SUCCESS;
3311}
3312
3313
3314/**
3315 * Common worker for SSE2 instructions on the form:
3316 * pxxxx xmm1, xmm2/mem128
3317 *
3318 * The 2nd operand is the second half of a register, which in the memory case
3319 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3320 * where it may read the full 128 bits or only the upper 64 bits.
3321 *
3322 * Exceptions type 4.
3323 */
3324FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3325{
3326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3327 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3328 {
3329 /*
3330 * Register, register.
3331 */
3332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3333 IEM_MC_BEGIN(2, 0);
3334 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3335 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3336 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3337 IEM_MC_PREPARE_SSE_USAGE();
3338 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3339 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3340 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3341 IEM_MC_ADVANCE_RIP();
3342 IEM_MC_END();
3343 }
3344 else
3345 {
3346 /*
3347 * Register, memory.
3348 */
3349 IEM_MC_BEGIN(2, 2);
3350 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3351 IEM_MC_LOCAL(RTUINT128U, uSrc);
3352 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3354
3355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3358        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3359
3360 IEM_MC_PREPARE_SSE_USAGE();
3361 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3362 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3363
3364 IEM_MC_ADVANCE_RIP();
3365 IEM_MC_END();
3366 }
3367 return VINF_SUCCESS;
3368}
3369
3370
3371/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3372FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3373{
3374 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3375 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3376}
3377
3378/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3379FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3380{
3381    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3382 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3383}
3384/* Opcode 0xf3 0x0f 0x68 - invalid */
3385
3386
3387/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3388FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3389{
3390 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3391 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3392}
3393
3394/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3395FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3396{
3397 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3398 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3400}
3401/* Opcode 0xf3 0x0f 0x69 - invalid */
3402
3403
3404/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3405FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3406{
3407 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3409}
3410
3411/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3412FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3413{
3414 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3415 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3416}
3417/* Opcode 0xf3 0x0f 0x6a - invalid */
3418
3419
3420/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3421FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3422/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3423FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3424/* Opcode 0xf3 0x0f 0x6b - invalid */
3425
3426
3427/* Opcode 0x0f 0x6c - invalid */
3428
3429/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3430FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3431{
3432 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3433 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3434}
3435
3436/* Opcode 0xf3 0x0f 0x6c - invalid */
3437/* Opcode 0xf2 0x0f 0x6c - invalid */
3438
3439
3440/* Opcode 0x0f 0x6d - invalid */
3441
3442/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3443FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3444{
3445 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3446 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3447}
3448
3449/* Opcode 0xf3 0x0f 0x6d - invalid */
3450
3451
3452/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3453FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3454{
3455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3456 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3457 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3458 else
3459 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3460 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3461 {
3462 /* MMX, greg */
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3464 IEM_MC_BEGIN(0, 1);
3465 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3466 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3467 IEM_MC_LOCAL(uint64_t, u64Tmp);
3468 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3469 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3470 else
3471 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3472 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3473 IEM_MC_ADVANCE_RIP();
3474 IEM_MC_END();
3475 }
3476 else
3477 {
3478 /* MMX, [mem] */
3479 IEM_MC_BEGIN(0, 2);
3480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3481 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3482        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3484 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3485 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3486 {
3487 IEM_MC_LOCAL(uint64_t, u64Tmp);
3488 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3489 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3490 }
3491 else
3492 {
3493 IEM_MC_LOCAL(uint32_t, u32Tmp);
3494 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3495 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3496 }
3497 IEM_MC_ADVANCE_RIP();
3498 IEM_MC_END();
3499 }
3500 return VINF_SUCCESS;
3501}
3502
3503/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3504FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3505{
3506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3507    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3508        IEMOP_MNEMONIC(movdq_Vq_Eq, "movq Vq,Eq");
3509    else
3510        IEMOP_MNEMONIC(movdq_Vd_Ed, "movd Vd,Ed");
3511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3512 {
3513 /* XMM, greg*/
3514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3515 IEM_MC_BEGIN(0, 1);
3516 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3517 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3518 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3519 {
3520 IEM_MC_LOCAL(uint64_t, u64Tmp);
3521 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3522 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3523 }
3524 else
3525 {
3526 IEM_MC_LOCAL(uint32_t, u32Tmp);
3527 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3528 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3529 }
3530 IEM_MC_ADVANCE_RIP();
3531 IEM_MC_END();
3532 }
3533 else
3534 {
3535 /* XMM, [mem] */
3536 IEM_MC_BEGIN(0, 2);
3537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3538 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3539        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3541 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3542 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3543 {
3544 IEM_MC_LOCAL(uint64_t, u64Tmp);
3545 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3546 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3547 }
3548 else
3549 {
3550 IEM_MC_LOCAL(uint32_t, u32Tmp);
3551 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3552 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3553 }
3554 IEM_MC_ADVANCE_RIP();
3555 IEM_MC_END();
3556 }
3557 return VINF_SUCCESS;
3558}
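/* Illustrative encodings showing how REX.W selects between the two forms
 * handled above:
 *      66 0f 6e c0         movd xmm0, eax
 *      66 48 0f 6e c0      movq xmm0, rax      ; REX.W=1
 */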
3559
3560/* Opcode 0xf3 0x0f 0x6e - invalid */
3561
3562
3563/** Opcode 0x0f 0x6f - movq Pq, Qq */
3564FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3565{
3566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3567 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3568 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3569 {
3570 /*
3571 * Register, register.
3572 */
3573 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3574 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3576 IEM_MC_BEGIN(0, 1);
3577 IEM_MC_LOCAL(uint64_t, u64Tmp);
3578 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3579 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3580 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3581 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3582 IEM_MC_ADVANCE_RIP();
3583 IEM_MC_END();
3584 }
3585 else
3586 {
3587 /*
3588 * Register, memory.
3589 */
3590 IEM_MC_BEGIN(0, 2);
3591 IEM_MC_LOCAL(uint64_t, u64Tmp);
3592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3593
3594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3597 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3598 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3599 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3600
3601 IEM_MC_ADVANCE_RIP();
3602 IEM_MC_END();
3603 }
3604 return VINF_SUCCESS;
3605}
3606
3607/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3608FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3609{
3610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3611 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3612 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3613 {
3614 /*
3615 * Register, register.
3616 */
3617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3618 IEM_MC_BEGIN(0, 0);
3619 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3620 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3621 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3622 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3623 IEM_MC_ADVANCE_RIP();
3624 IEM_MC_END();
3625 }
3626 else
3627 {
3628 /*
3629 * Register, memory.
3630 */
3631 IEM_MC_BEGIN(0, 2);
3632 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3634
3635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3637 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3638 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3639 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3640 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3641
3642 IEM_MC_ADVANCE_RIP();
3643 IEM_MC_END();
3644 }
3645 return VINF_SUCCESS;
3646}
3647
3648/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3649FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3650{
3651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3652 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3653 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3654 {
3655 /*
3656 * Register, register.
3657 */
3658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3659 IEM_MC_BEGIN(0, 0);
3660 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3661 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3662 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3663 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3664 IEM_MC_ADVANCE_RIP();
3665 IEM_MC_END();
3666 }
3667 else
3668 {
3669 /*
3670 * Register, memory.
3671 */
3672 IEM_MC_BEGIN(0, 2);
3673 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3675
3676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3678 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3679 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3680 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3681 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3682
3683 IEM_MC_ADVANCE_RIP();
3684 IEM_MC_END();
3685 }
3686 return VINF_SUCCESS;
3687}
3688
3689
3690/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3691FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3692{
3693 IEMOP_MNEMONIC(pshufw_Pq_Qq_Ib, "pshufw Pq,Qq,Ib");
3694 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3695 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3696 {
3697 /*
3698 * Register, register.
3699 */
3700 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3702
3703 IEM_MC_BEGIN(3, 0);
3704 IEM_MC_ARG(uint64_t *, pDst, 0);
3705 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3706 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3707 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3708 IEM_MC_PREPARE_FPU_USAGE();
3709 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3710 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3711 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3712 IEM_MC_ADVANCE_RIP();
3713 IEM_MC_END();
3714 }
3715 else
3716 {
3717 /*
3718 * Register, memory.
3719 */
3720 IEM_MC_BEGIN(3, 2);
3721 IEM_MC_ARG(uint64_t *, pDst, 0);
3722 IEM_MC_LOCAL(uint64_t, uSrc);
3723 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3725
3726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte still follows */
3727 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3728 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3730 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3731
3732 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3733 IEM_MC_PREPARE_FPU_USAGE();
3734 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3735 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3736
3737 IEM_MC_ADVANCE_RIP();
3738 IEM_MC_END();
3739 }
3740 return VINF_SUCCESS;
3741}
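/* The imm8 ("bEvil") packs four 2-bit word selectors, one per destination
   word: dst.w[i] = src.w[(imm >> (i * 2)) & 3].  A minimal scalar sketch of
   those semantics (an illustration only, not the actual iemAImpl_pshufw
   body):

       static uint64_t iemPShufW64Sketch(uint64_t uSrc, uint8_t bImm)
       {
           uint64_t uDst = 0;
           for (unsigned i = 0; i < 4; i++)
           {
               unsigned const iSel = (bImm >> (i * 2)) & 3;             // 2-bit selector for dst word i
               uDst |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (i * 16);
           }
           return uDst;                                                 // bImm=0x1b reverses the four words
       }

   pshufd/pshufhw/pshuflw below apply the same selector encoding to 128-bit
   operands (whole dwords, the high four words only, and the low four words
   only, respectively). */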
3742
3743/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3744FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3745{
3746 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3748 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3749 {
3750 /*
3751 * Register, register.
3752 */
3753 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755
3756 IEM_MC_BEGIN(3, 0);
3757 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3758 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3759 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3760 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3761 IEM_MC_PREPARE_SSE_USAGE();
3762 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3763 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3764 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3765 IEM_MC_ADVANCE_RIP();
3766 IEM_MC_END();
3767 }
3768 else
3769 {
3770 /*
3771 * Register, memory.
3772 */
3773 IEM_MC_BEGIN(3, 2);
3774 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3775 IEM_MC_LOCAL(RTUINT128U, uSrc);
3776 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3778
3779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte still follows */
3780 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3781 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3783 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3784
3785 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3786 IEM_MC_PREPARE_SSE_USAGE();
3787 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3788 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3789
3790 IEM_MC_ADVANCE_RIP();
3791 IEM_MC_END();
3792 }
3793 return VINF_SUCCESS;
3794}
3795
3796/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3797FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3798{
3799 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3801 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3802 {
3803 /*
3804 * Register, register.
3805 */
3806 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3808
3809 IEM_MC_BEGIN(3, 0);
3810 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3811 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3812 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3813 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3814 IEM_MC_PREPARE_SSE_USAGE();
3815 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3816 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3817 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3818 IEM_MC_ADVANCE_RIP();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 /*
3824 * Register, memory.
3825 */
3826 IEM_MC_BEGIN(3, 2);
3827 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3828 IEM_MC_LOCAL(RTUINT128U, uSrc);
3829 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3831
3832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte still follows */
3833 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3834 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3836 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3837
3838 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3839 IEM_MC_PREPARE_SSE_USAGE();
3840 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3841 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3842
3843 IEM_MC_ADVANCE_RIP();
3844 IEM_MC_END();
3845 }
3846 return VINF_SUCCESS;
3847}
3848
3849/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3850FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3851{
3852 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3854 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3855 {
3856 /*
3857 * Register, register.
3858 */
3859 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861
3862 IEM_MC_BEGIN(3, 0);
3863 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3864 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3865 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3866 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3867 IEM_MC_PREPARE_SSE_USAGE();
3868 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3869 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3870 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3871 IEM_MC_ADVANCE_RIP();
3872 IEM_MC_END();
3873 }
3874 else
3875 {
3876 /*
3877 * Register, memory.
3878 */
3879 IEM_MC_BEGIN(3, 2);
3880 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3881 IEM_MC_LOCAL(RTUINT128U, uSrc);
3882 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3884
3885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte still follows */
3886 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3887 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3889 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3890
3891 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3892 IEM_MC_PREPARE_SSE_USAGE();
3893 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3894 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3895
3896 IEM_MC_ADVANCE_RIP();
3897 IEM_MC_END();
3898 }
3899 return VINF_SUCCESS;
3900}
3901
3902
3903/** Opcode 0x0f 0x71 11/2. */
3904FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3905
3906/** Opcode 0x66 0x0f 0x71 11/2. */
3907FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3908
3909/** Opcode 0x0f 0x71 11/4. */
3910FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3911
3912/** Opcode 0x66 0x0f 0x71 11/4. */
3913FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3914
3915/** Opcode 0x0f 0x71 11/6. */
3916FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3917
3918/** Opcode 0x66 0x0f 0x71 11/6. */
3919FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3920
3921
3922/**
3923 * Group 12 jump table for register variant.
3924 */
3925IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3926{
3927 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3928 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3929 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3930 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3931 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3932 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3933 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3934 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3935};
3936AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
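/* Each /r row above has four columns selected by pVCpu->iem.s.idxPrefix:
   0 = no mandatory prefix (MMX, Nq operands), 1 = 0x66 (SSE, Ux operands),
   2 = 0xF3, 3 = 0xF2.  Thus 0x66 0x0f 0x71 /2 (psrlw Ux,Ib) dispatches via
   entry /2 * 4 + 1.  The group 13 and 14 tables below use the same layout. */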
3937
3938
3939/** Opcode 0x0f 0x71. */
3940FNIEMOP_DEF(iemOp_Grp12)
3941{
3942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3943 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3944 /* register, register */
3945 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3946 + pVCpu->iem.s.idxPrefix], bRm);
3947 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3948}
3949
3950
3951/** Opcode 0x0f 0x72 11/2. */
3952FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3953
3954/** Opcode 0x66 0x0f 0x72 11/2. */
3955FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3956
3957/** Opcode 0x0f 0x72 11/4. */
3958FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3959
3960/** Opcode 0x66 0x0f 0x72 11/4. */
3961FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3962
3963/** Opcode 0x0f 0x72 11/6. */
3964FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3965
3966/** Opcode 0x66 0x0f 0x72 11/6. */
3967FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3968
3969
3970/**
3971 * Group 13 jump table for register variant.
3972 */
3973IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3974{
3975 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3976 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3977 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3978 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3979 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3980 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3981 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3982 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3983};
3984AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3985
3986/** Opcode 0x0f 0x72. */
3987FNIEMOP_DEF(iemOp_Grp13)
3988{
3989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3990 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3991 /* register, register */
3992 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3993 + pVCpu->iem.s.idxPrefix], bRm);
3994 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3995}
3996
3997
3998/** Opcode 0x0f 0x73 11/2. */
3999FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4000
4001/** Opcode 0x66 0x0f 0x73 11/2. */
4002FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4003
4004/** Opcode 0x66 0x0f 0x73 11/3. */
4005FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4006
4007/** Opcode 0x0f 0x73 11/6. */
4008FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4009
4010/** Opcode 0x66 0x0f 0x73 11/6. */
4011FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4012
4013/** Opcode 0x66 0x0f 0x73 11/7. */
4014FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4015
4016/**
4017 * Group 14 jump table for register variant.
4018 */
4019IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4020{
4021 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4022 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4023 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4024 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4025 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4026 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4027 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4028 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4029};
4030AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4031
4032
4033/** Opcode 0x0f 0x73. */
4034FNIEMOP_DEF(iemOp_Grp14)
4035{
4036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4037 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4038 /* register, register */
4039 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4040 + pVCpu->iem.s.idxPrefix], bRm);
4041 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4042}
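/* Note that all three shift-by-immediate groups (12, 13 and 14) are defined
   for register operands only; memory encodings go to
   iemOp_InvalidWithRMNeedImm8, which (as the name suggests) still decodes
   the trailing imm8 so the instruction length is consumed consistently
   before raising #UD. */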
4043
4044
4045/**
4046 * Common worker for MMX instructions on the form:
4047 * pxxx mm1, mm2/mem64
4048 */
4049FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4050{
4051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4052 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4053 {
4054 /*
4055 * Register, register.
4056 */
4057 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4058 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4060 IEM_MC_BEGIN(2, 0);
4061 IEM_MC_ARG(uint64_t *, pDst, 0);
4062 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4063 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4064 IEM_MC_PREPARE_FPU_USAGE();
4065 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4066 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4067 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4068 IEM_MC_ADVANCE_RIP();
4069 IEM_MC_END();
4070 }
4071 else
4072 {
4073 /*
4074 * Register, memory.
4075 */
4076 IEM_MC_BEGIN(2, 2);
4077 IEM_MC_ARG(uint64_t *, pDst, 0);
4078 IEM_MC_LOCAL(uint64_t, uSrc);
4079 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4081
4082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4084 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4085 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4086
4087 IEM_MC_PREPARE_FPU_USAGE();
4088 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4089 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4090
4091 IEM_MC_ADVANCE_RIP();
4092 IEM_MC_END();
4093 }
4094 return VINF_SUCCESS;
4095}
4096
4097
4098/**
4099 * Common worker for SSE2 instructions on the forms:
4100 * pxxx xmm1, xmm2/mem128
4101 *
4102 * Proper alignment of the 128-bit operand is enforced.
4103 * Exceptions type 4. SSE2 cpuid checks.
4104 */
4105FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4106{
4107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4108 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4109 {
4110 /*
4111 * Register, register.
4112 */
4113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4114 IEM_MC_BEGIN(2, 0);
4115 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4116 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4117 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4118 IEM_MC_PREPARE_SSE_USAGE();
4119 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4120 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4121 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4122 IEM_MC_ADVANCE_RIP();
4123 IEM_MC_END();
4124 }
4125 else
4126 {
4127 /*
4128 * Register, memory.
4129 */
4130 IEM_MC_BEGIN(2, 2);
4131 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4132 IEM_MC_LOCAL(RTUINT128U, uSrc);
4133 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135
4136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4138 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4139 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4140
4141 IEM_MC_PREPARE_SSE_USAGE();
4142 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4143 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4144
4145 IEM_MC_ADVANCE_RIP();
4146 IEM_MC_END();
4147 }
4148 return VINF_SUCCESS;
4149}
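/* Both common workers receive a PCIEMOPMEDIAF2 that bundles the MMX
   (pfnU64) and SSE (pfnU128) implementations, so the opcode handlers below
   reduce to one-line forwards with the matching g_iemAImpl_xxx table. */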
4150
4151
4152/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4153FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4154{
4155 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4156 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4157}
4158
4159/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4160FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4161{
4162 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4163 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4164}
4165
4166/* Opcode 0xf3 0x0f 0x74 - invalid */
4167/* Opcode 0xf2 0x0f 0x74 - invalid */
4168
4169
4170/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4171FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4172{
4173 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4174 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4175}
4176
4177/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4178FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4179{
4180 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4181 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4182}
4183
4184/* Opcode 0xf3 0x0f 0x75 - invalid */
4185/* Opcode 0xf2 0x0f 0x75 - invalid */
4186
4187
4188/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4189FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4190{
4191 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4192 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4193}
4194
4195/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4196FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4197{
4198 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4199 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4200}
4201
4202/* Opcode 0xf3 0x0f 0x76 - invalid */
4203/* Opcode 0xf2 0x0f 0x76 - invalid */
4204
4205
4206/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4207FNIEMOP_STUB(iemOp_emms);
4208/* Opcode 0x66 0x0f 0x77 - invalid */
4209/* Opcode 0xf3 0x0f 0x77 - invalid */
4210/* Opcode 0xf2 0x0f 0x77 - invalid */
4211
4212/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4213FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4214/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4215FNIEMOP_STUB(iemOp_AmdGrp17);
4216/* Opcode 0xf3 0x0f 0x78 - invalid */
4217/* Opcode 0xf2 0x0f 0x78 - invalid */
4218
4219/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4220FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4221/* Opcode 0x66 0x0f 0x79 - invalid */
4222/* Opcode 0xf3 0x0f 0x79 - invalid */
4223/* Opcode 0xf2 0x0f 0x79 - invalid */
4224
4225/* Opcode 0x0f 0x7a - invalid */
4226/* Opcode 0x66 0x0f 0x7a - invalid */
4227/* Opcode 0xf3 0x0f 0x7a - invalid */
4228/* Opcode 0xf2 0x0f 0x7a - invalid */
4229
4230/* Opcode 0x0f 0x7b - invalid */
4231/* Opcode 0x66 0x0f 0x7b - invalid */
4232/* Opcode 0xf3 0x0f 0x7b - invalid */
4233/* Opcode 0xf2 0x0f 0x7b - invalid */
4234
4235/* Opcode 0x0f 0x7c - invalid */
4236/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4237FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4238/* Opcode 0xf3 0x0f 0x7c - invalid */
4239/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4240FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4241
4242/* Opcode 0x0f 0x7d - invalid */
4243/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4244FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4245/* Opcode 0xf3 0x0f 0x7d - invalid */
4246/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4247FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4248
4249
4250/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4251FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4252{
4253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4254 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4255 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4256 else
4257 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4258 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4259 {
4260 /* greg, MMX */
4261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4262 IEM_MC_BEGIN(0, 1);
4263 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4264 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4265 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4266 {
4267 IEM_MC_LOCAL(uint64_t, u64Tmp);
4268 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4269 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4270 }
4271 else
4272 {
4273 IEM_MC_LOCAL(uint32_t, u32Tmp);
4274 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4275 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4276 }
4277 IEM_MC_ADVANCE_RIP();
4278 IEM_MC_END();
4279 }
4280 else
4281 {
4282 /* [mem], MMX */
4283 IEM_MC_BEGIN(0, 2);
4284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4285 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate operand */
4287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4288 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4289 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4290 {
4291 IEM_MC_LOCAL(uint64_t, u64Tmp);
4292 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4293 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4294 }
4295 else
4296 {
4297 IEM_MC_LOCAL(uint32_t, u32Tmp);
4298 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4299 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4300 }
4301 IEM_MC_ADVANCE_RIP();
4302 IEM_MC_END();
4303 }
4304 return VINF_SUCCESS;
4305}
4306
4307/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4308FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4309{
4310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4311 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4312 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4313 else
4314 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4316 {
4317 /* greg, XMM */
4318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4319 IEM_MC_BEGIN(0, 1);
4320 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4321 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4322 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4323 {
4324 IEM_MC_LOCAL(uint64_t, u64Tmp);
4325 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4326 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4327 }
4328 else
4329 {
4330 IEM_MC_LOCAL(uint32_t, u32Tmp);
4331 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4332 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4333 }
4334 IEM_MC_ADVANCE_RIP();
4335 IEM_MC_END();
4336 }
4337 else
4338 {
4339 /* [mem], XMM */
4340 IEM_MC_BEGIN(0, 2);
4341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4342 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate operand */
4344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4345 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4346 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4347 {
4348 IEM_MC_LOCAL(uint64_t, u64Tmp);
4349 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4350 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4351 }
4352 else
4353 {
4354 IEM_MC_LOCAL(uint32_t, u32Tmp);
4355 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4356 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4357 }
4358 IEM_MC_ADVANCE_RIP();
4359 IEM_MC_END();
4360 }
4361 return VINF_SUCCESS;
4362}
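/* For both 0x7e store forms above, REX.W selects the 64-bit movq variant;
   without it only the low 32 bits of the source register are stored (and,
   for a general-register target, zero-extended by the normal 32-bit GPR
   write). */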
4363
4364
4365/**
4366 * @opcode 0x7e
4367 * @opcodesub !11 mr/reg
4368 * @oppfx 0xf3
4369 * @opcpuid sse2
4370 * @opgroup og_sse2_pcksclr_datamove
4371 * @opxcpttype 5
4372 * @optest op1=1 op2=2 -> op1=2
4373 * @optest op1=0 op2=-42 -> op1=-42
4374 */
4375FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4376{
4377 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
4378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4379 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4380 {
4381 /*
4382 * Register, register.
4383 */
4384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4385 IEM_MC_BEGIN(0, 2);
4386 IEM_MC_LOCAL(uint64_t, uSrc);
4387
4388 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4389 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4390
4391 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4392 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4393
4394 IEM_MC_ADVANCE_RIP();
4395 IEM_MC_END();
4396 }
4397 else
4398 {
4399 /*
4400 * Register, memory.
4401 */
4402 IEM_MC_BEGIN(0, 2);
4403 IEM_MC_LOCAL(uint64_t, uSrc);
4404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4405
4406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4408 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4409 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4410
4411 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4412 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4413
4414 IEM_MC_ADVANCE_RIP();
4415 IEM_MC_END();
4416 }
4417 return VINF_SUCCESS;
4418}
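/* movq xmm, xmm/m64 always clears bits 127:64 of the destination, which is
   why both paths above use IEM_MC_STORE_XREG_U64_ZX_U128 rather than a
   plain 64-bit store. */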
4419
4420/* Opcode 0xf2 0x0f 0x7e - invalid */
4421
4422
4423/** Opcode 0x0f 0x7f - movq Qq, Pq */
4424FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4425{
4426 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4428 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4429 {
4430 /*
4431 * Register, register.
4432 */
4433 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4434 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4436 IEM_MC_BEGIN(0, 1);
4437 IEM_MC_LOCAL(uint64_t, u64Tmp);
4438 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4439 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4440 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4441 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4442 IEM_MC_ADVANCE_RIP();
4443 IEM_MC_END();
4444 }
4445 else
4446 {
4447 /*
4448 * Memory, register.
4449 */
4450 IEM_MC_BEGIN(0, 2);
4451 IEM_MC_LOCAL(uint64_t, u64Tmp);
4452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4453
4454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4456 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4457 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4458
4459 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4460 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4461
4462 IEM_MC_ADVANCE_RIP();
4463 IEM_MC_END();
4464 }
4465 return VINF_SUCCESS;
4466}
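/* The two paths actualize the FPU state differently on purpose: the
   register form writes an MMX register (FOR_CHANGE), while the memory form
   only reads one before storing to guest memory (FOR_READ). */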
4467
4468/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4469FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4470{
4471 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4473 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4474 {
4475 /*
4476 * Register, register.
4477 */
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4479 IEM_MC_BEGIN(0, 0);
4480 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4481 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4482 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4483 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4484 IEM_MC_ADVANCE_RIP();
4485 IEM_MC_END();
4486 }
4487 else
4488 {
4489 /*
4490 * Memory, register.
4491 */
4492 IEM_MC_BEGIN(0, 2);
4493 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4495
4496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4498 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4499 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4500
4501 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4502 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4503
4504 IEM_MC_ADVANCE_RIP();
4505 IEM_MC_END();
4506 }
4507 return VINF_SUCCESS;
4508}
4509
4510/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4511FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4512{
4513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4514 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4515 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4516 {
4517 /*
4518 * Register, register.
4519 */
4520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4521 IEM_MC_BEGIN(0, 0);
4522 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4523 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4524 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4525 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4526 IEM_MC_ADVANCE_RIP();
4527 IEM_MC_END();
4528 }
4529 else
4530 {
4531 /*
4532 * Memory, register.
4533 */
4534 IEM_MC_BEGIN(0, 2);
4535 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4537
4538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4540 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4541 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4542
4543 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4544 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4545
4546 IEM_MC_ADVANCE_RIP();
4547 IEM_MC_END();
4548 }
4549 return VINF_SUCCESS;
4550}
4551
4552/* Opcode 0xf2 0x0f 0x7f - invalid */
4553
4554
4555
4556/** Opcode 0x0f 0x80. */
4557FNIEMOP_DEF(iemOp_jo_Jv)
4558{
4559 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4560 IEMOP_HLP_MIN_386();
4561 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4562 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4563 {
4564 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4566
4567 IEM_MC_BEGIN(0, 0);
4568 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4569 IEM_MC_REL_JMP_S16(i16Imm);
4570 } IEM_MC_ELSE() {
4571 IEM_MC_ADVANCE_RIP();
4572 } IEM_MC_ENDIF();
4573 IEM_MC_END();
4574 }
4575 else
4576 {
4577 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4579
4580 IEM_MC_BEGIN(0, 0);
4581 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4582 IEM_MC_REL_JMP_S32(i32Imm);
4583 } IEM_MC_ELSE() {
4584 IEM_MC_ADVANCE_RIP();
4585 } IEM_MC_ENDIF();
4586 IEM_MC_END();
4587 }
4588 return VINF_SUCCESS;
4589}
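/* The remaining 0x0f 0x80..0x8f Jcc handlers follow this same shape: a
   rel16 displacement when the effective operand size is 16-bit, otherwise a
   sign-extended rel32 (in 64-bit mode the operand size defaults to 64-bit
   via IEMOP_HLP_DEFAULT_64BIT_OP_SIZE, but the displacement encoding stays
   rel32).  Only the EFLAGS test varies, e.g. jl/jge use SF != OF and
   jle/jg additionally check ZF. */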
4590
4591
4592/** Opcode 0x0f 0x81. */
4593FNIEMOP_DEF(iemOp_jno_Jv)
4594{
4595 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4596 IEMOP_HLP_MIN_386();
4597 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4598 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4599 {
4600 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4602
4603 IEM_MC_BEGIN(0, 0);
4604 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4605 IEM_MC_ADVANCE_RIP();
4606 } IEM_MC_ELSE() {
4607 IEM_MC_REL_JMP_S16(i16Imm);
4608 } IEM_MC_ENDIF();
4609 IEM_MC_END();
4610 }
4611 else
4612 {
4613 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4615
4616 IEM_MC_BEGIN(0, 0);
4617 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4618 IEM_MC_ADVANCE_RIP();
4619 } IEM_MC_ELSE() {
4620 IEM_MC_REL_JMP_S32(i32Imm);
4621 } IEM_MC_ENDIF();
4622 IEM_MC_END();
4623 }
4624 return VINF_SUCCESS;
4625}
4626
4627
4628/** Opcode 0x0f 0x82. */
4629FNIEMOP_DEF(iemOp_jc_Jv)
4630{
4631 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4632 IEMOP_HLP_MIN_386();
4633 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4634 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4635 {
4636 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4638
4639 IEM_MC_BEGIN(0, 0);
4640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4641 IEM_MC_REL_JMP_S16(i16Imm);
4642 } IEM_MC_ELSE() {
4643 IEM_MC_ADVANCE_RIP();
4644 } IEM_MC_ENDIF();
4645 IEM_MC_END();
4646 }
4647 else
4648 {
4649 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4651
4652 IEM_MC_BEGIN(0, 0);
4653 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4654 IEM_MC_REL_JMP_S32(i32Imm);
4655 } IEM_MC_ELSE() {
4656 IEM_MC_ADVANCE_RIP();
4657 } IEM_MC_ENDIF();
4658 IEM_MC_END();
4659 }
4660 return VINF_SUCCESS;
4661}
4662
4663
4664/** Opcode 0x0f 0x83. */
4665FNIEMOP_DEF(iemOp_jnc_Jv)
4666{
4667 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4668 IEMOP_HLP_MIN_386();
4669 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4670 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4671 {
4672 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4674
4675 IEM_MC_BEGIN(0, 0);
4676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4677 IEM_MC_ADVANCE_RIP();
4678 } IEM_MC_ELSE() {
4679 IEM_MC_REL_JMP_S16(i16Imm);
4680 } IEM_MC_ENDIF();
4681 IEM_MC_END();
4682 }
4683 else
4684 {
4685 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4687
4688 IEM_MC_BEGIN(0, 0);
4689 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4690 IEM_MC_ADVANCE_RIP();
4691 } IEM_MC_ELSE() {
4692 IEM_MC_REL_JMP_S32(i32Imm);
4693 } IEM_MC_ENDIF();
4694 IEM_MC_END();
4695 }
4696 return VINF_SUCCESS;
4697}
4698
4699
4700/** Opcode 0x0f 0x84. */
4701FNIEMOP_DEF(iemOp_je_Jv)
4702{
4703 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4704 IEMOP_HLP_MIN_386();
4705 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4706 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4707 {
4708 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4710
4711 IEM_MC_BEGIN(0, 0);
4712 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4713 IEM_MC_REL_JMP_S16(i16Imm);
4714 } IEM_MC_ELSE() {
4715 IEM_MC_ADVANCE_RIP();
4716 } IEM_MC_ENDIF();
4717 IEM_MC_END();
4718 }
4719 else
4720 {
4721 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4723
4724 IEM_MC_BEGIN(0, 0);
4725 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4726 IEM_MC_REL_JMP_S32(i32Imm);
4727 } IEM_MC_ELSE() {
4728 IEM_MC_ADVANCE_RIP();
4729 } IEM_MC_ENDIF();
4730 IEM_MC_END();
4731 }
4732 return VINF_SUCCESS;
4733}
4734
4735
4736/** Opcode 0x0f 0x85. */
4737FNIEMOP_DEF(iemOp_jne_Jv)
4738{
4739 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4740 IEMOP_HLP_MIN_386();
4741 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4742 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4743 {
4744 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4746
4747 IEM_MC_BEGIN(0, 0);
4748 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4749 IEM_MC_ADVANCE_RIP();
4750 } IEM_MC_ELSE() {
4751 IEM_MC_REL_JMP_S16(i16Imm);
4752 } IEM_MC_ENDIF();
4753 IEM_MC_END();
4754 }
4755 else
4756 {
4757 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4759
4760 IEM_MC_BEGIN(0, 0);
4761 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4762 IEM_MC_ADVANCE_RIP();
4763 } IEM_MC_ELSE() {
4764 IEM_MC_REL_JMP_S32(i32Imm);
4765 } IEM_MC_ENDIF();
4766 IEM_MC_END();
4767 }
4768 return VINF_SUCCESS;
4769}
4770
4771
4772/** Opcode 0x0f 0x86. */
4773FNIEMOP_DEF(iemOp_jbe_Jv)
4774{
4775 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4776 IEMOP_HLP_MIN_386();
4777 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4778 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4779 {
4780 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782
4783 IEM_MC_BEGIN(0, 0);
4784 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4785 IEM_MC_REL_JMP_S16(i16Imm);
4786 } IEM_MC_ELSE() {
4787 IEM_MC_ADVANCE_RIP();
4788 } IEM_MC_ENDIF();
4789 IEM_MC_END();
4790 }
4791 else
4792 {
4793 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4795
4796 IEM_MC_BEGIN(0, 0);
4797 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4798 IEM_MC_REL_JMP_S32(i32Imm);
4799 } IEM_MC_ELSE() {
4800 IEM_MC_ADVANCE_RIP();
4801 } IEM_MC_ENDIF();
4802 IEM_MC_END();
4803 }
4804 return VINF_SUCCESS;
4805}
4806
4807
4808/** Opcode 0x0f 0x87. */
4809FNIEMOP_DEF(iemOp_jnbe_Jv)
4810{
4811 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4812 IEMOP_HLP_MIN_386();
4813 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4814 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4815 {
4816 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4818
4819 IEM_MC_BEGIN(0, 0);
4820 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4821 IEM_MC_ADVANCE_RIP();
4822 } IEM_MC_ELSE() {
4823 IEM_MC_REL_JMP_S16(i16Imm);
4824 } IEM_MC_ENDIF();
4825 IEM_MC_END();
4826 }
4827 else
4828 {
4829 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4831
4832 IEM_MC_BEGIN(0, 0);
4833 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4834 IEM_MC_ADVANCE_RIP();
4835 } IEM_MC_ELSE() {
4836 IEM_MC_REL_JMP_S32(i32Imm);
4837 } IEM_MC_ENDIF();
4838 IEM_MC_END();
4839 }
4840 return VINF_SUCCESS;
4841}
4842
4843
4844/** Opcode 0x0f 0x88. */
4845FNIEMOP_DEF(iemOp_js_Jv)
4846{
4847 IEMOP_MNEMONIC(js_Jv, "js Jv");
4848 IEMOP_HLP_MIN_386();
4849 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4850 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4851 {
4852 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4854
4855 IEM_MC_BEGIN(0, 0);
4856 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4857 IEM_MC_REL_JMP_S16(i16Imm);
4858 } IEM_MC_ELSE() {
4859 IEM_MC_ADVANCE_RIP();
4860 } IEM_MC_ENDIF();
4861 IEM_MC_END();
4862 }
4863 else
4864 {
4865 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4867
4868 IEM_MC_BEGIN(0, 0);
4869 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4870 IEM_MC_REL_JMP_S32(i32Imm);
4871 } IEM_MC_ELSE() {
4872 IEM_MC_ADVANCE_RIP();
4873 } IEM_MC_ENDIF();
4874 IEM_MC_END();
4875 }
4876 return VINF_SUCCESS;
4877}
4878
4879
4880/** Opcode 0x0f 0x89. */
4881FNIEMOP_DEF(iemOp_jns_Jv)
4882{
4883 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4884 IEMOP_HLP_MIN_386();
4885 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4886 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4887 {
4888 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4890
4891 IEM_MC_BEGIN(0, 0);
4892 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4893 IEM_MC_ADVANCE_RIP();
4894 } IEM_MC_ELSE() {
4895 IEM_MC_REL_JMP_S16(i16Imm);
4896 } IEM_MC_ENDIF();
4897 IEM_MC_END();
4898 }
4899 else
4900 {
4901 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4903
4904 IEM_MC_BEGIN(0, 0);
4905 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4906 IEM_MC_ADVANCE_RIP();
4907 } IEM_MC_ELSE() {
4908 IEM_MC_REL_JMP_S32(i32Imm);
4909 } IEM_MC_ENDIF();
4910 IEM_MC_END();
4911 }
4912 return VINF_SUCCESS;
4913}
4914
4915
4916/** Opcode 0x0f 0x8a. */
4917FNIEMOP_DEF(iemOp_jp_Jv)
4918{
4919 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4920 IEMOP_HLP_MIN_386();
4921 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4922 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4923 {
4924 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4926
4927 IEM_MC_BEGIN(0, 0);
4928 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4929 IEM_MC_REL_JMP_S16(i16Imm);
4930 } IEM_MC_ELSE() {
4931 IEM_MC_ADVANCE_RIP();
4932 } IEM_MC_ENDIF();
4933 IEM_MC_END();
4934 }
4935 else
4936 {
4937 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4939
4940 IEM_MC_BEGIN(0, 0);
4941 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4942 IEM_MC_REL_JMP_S32(i32Imm);
4943 } IEM_MC_ELSE() {
4944 IEM_MC_ADVANCE_RIP();
4945 } IEM_MC_ENDIF();
4946 IEM_MC_END();
4947 }
4948 return VINF_SUCCESS;
4949}
4950
4951
4952/** Opcode 0x0f 0x8b. */
4953FNIEMOP_DEF(iemOp_jnp_Jv)
4954{
4955 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4956 IEMOP_HLP_MIN_386();
4957 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4958 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4959 {
4960 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962
4963 IEM_MC_BEGIN(0, 0);
4964 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4965 IEM_MC_ADVANCE_RIP();
4966 } IEM_MC_ELSE() {
4967 IEM_MC_REL_JMP_S16(i16Imm);
4968 } IEM_MC_ENDIF();
4969 IEM_MC_END();
4970 }
4971 else
4972 {
4973 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4975
4976 IEM_MC_BEGIN(0, 0);
4977 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4978 IEM_MC_ADVANCE_RIP();
4979 } IEM_MC_ELSE() {
4980 IEM_MC_REL_JMP_S32(i32Imm);
4981 } IEM_MC_ENDIF();
4982 IEM_MC_END();
4983 }
4984 return VINF_SUCCESS;
4985}
4986
4987
4988/** Opcode 0x0f 0x8c. */
4989FNIEMOP_DEF(iemOp_jl_Jv)
4990{
4991 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4992 IEMOP_HLP_MIN_386();
4993 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4994 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4995 {
4996 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4998
4999 IEM_MC_BEGIN(0, 0);
5000 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5001 IEM_MC_REL_JMP_S16(i16Imm);
5002 } IEM_MC_ELSE() {
5003 IEM_MC_ADVANCE_RIP();
5004 } IEM_MC_ENDIF();
5005 IEM_MC_END();
5006 }
5007 else
5008 {
5009 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5011
5012 IEM_MC_BEGIN(0, 0);
5013 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5014 IEM_MC_REL_JMP_S32(i32Imm);
5015 } IEM_MC_ELSE() {
5016 IEM_MC_ADVANCE_RIP();
5017 } IEM_MC_ENDIF();
5018 IEM_MC_END();
5019 }
5020 return VINF_SUCCESS;
5021}
5022
5023
5024/** Opcode 0x0f 0x8d. */
5025FNIEMOP_DEF(iemOp_jnl_Jv)
5026{
5027 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5028 IEMOP_HLP_MIN_386();
5029 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5030 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5031 {
5032 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5034
5035 IEM_MC_BEGIN(0, 0);
5036 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5037 IEM_MC_ADVANCE_RIP();
5038 } IEM_MC_ELSE() {
5039 IEM_MC_REL_JMP_S16(i16Imm);
5040 } IEM_MC_ENDIF();
5041 IEM_MC_END();
5042 }
5043 else
5044 {
5045 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5047
5048 IEM_MC_BEGIN(0, 0);
5049 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5050 IEM_MC_ADVANCE_RIP();
5051 } IEM_MC_ELSE() {
5052 IEM_MC_REL_JMP_S32(i32Imm);
5053 } IEM_MC_ENDIF();
5054 IEM_MC_END();
5055 }
5056 return VINF_SUCCESS;
5057}
5058
5059
5060/** Opcode 0x0f 0x8e. */
5061FNIEMOP_DEF(iemOp_jle_Jv)
5062{
5063 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5064 IEMOP_HLP_MIN_386();
5065 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5066 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5067 {
5068 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5070
5071 IEM_MC_BEGIN(0, 0);
5072 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5073 IEM_MC_REL_JMP_S16(i16Imm);
5074 } IEM_MC_ELSE() {
5075 IEM_MC_ADVANCE_RIP();
5076 } IEM_MC_ENDIF();
5077 IEM_MC_END();
5078 }
5079 else
5080 {
5081 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5083
5084 IEM_MC_BEGIN(0, 0);
5085 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5086 IEM_MC_REL_JMP_S32(i32Imm);
5087 } IEM_MC_ELSE() {
5088 IEM_MC_ADVANCE_RIP();
5089 } IEM_MC_ENDIF();
5090 IEM_MC_END();
5091 }
5092 return VINF_SUCCESS;
5093}
5094
5095
5096/** Opcode 0x0f 0x8f. */
5097FNIEMOP_DEF(iemOp_jnle_Jv)
5098{
5099 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5100 IEMOP_HLP_MIN_386();
5101 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5102 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5103 {
5104 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5106
5107 IEM_MC_BEGIN(0, 0);
5108 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5109 IEM_MC_ADVANCE_RIP();
5110 } IEM_MC_ELSE() {
5111 IEM_MC_REL_JMP_S16(i16Imm);
5112 } IEM_MC_ENDIF();
5113 IEM_MC_END();
5114 }
5115 else
5116 {
5117 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5119
5120 IEM_MC_BEGIN(0, 0);
5121 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5122 IEM_MC_ADVANCE_RIP();
5123 } IEM_MC_ELSE() {
5124 IEM_MC_REL_JMP_S32(i32Imm);
5125 } IEM_MC_ENDIF();
5126 IEM_MC_END();
5127 }
5128 return VINF_SUCCESS;
5129}
5130
5131
5132/** Opcode 0x0f 0x90. */
5133FNIEMOP_DEF(iemOp_seto_Eb)
5134{
5135 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5136 IEMOP_HLP_MIN_386();
5137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5138
5139 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5140 * any way. AMD says it's "unused", whatever that means. We're
5141 * ignoring for now. */
5142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5143 {
5144 /* register target */
5145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5146 IEM_MC_BEGIN(0, 0);
5147 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5148 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5149 } IEM_MC_ELSE() {
5150 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5151 } IEM_MC_ENDIF();
5152 IEM_MC_ADVANCE_RIP();
5153 IEM_MC_END();
5154 }
5155 else
5156 {
5157 /* memory target */
5158 IEM_MC_BEGIN(0, 1);
5159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5162 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5163 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5164 } IEM_MC_ELSE() {
5165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5166 } IEM_MC_ENDIF();
5167 IEM_MC_ADVANCE_RIP();
5168 IEM_MC_END();
5169 }
5170 return VINF_SUCCESS;
5171}
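/* The remaining setcc handlers mirror this one: the Eb destination is
   always written, 1 when the condition holds and 0 otherwise; only the
   EFLAGS test differs. */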
5172
5173
5174/** Opcode 0x0f 0x91. */
5175FNIEMOP_DEF(iemOp_setno_Eb)
5176{
5177 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5178 IEMOP_HLP_MIN_386();
5179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5180
5181 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5182 * any way. AMD says it's "unused", whatever that means. We're
5183 * ignoring for now. */
5184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5185 {
5186 /* register target */
5187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5188 IEM_MC_BEGIN(0, 0);
5189 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5190 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5191 } IEM_MC_ELSE() {
5192 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5193 } IEM_MC_ENDIF();
5194 IEM_MC_ADVANCE_RIP();
5195 IEM_MC_END();
5196 }
5197 else
5198 {
5199 /* memory target */
5200 IEM_MC_BEGIN(0, 1);
5201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5205 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5206 } IEM_MC_ELSE() {
5207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5208 } IEM_MC_ENDIF();
5209 IEM_MC_ADVANCE_RIP();
5210 IEM_MC_END();
5211 }
5212 return VINF_SUCCESS;
5213}
5214
5215
5216/** Opcode 0x0f 0x92. */
5217FNIEMOP_DEF(iemOp_setc_Eb)
5218{
5219 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5220 IEMOP_HLP_MIN_386();
5221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5222
5223 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5224 * any way. AMD says it's "unused", whatever that means. We're
5225 * ignoring for now. */
5226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5227 {
5228 /* register target */
5229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5230 IEM_MC_BEGIN(0, 0);
5231 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5232 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5233 } IEM_MC_ELSE() {
5234 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5235 } IEM_MC_ENDIF();
5236 IEM_MC_ADVANCE_RIP();
5237 IEM_MC_END();
5238 }
5239 else
5240 {
5241 /* memory target */
5242 IEM_MC_BEGIN(0, 1);
5243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5247 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5248 } IEM_MC_ELSE() {
5249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5250 } IEM_MC_ENDIF();
5251 IEM_MC_ADVANCE_RIP();
5252 IEM_MC_END();
5253 }
5254 return VINF_SUCCESS;
5255}
5256
5257
5258/** Opcode 0x0f 0x93. */
5259FNIEMOP_DEF(iemOp_setnc_Eb)
5260{
5261 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5262 IEMOP_HLP_MIN_386();
5263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5264
5265 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5266 * any way. AMD says it's "unused", whatever that means. We're
5267 * ignoring for now. */
5268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5269 {
5270 /* register target */
5271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5272 IEM_MC_BEGIN(0, 0);
5273 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5274 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5275 } IEM_MC_ELSE() {
5276 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5277 } IEM_MC_ENDIF();
5278 IEM_MC_ADVANCE_RIP();
5279 IEM_MC_END();
5280 }
5281 else
5282 {
5283 /* memory target */
5284 IEM_MC_BEGIN(0, 1);
5285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5289 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5290 } IEM_MC_ELSE() {
5291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5292 } IEM_MC_ENDIF();
5293 IEM_MC_ADVANCE_RIP();
5294 IEM_MC_END();
5295 }
5296 return VINF_SUCCESS;
5297}
5298
5299
5300/** Opcode 0x0f 0x94. */
5301FNIEMOP_DEF(iemOp_sete_Eb)
5302{
5303 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5304 IEMOP_HLP_MIN_386();
5305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5306
5307 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5308 * any way. AMD says it's "unused", whatever that means. We're
5309 * ignoring for now. */
5310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5311 {
5312 /* register target */
5313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5314 IEM_MC_BEGIN(0, 0);
5315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5316 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5317 } IEM_MC_ELSE() {
5318 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5319 } IEM_MC_ENDIF();
5320 IEM_MC_ADVANCE_RIP();
5321 IEM_MC_END();
5322 }
5323 else
5324 {
5325 /* memory target */
5326 IEM_MC_BEGIN(0, 1);
5327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5332 } IEM_MC_ELSE() {
5333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5334 } IEM_MC_ENDIF();
5335 IEM_MC_ADVANCE_RIP();
5336 IEM_MC_END();
5337 }
5338 return VINF_SUCCESS;
5339}
5340
5341
5342/** Opcode 0x0f 0x95. */
5343FNIEMOP_DEF(iemOp_setne_Eb)
5344{
5345 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5346 IEMOP_HLP_MIN_386();
5347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5348
5349 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5350 * any way. AMD says it's "unused", whatever that means. We're
5351 * ignoring for now. */
5352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5353 {
5354 /* register target */
5355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5356 IEM_MC_BEGIN(0, 0);
5357 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5358 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5359 } IEM_MC_ELSE() {
5360 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5361 } IEM_MC_ENDIF();
5362 IEM_MC_ADVANCE_RIP();
5363 IEM_MC_END();
5364 }
5365 else
5366 {
5367 /* memory target */
5368 IEM_MC_BEGIN(0, 1);
5369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5372 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5373 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5374 } IEM_MC_ELSE() {
5375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5376 } IEM_MC_ENDIF();
5377 IEM_MC_ADVANCE_RIP();
5378 IEM_MC_END();
5379 }
5380 return VINF_SUCCESS;
5381}
5382
5383
5384/** Opcode 0x0f 0x96. */
5385FNIEMOP_DEF(iemOp_setbe_Eb)
5386{
5387 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5388 IEMOP_HLP_MIN_386();
5389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5390
5391 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5392 * any way. AMD says it's "unused", whatever that means. We're
5393 * ignoring for now. */
5394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5395 {
5396 /* register target */
5397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5398 IEM_MC_BEGIN(0, 0);
5399 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5400 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5401 } IEM_MC_ELSE() {
5402 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5403 } IEM_MC_ENDIF();
5404 IEM_MC_ADVANCE_RIP();
5405 IEM_MC_END();
5406 }
5407 else
5408 {
5409 /* memory target */
5410 IEM_MC_BEGIN(0, 1);
5411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5414 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5415 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5416 } IEM_MC_ELSE() {
5417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5418 } IEM_MC_ENDIF();
5419 IEM_MC_ADVANCE_RIP();
5420 IEM_MC_END();
5421 }
5422 return VINF_SUCCESS;
5423}
5424
5425
5426/** Opcode 0x0f 0x97. */
5427FNIEMOP_DEF(iemOp_setnbe_Eb)
5428{
5429 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5430 IEMOP_HLP_MIN_386();
5431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5432
5433 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5434 * any way. AMD says it's "unused", whatever that means. We're
5435 * ignoring for now. */
5436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5437 {
5438 /* register target */
5439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5440 IEM_MC_BEGIN(0, 0);
5441 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5442 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5443 } IEM_MC_ELSE() {
5444 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5445 } IEM_MC_ENDIF();
5446 IEM_MC_ADVANCE_RIP();
5447 IEM_MC_END();
5448 }
5449 else
5450 {
5451 /* memory target */
5452 IEM_MC_BEGIN(0, 1);
5453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5456 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5457 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5458 } IEM_MC_ELSE() {
5459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5460 } IEM_MC_ENDIF();
5461 IEM_MC_ADVANCE_RIP();
5462 IEM_MC_END();
5463 }
5464 return VINF_SUCCESS;
5465}
5466
5467
5468/** Opcode 0x0f 0x98. */
5469FNIEMOP_DEF(iemOp_sets_Eb)
5470{
5471 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5472 IEMOP_HLP_MIN_386();
5473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5474
5475 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5476 * any way. AMD says it's "unused", whatever that means. We're
5477 * ignoring for now. */
5478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5479 {
5480 /* register target */
5481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5482 IEM_MC_BEGIN(0, 0);
5483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5484 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5485 } IEM_MC_ELSE() {
5486 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5487 } IEM_MC_ENDIF();
5488 IEM_MC_ADVANCE_RIP();
5489 IEM_MC_END();
5490 }
5491 else
5492 {
5493 /* memory target */
5494 IEM_MC_BEGIN(0, 1);
5495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5498 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5500 } IEM_MC_ELSE() {
5501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5502 } IEM_MC_ENDIF();
5503 IEM_MC_ADVANCE_RIP();
5504 IEM_MC_END();
5505 }
5506 return VINF_SUCCESS;
5507}
5508
5509
5510/** Opcode 0x0f 0x99. */
5511FNIEMOP_DEF(iemOp_setns_Eb)
5512{
5513 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5514 IEMOP_HLP_MIN_386();
5515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5516
5517 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5518 * any way. AMD says it's "unused", whatever that means. We're
5519     * ignoring it for now. */
5520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5521 {
5522 /* register target */
5523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5524 IEM_MC_BEGIN(0, 0);
5525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5526 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5527 } IEM_MC_ELSE() {
5528 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5529 } IEM_MC_ENDIF();
5530 IEM_MC_ADVANCE_RIP();
5531 IEM_MC_END();
5532 }
5533 else
5534 {
5535 /* memory target */
5536 IEM_MC_BEGIN(0, 1);
5537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5540 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5541 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5542 } IEM_MC_ELSE() {
5543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5544 } IEM_MC_ENDIF();
5545 IEM_MC_ADVANCE_RIP();
5546 IEM_MC_END();
5547 }
5548 return VINF_SUCCESS;
5549}
5550
5551
5552/** Opcode 0x0f 0x9a. */
5553FNIEMOP_DEF(iemOp_setp_Eb)
5554{
5555 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5556 IEMOP_HLP_MIN_386();
5557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5558
5559 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5560 * any way. AMD says it's "unused", whatever that means. We're
5561     * ignoring it for now. */
5562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5563 {
5564 /* register target */
5565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5566 IEM_MC_BEGIN(0, 0);
5567 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5568 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5569 } IEM_MC_ELSE() {
5570 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5571 } IEM_MC_ENDIF();
5572 IEM_MC_ADVANCE_RIP();
5573 IEM_MC_END();
5574 }
5575 else
5576 {
5577 /* memory target */
5578 IEM_MC_BEGIN(0, 1);
5579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5583 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5584 } IEM_MC_ELSE() {
5585 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5586 } IEM_MC_ENDIF();
5587 IEM_MC_ADVANCE_RIP();
5588 IEM_MC_END();
5589 }
5590 return VINF_SUCCESS;
5591}
5592
5593
5594/** Opcode 0x0f 0x9b. */
5595FNIEMOP_DEF(iemOp_setnp_Eb)
5596{
5597 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5598 IEMOP_HLP_MIN_386();
5599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5600
5601 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5602 * any way. AMD says it's "unused", whatever that means. We're
5603     * ignoring it for now. */
5604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5605 {
5606 /* register target */
5607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5608 IEM_MC_BEGIN(0, 0);
5609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5610 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5611 } IEM_MC_ELSE() {
5612 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5613 } IEM_MC_ENDIF();
5614 IEM_MC_ADVANCE_RIP();
5615 IEM_MC_END();
5616 }
5617 else
5618 {
5619 /* memory target */
5620 IEM_MC_BEGIN(0, 1);
5621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5624 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5625 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5626 } IEM_MC_ELSE() {
5627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5628 } IEM_MC_ENDIF();
5629 IEM_MC_ADVANCE_RIP();
5630 IEM_MC_END();
5631 }
5632 return VINF_SUCCESS;
5633}
5634
5635
5636/** Opcode 0x0f 0x9c. */
5637FNIEMOP_DEF(iemOp_setl_Eb)
5638{
5639 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5640 IEMOP_HLP_MIN_386();
5641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5642
5643 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5644 * any way. AMD says it's "unused", whatever that means. We're
5645     * ignoring it for now. */
5646 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5647 {
5648 /* register target */
5649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5650 IEM_MC_BEGIN(0, 0);
5651 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5652 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5653 } IEM_MC_ELSE() {
5654 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5655 } IEM_MC_ENDIF();
5656 IEM_MC_ADVANCE_RIP();
5657 IEM_MC_END();
5658 }
5659 else
5660 {
5661 /* memory target */
5662 IEM_MC_BEGIN(0, 1);
5663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5666 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5667 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5668 } IEM_MC_ELSE() {
5669 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5670 } IEM_MC_ENDIF();
5671 IEM_MC_ADVANCE_RIP();
5672 IEM_MC_END();
5673 }
5674 return VINF_SUCCESS;
5675}
5676
5677
5678/** Opcode 0x0f 0x9d. */
5679FNIEMOP_DEF(iemOp_setnl_Eb)
5680{
5681 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5682 IEMOP_HLP_MIN_386();
5683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5684
5685 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5686 * any way. AMD says it's "unused", whatever that means. We're
5687     * ignoring it for now. */
5688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5689 {
5690 /* register target */
5691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5692 IEM_MC_BEGIN(0, 0);
5693 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5694 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5695 } IEM_MC_ELSE() {
5696 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5697 } IEM_MC_ENDIF();
5698 IEM_MC_ADVANCE_RIP();
5699 IEM_MC_END();
5700 }
5701 else
5702 {
5703 /* memory target */
5704 IEM_MC_BEGIN(0, 1);
5705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5708 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5709 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5710 } IEM_MC_ELSE() {
5711 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5712 } IEM_MC_ENDIF();
5713 IEM_MC_ADVANCE_RIP();
5714 IEM_MC_END();
5715 }
5716 return VINF_SUCCESS;
5717}
5718
5719
5720/** Opcode 0x0f 0x9e. */
5721FNIEMOP_DEF(iemOp_setle_Eb)
5722{
5723 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5724 IEMOP_HLP_MIN_386();
5725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5726
5727 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5728 * any way. AMD says it's "unused", whatever that means. We're
5729     * ignoring it for now. */
5730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5731 {
5732 /* register target */
5733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5734 IEM_MC_BEGIN(0, 0);
5735 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5736 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5737 } IEM_MC_ELSE() {
5738 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5739 } IEM_MC_ENDIF();
5740 IEM_MC_ADVANCE_RIP();
5741 IEM_MC_END();
5742 }
5743 else
5744 {
5745 /* memory target */
5746 IEM_MC_BEGIN(0, 1);
5747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5750 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5751 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5752 } IEM_MC_ELSE() {
5753 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5754 } IEM_MC_ENDIF();
5755 IEM_MC_ADVANCE_RIP();
5756 IEM_MC_END();
5757 }
5758 return VINF_SUCCESS;
5759}
5760
5761
5762/** Opcode 0x0f 0x9f. */
5763FNIEMOP_DEF(iemOp_setnle_Eb)
5764{
5765 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5766 IEMOP_HLP_MIN_386();
5767 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5768
5769 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5770 * any way. AMD says it's "unused", whatever that means. We're
5771     * ignoring it for now. */
5772 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5773 {
5774 /* register target */
5775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5776 IEM_MC_BEGIN(0, 0);
5777 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5778 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5779 } IEM_MC_ELSE() {
5780 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5781 } IEM_MC_ENDIF();
5782 IEM_MC_ADVANCE_RIP();
5783 IEM_MC_END();
5784 }
5785 else
5786 {
5787 /* memory target */
5788 IEM_MC_BEGIN(0, 1);
5789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5792 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5793 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5794 } IEM_MC_ELSE() {
5795 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5796 } IEM_MC_ENDIF();
5797 IEM_MC_ADVANCE_RIP();
5798 IEM_MC_END();
5799 }
5800 return VINF_SUCCESS;
5801}
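
/*
 * The setcc forms above all share one shape and differ only in the EFLAGS
 * predicate tested. Below is a minimal standalone sketch of the three
 * predicate families used in this range (single flag, SF != OF, and the
 * combined ZF || SF != OF form of setle); the iemSketch names are
 * hypothetical and the block is illustrative only, not part of the build.
 */
#if 0
static uint8_t iemSketchSetbe(uint32_t fEFlags) /* 0x96: CF or ZF set */
{
    return (fEFlags & (X86_EFL_CF | X86_EFL_ZF)) ? 1 : 0;
}

static uint8_t iemSketchSetl(uint32_t fEFlags)  /* 0x9c: SF != OF */
{
    return !(fEFlags & X86_EFL_SF) != !(fEFlags & X86_EFL_OF) ? 1 : 0;
}

static uint8_t iemSketchSetle(uint32_t fEFlags) /* 0x9e: ZF set, or SF != OF */
{
    return ((fEFlags & X86_EFL_ZF) || !(fEFlags & X86_EFL_SF) != !(fEFlags & X86_EFL_OF)) ? 1 : 0;
}
#endif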
5802
5803
5804/**
5805 * Common 'push segment-register' helper.
5806 */
5807FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5808{
5809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5810    Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS are pushable in 64-bit mode. */
5811 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5812
5813 switch (pVCpu->iem.s.enmEffOpSize)
5814 {
5815 case IEMMODE_16BIT:
5816 IEM_MC_BEGIN(0, 1);
5817 IEM_MC_LOCAL(uint16_t, u16Value);
5818 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5819 IEM_MC_PUSH_U16(u16Value);
5820 IEM_MC_ADVANCE_RIP();
5821 IEM_MC_END();
5822 break;
5823
5824 case IEMMODE_32BIT:
5825 IEM_MC_BEGIN(0, 1);
5826 IEM_MC_LOCAL(uint32_t, u32Value);
5827 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5828 IEM_MC_PUSH_U32_SREG(u32Value);
5829 IEM_MC_ADVANCE_RIP();
5830 IEM_MC_END();
5831 break;
5832
5833 case IEMMODE_64BIT:
5834 IEM_MC_BEGIN(0, 1);
5835 IEM_MC_LOCAL(uint64_t, u64Value);
5836 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5837 IEM_MC_PUSH_U64(u64Value);
5838 IEM_MC_ADVANCE_RIP();
5839 IEM_MC_END();
5840 break;
5841 }
5842
5843 return VINF_SUCCESS;
5844}
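
/*
 * Why IEM_MC_PUSH_U32_SREG rather than a plain 32-bit push: per the PUSH
 * description in the Intel SDM, a segment register pushed with a 32-bit
 * operand size may be written as a 16-bit store that leaves the upper half
 * of the stack slot unmodified (recent CPUs do exactly that). A minimal
 * sketch of that behaviour; hypothetical helper, not IEM code, and the
 * block is not part of the build.
 */
#if 0
static void sketchPushSRegU32(uint8_t *pbStack, uint32_t *puEsp, uint16_t uSel)
{
    *puEsp -= 4;                                /* a full 32-bit slot...  */
    *(uint16_t *)(pbStack + *puEsp) = uSel;     /* ...but a 16-bit write. */
}
#endif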
5845
5846
5847/** Opcode 0x0f 0xa0. */
5848FNIEMOP_DEF(iemOp_push_fs)
5849{
5850 IEMOP_MNEMONIC(push_fs, "push fs");
5851 IEMOP_HLP_MIN_386();
5852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5853 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5854}
5855
5856
5857/** Opcode 0x0f 0xa1. */
5858FNIEMOP_DEF(iemOp_pop_fs)
5859{
5860 IEMOP_MNEMONIC(pop_fs, "pop fs");
5861 IEMOP_HLP_MIN_386();
5862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5863 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5864}
5865
5866
5867/** Opcode 0x0f 0xa2. */
5868FNIEMOP_DEF(iemOp_cpuid)
5869{
5870 IEMOP_MNEMONIC(cpuid, "cpuid");
5871    IEMOP_HLP_MIN_486(); /* Not all 486 models have CPUID. */
5872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5873 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5874}
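
/*
 * For comparing what iemCImpl_cpuid synthesizes for the guest against the
 * host CPU, a leaf can be queried with GCC/clang's <cpuid.h>. Sketch only,
 * assuming a GCC-style host toolchain; not part of the build.
 */
#if 0
#include <cpuid.h>
static int sketchHostCpuid(unsigned uLeaf, unsigned auRegs[4])
{
    /* Returns 0 if the leaf is not supported by the host CPU. */
    return __get_cpuid(uLeaf, &auRegs[0], &auRegs[1], &auRegs[2], &auRegs[3]);
}
#endif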
5875
5876
5877/**
5878 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5879 * iemOp_bts_Ev_Gv.
5880 */
5881FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5882{
5883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5885
5886 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5887 {
5888 /* register destination. */
5889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5890 switch (pVCpu->iem.s.enmEffOpSize)
5891 {
5892 case IEMMODE_16BIT:
5893 IEM_MC_BEGIN(3, 0);
5894 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5895 IEM_MC_ARG(uint16_t, u16Src, 1);
5896 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5897
5898 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5899 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5900 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5901 IEM_MC_REF_EFLAGS(pEFlags);
5902 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5903
5904 IEM_MC_ADVANCE_RIP();
5905 IEM_MC_END();
5906 return VINF_SUCCESS;
5907
5908 case IEMMODE_32BIT:
5909 IEM_MC_BEGIN(3, 0);
5910 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5911 IEM_MC_ARG(uint32_t, u32Src, 1);
5912 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5913
5914 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5915 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5916 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5917 IEM_MC_REF_EFLAGS(pEFlags);
5918 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5919
5920 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5921 IEM_MC_ADVANCE_RIP();
5922 IEM_MC_END();
5923 return VINF_SUCCESS;
5924
5925 case IEMMODE_64BIT:
5926 IEM_MC_BEGIN(3, 0);
5927 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5928 IEM_MC_ARG(uint64_t, u64Src, 1);
5929 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5930
5931 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5932 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5933 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5934 IEM_MC_REF_EFLAGS(pEFlags);
5935 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5936
5937 IEM_MC_ADVANCE_RIP();
5938 IEM_MC_END();
5939 return VINF_SUCCESS;
5940
5941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5942 }
5943 }
5944 else
5945 {
5946 /* memory destination. */
5947
5948 uint32_t fAccess;
5949 if (pImpl->pfnLockedU16)
5950 fAccess = IEM_ACCESS_DATA_RW;
5951 else /* BT */
5952 fAccess = IEM_ACCESS_DATA_R;
5953
5954 /** @todo test negative bit offsets! */
5955 switch (pVCpu->iem.s.enmEffOpSize)
5956 {
5957 case IEMMODE_16BIT:
5958 IEM_MC_BEGIN(3, 2);
5959 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5960 IEM_MC_ARG(uint16_t, u16Src, 1);
5961 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5963 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5964
5965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5966 if (pImpl->pfnLockedU16)
5967 IEMOP_HLP_DONE_DECODING();
5968 else
5969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5970 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5971 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5972 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5973 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5974 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5975 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5976 IEM_MC_FETCH_EFLAGS(EFlags);
5977
5978 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5979 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5980 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5981 else
5982 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5983 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5984
5985 IEM_MC_COMMIT_EFLAGS(EFlags);
5986 IEM_MC_ADVANCE_RIP();
5987 IEM_MC_END();
5988 return VINF_SUCCESS;
5989
5990 case IEMMODE_32BIT:
5991 IEM_MC_BEGIN(3, 2);
5992 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5993 IEM_MC_ARG(uint32_t, u32Src, 1);
5994 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5996 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5997
5998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5999 if (pImpl->pfnLockedU16)
6000 IEMOP_HLP_DONE_DECODING();
6001 else
6002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6003 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6004 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6005 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6006 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6007 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6008 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6009 IEM_MC_FETCH_EFLAGS(EFlags);
6010
6011 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6012 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6013 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6014 else
6015 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6016 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6017
6018 IEM_MC_COMMIT_EFLAGS(EFlags);
6019 IEM_MC_ADVANCE_RIP();
6020 IEM_MC_END();
6021 return VINF_SUCCESS;
6022
6023 case IEMMODE_64BIT:
6024 IEM_MC_BEGIN(3, 2);
6025 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6026 IEM_MC_ARG(uint64_t, u64Src, 1);
6027 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6029 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6030
6031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6032 if (pImpl->pfnLockedU16)
6033 IEMOP_HLP_DONE_DECODING();
6034 else
6035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6036 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6037 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6038 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6039 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6040 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6041 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6042 IEM_MC_FETCH_EFLAGS(EFlags);
6043
6044 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6045 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6046 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6047 else
6048 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6049 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6050
6051 IEM_MC_COMMIT_EFLAGS(EFlags);
6052 IEM_MC_ADVANCE_RIP();
6053 IEM_MC_END();
6054 return VINF_SUCCESS;
6055
6056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6057 }
6058 }
6059}
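
/*
 * The memory forms above turn the (signed) bit offset in the source
 * register into a byte displacement plus a bit number within the addressed
 * word: the SAR/SHL pair is an arithmetic divide and multiply, so negative
 * offsets correctly address storage below GCPtrEffDst. Standalone sketch of
 * the 16-bit case (hypothetical helper; assumes arithmetic right shift for
 * signed values, as the IEM microcode does; not part of the build):
 */
#if 0
static void sketchBtAdjust16(int16_t iBitOffset, intptr_t *piByteDisp, uint16_t *puBitNo)
{
    *puBitNo    = (uint16_t)iBitOffset & 0x0f;      /* bit within the word  */
    *piByteDisp = (intptr_t)(iBitOffset >> 4) * 2;  /* word index * 2 bytes */
}
#endif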
6060
6061
6062/** Opcode 0x0f 0xa3. */
6063FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6064{
6065 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6066 IEMOP_HLP_MIN_386();
6067 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6068}
6069
6070
6071/**
6072 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6073 */
6074FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6075{
6076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6077 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6078
6079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6080 {
6081 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6083
6084 switch (pVCpu->iem.s.enmEffOpSize)
6085 {
6086 case IEMMODE_16BIT:
6087 IEM_MC_BEGIN(4, 0);
6088 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6089 IEM_MC_ARG(uint16_t, u16Src, 1);
6090 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6091 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6092
6093 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6094 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6095 IEM_MC_REF_EFLAGS(pEFlags);
6096 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6097
6098 IEM_MC_ADVANCE_RIP();
6099 IEM_MC_END();
6100 return VINF_SUCCESS;
6101
6102 case IEMMODE_32BIT:
6103 IEM_MC_BEGIN(4, 0);
6104 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6105 IEM_MC_ARG(uint32_t, u32Src, 1);
6106 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6107 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6108
6109 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6110 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6111 IEM_MC_REF_EFLAGS(pEFlags);
6112 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6113
6114 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6115 IEM_MC_ADVANCE_RIP();
6116 IEM_MC_END();
6117 return VINF_SUCCESS;
6118
6119 case IEMMODE_64BIT:
6120 IEM_MC_BEGIN(4, 0);
6121 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6122 IEM_MC_ARG(uint64_t, u64Src, 1);
6123 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6124 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6125
6126 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6127 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6128 IEM_MC_REF_EFLAGS(pEFlags);
6129 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6130
6131 IEM_MC_ADVANCE_RIP();
6132 IEM_MC_END();
6133 return VINF_SUCCESS;
6134
6135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6136 }
6137 }
6138 else
6139 {
6140 switch (pVCpu->iem.s.enmEffOpSize)
6141 {
6142 case IEMMODE_16BIT:
6143 IEM_MC_BEGIN(4, 2);
6144 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6145 IEM_MC_ARG(uint16_t, u16Src, 1);
6146 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6147 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6149
6150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6151 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6152 IEM_MC_ASSIGN(cShiftArg, cShift);
6153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6154 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6155 IEM_MC_FETCH_EFLAGS(EFlags);
6156 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6157 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6158
6159 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6160 IEM_MC_COMMIT_EFLAGS(EFlags);
6161 IEM_MC_ADVANCE_RIP();
6162 IEM_MC_END();
6163 return VINF_SUCCESS;
6164
6165 case IEMMODE_32BIT:
6166 IEM_MC_BEGIN(4, 2);
6167 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6168 IEM_MC_ARG(uint32_t, u32Src, 1);
6169 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6170 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6172
6173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6174 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6175 IEM_MC_ASSIGN(cShiftArg, cShift);
6176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6177 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6178 IEM_MC_FETCH_EFLAGS(EFlags);
6179 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6180 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6181
6182 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6183 IEM_MC_COMMIT_EFLAGS(EFlags);
6184 IEM_MC_ADVANCE_RIP();
6185 IEM_MC_END();
6186 return VINF_SUCCESS;
6187
6188 case IEMMODE_64BIT:
6189 IEM_MC_BEGIN(4, 2);
6190 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6191 IEM_MC_ARG(uint64_t, u64Src, 1);
6192 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6193 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6195
6196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6197 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6198 IEM_MC_ASSIGN(cShiftArg, cShift);
6199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6200 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6201 IEM_MC_FETCH_EFLAGS(EFlags);
6202 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6203 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6204
6205 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6206 IEM_MC_COMMIT_EFLAGS(EFlags);
6207 IEM_MC_ADVANCE_RIP();
6208 IEM_MC_END();
6209 return VINF_SUCCESS;
6210
6211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6212 }
6213 }
6214}
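
/*
 * The double-precision shift core: the count is masked to the operand width
 * (0..31 here), a zero count is a no-op, and the vacated low bits of the
 * destination are filled from the top of the source. Standalone 32-bit SHLD
 * sketch (hypothetical helper; flag computation elided; not part of the
 * build):
 */
#if 0
static uint32_t sketchShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;
    if (!cShift)
        return uDst;    /* also avoids the undefined 32-bit shift below */
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif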
6215
6216
6217/**
6218 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6219 */
6220FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6221{
6222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6223 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6224
6225 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6226 {
6227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6228
6229 switch (pVCpu->iem.s.enmEffOpSize)
6230 {
6231 case IEMMODE_16BIT:
6232 IEM_MC_BEGIN(4, 0);
6233 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6234 IEM_MC_ARG(uint16_t, u16Src, 1);
6235 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6236 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6237
6238 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6239 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6240 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6241 IEM_MC_REF_EFLAGS(pEFlags);
6242 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6243
6244 IEM_MC_ADVANCE_RIP();
6245 IEM_MC_END();
6246 return VINF_SUCCESS;
6247
6248 case IEMMODE_32BIT:
6249 IEM_MC_BEGIN(4, 0);
6250 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6251 IEM_MC_ARG(uint32_t, u32Src, 1);
6252 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6253 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6254
6255 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6256 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6257 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6258 IEM_MC_REF_EFLAGS(pEFlags);
6259 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6260
6261 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6262 IEM_MC_ADVANCE_RIP();
6263 IEM_MC_END();
6264 return VINF_SUCCESS;
6265
6266 case IEMMODE_64BIT:
6267 IEM_MC_BEGIN(4, 0);
6268 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6269 IEM_MC_ARG(uint64_t, u64Src, 1);
6270 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6271 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6272
6273 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6274 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6275 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6276 IEM_MC_REF_EFLAGS(pEFlags);
6277 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6278
6279 IEM_MC_ADVANCE_RIP();
6280 IEM_MC_END();
6281 return VINF_SUCCESS;
6282
6283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6284 }
6285 }
6286 else
6287 {
6288 switch (pVCpu->iem.s.enmEffOpSize)
6289 {
6290 case IEMMODE_16BIT:
6291 IEM_MC_BEGIN(4, 2);
6292 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6293 IEM_MC_ARG(uint16_t, u16Src, 1);
6294 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6295 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6297
6298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6300 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6301 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6302 IEM_MC_FETCH_EFLAGS(EFlags);
6303 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6304 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6305
6306 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6307 IEM_MC_COMMIT_EFLAGS(EFlags);
6308 IEM_MC_ADVANCE_RIP();
6309 IEM_MC_END();
6310 return VINF_SUCCESS;
6311
6312 case IEMMODE_32BIT:
6313 IEM_MC_BEGIN(4, 2);
6314 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6315 IEM_MC_ARG(uint32_t, u32Src, 1);
6316 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6317 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6319
6320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6322 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6323 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6324 IEM_MC_FETCH_EFLAGS(EFlags);
6325 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6326 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6327
6328 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6329 IEM_MC_COMMIT_EFLAGS(EFlags);
6330 IEM_MC_ADVANCE_RIP();
6331 IEM_MC_END();
6332 return VINF_SUCCESS;
6333
6334 case IEMMODE_64BIT:
6335 IEM_MC_BEGIN(4, 2);
6336 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6337 IEM_MC_ARG(uint64_t, u64Src, 1);
6338 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6339 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6341
6342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6344 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6345 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6346 IEM_MC_FETCH_EFLAGS(EFlags);
6347 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6348 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6349
6350 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6351 IEM_MC_COMMIT_EFLAGS(EFlags);
6352 IEM_MC_ADVANCE_RIP();
6353 IEM_MC_END();
6354 return VINF_SUCCESS;
6355
6356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6357 }
6358 }
6359}
6360
6361
6363/** Opcode 0x0f 0xa4. */
6364FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6365{
6366 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6367 IEMOP_HLP_MIN_386();
6368 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6369}
6370
6371
6372/** Opcode 0x0f 0xa5. */
6373FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6374{
6375 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6376 IEMOP_HLP_MIN_386();
6377 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6378}
6379
6380
6381/** Opcode 0x0f 0xa8. */
6382FNIEMOP_DEF(iemOp_push_gs)
6383{
6384 IEMOP_MNEMONIC(push_gs, "push gs");
6385 IEMOP_HLP_MIN_386();
6386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6387 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6388}
6389
6390
6391/** Opcode 0x0f 0xa9. */
6392FNIEMOP_DEF(iemOp_pop_gs)
6393{
6394 IEMOP_MNEMONIC(pop_gs, "pop gs");
6395 IEMOP_HLP_MIN_386();
6396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6397 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6398}
6399
6400
6401/** Opcode 0x0f 0xaa. */
6402FNIEMOP_DEF(iemOp_rsm)
6403{
6404 IEMOP_MNEMONIC(rsm, "rsm");
6405 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6406 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6407 * intercept). */
6408 IEMOP_BITCH_ABOUT_STUB();
6409 return IEMOP_RAISE_INVALID_OPCODE();
6410}
6411
6414
6415/** Opcode 0x0f 0xab. */
6416FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6417{
6418 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6419 IEMOP_HLP_MIN_386();
6420 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6421}
6422
6423
6424/** Opcode 0x0f 0xac. */
6425FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6426{
6427 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6428 IEMOP_HLP_MIN_386();
6429 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6430}
6431
6432
6433/** Opcode 0x0f 0xad. */
6434FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6435{
6436 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6437 IEMOP_HLP_MIN_386();
6438 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6439}
6440
6441
6442/** Opcode 0x0f 0xae mem/0. */
6443FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6444{
6445 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6446 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6447 return IEMOP_RAISE_INVALID_OPCODE();
6448
6449 IEM_MC_BEGIN(3, 1);
6450 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6451 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6452 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6455 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6456 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6457 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6458 IEM_MC_END();
6459 return VINF_SUCCESS;
6460}
6461
6462
6463/** Opcode 0x0f 0xae mem/1. */
6464FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6465{
6466 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6467 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6468 return IEMOP_RAISE_INVALID_OPCODE();
6469
6470 IEM_MC_BEGIN(3, 1);
6471 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6472 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6473 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6476 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6477 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6478 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6479 IEM_MC_END();
6480 return VINF_SUCCESS;
6481}
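
/*
 * Rough map of the 512-byte FXSAVE/FXRSTOR image the two workers above
 * operate on (offsets per the Intel SDM): 0x00 FCW, 0x02 FSW, 0x04 abridged
 * FTW, 0x06 FOP, 0x08 FPU IP, 0x10 FPU DP, 0x18 MXCSR, 0x1c MXCSR_MASK,
 * 0x20 ST0..ST7 (16 bytes apiece), 0xa0 the XMM registers. Sketch of
 * pulling MXCSR out of such an image (hypothetical helper, not part of the
 * build):
 */
#if 0
#include <string.h>
static uint32_t sketchFxImageMxcsr(uint8_t const *pbFxImage /* 512 bytes */)
{
    uint32_t uMxcsr;
    memcpy(&uMxcsr, pbFxImage + 0x18, sizeof(uMxcsr));
    return uMxcsr;
}
#endif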
6482
6483
6484/**
6485 * @opmaps grp15
6486 * @opcode !11/2
6487 * @oppfx none
6488 * @opcpuid sse
6489 * @opgroup og_sse_mxcsrsm
6490 * @opxcpttype 5
6491 * @optest op1=0 -> mxcsr=0
6492 * @optest op1=0x2083 -> mxcsr=0x2083
6493 * @optest op1=0xfffffffe -> value.xcpt=0xd
6494 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6495 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6496 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6497 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6498 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6499 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6500 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6501 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6502 */
6503FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6504{
6505 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6506 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6507 return IEMOP_RAISE_INVALID_OPCODE();
6508
6509 IEM_MC_BEGIN(2, 0);
6510 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6511 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6514    IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR. */
6515 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6516 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6517 IEM_MC_END();
6518 return VINF_SUCCESS;
6519}
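
/*
 * The @optest rows above encode the MXCSR write rules: 0x2083 is within the
 * writable set and loads fine, while 0xfffffffe sets reserved bits and #GPs
 * (xcpt 0xd). The writable set is advertised via MXCSR_MASK in the FXSAVE
 * image (a reported zero means the default 0x0000ffbf). Sketch of the
 * validity check (hypothetical helper, not part of the build):
 */
#if 0
static int sketchLdmxcsrValid(uint32_t uNewMxcsr, uint32_t fMxcsrMask)
{
    return (uNewMxcsr & ~fMxcsrMask) == 0;  /* clear -> load; set -> #GP */
}
#endif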
6520
6521
6522/**
6523 * @opmaps grp15
6524 * @opcode !11/3
6525 * @oppfx none
6526 * @opcpuid sse
6527 * @opgroup og_sse_mxcsrsm
6528 * @opxcpttype 5
6529 * @optest mxcsr=0 -> op1=0
6530 * @optest mxcsr=0x2083 -> op1=0x2083
6531 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6532 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6533 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6534 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6535 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6536 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6537 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6538 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6539 */
6540FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6541{
6542 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6543 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6544 return IEMOP_RAISE_INVALID_OPCODE();
6545
6546 IEM_MC_BEGIN(2, 0);
6547 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6548 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6551 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6552 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6553 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6554 IEM_MC_END();
6555 return VINF_SUCCESS;
6556}
6557
6558
6559/**
6560 * @opmaps grp15
6561 * @opcode !11/4
6562 * @oppfx none
6563 * @opcpuid xsave
6564 * @opgroup og_system
6565 * @opxcpttype none
6566 */
6567FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6568{
6569 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6570 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6571 return IEMOP_RAISE_INVALID_OPCODE();
6572
6573 IEM_MC_BEGIN(3, 0);
6574 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6575 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6576 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6579 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6580 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6581 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6582 IEM_MC_END();
6583 return VINF_SUCCESS;
6584}
6585
6586
6587/**
6588 * @opmaps grp15
6589 * @opcode !11/5
6590 * @oppfx none
6591 * @opcpuid xsave
6592 * @opgroup og_system
6593 * @opxcpttype none
6594 */
6595FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6596{
6597 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6598 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6599 return IEMOP_RAISE_INVALID_OPCODE();
6600
6601 IEM_MC_BEGIN(3, 0);
6602 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6603 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6604 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6607    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, same as fxrstor above. */
6608 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6609 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6610 IEM_MC_END();
6611 return VINF_SUCCESS;
6612}
6613
6614/** Opcode 0x0f 0xae mem/6. */
6615FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6616
6617/**
6618 * @opmaps grp15
6619 * @opcode !11/7
6620 * @oppfx none
6621 * @opcpuid clfsh
6622 * @opgroup og_cachectl
6623 * @optest op1=1 ->
6624 */
6625FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6626{
6627 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6628 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6629 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6630
6631 IEM_MC_BEGIN(2, 0);
6632 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6633 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6636 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6637 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6638 IEM_MC_END();
6639 return VINF_SUCCESS;
6640}
6641
6642/**
6643 * @opmaps grp15
6644 * @opcode !11/7
6645 * @oppfx 0x66
6646 * @opcpuid clflushopt
6647 * @opgroup og_cachectl
6648 * @optest op1=1 ->
6649 */
6650FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6651{
6652 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6653 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6654 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6655
6656 IEM_MC_BEGIN(2, 0);
6657 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6658 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6661 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6662 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6663 IEM_MC_END();
6664 return VINF_SUCCESS;
6665}
6666
6667
6668/** Opcode 0x0f 0xae 11b/5. */
6669FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6670{
6671 RT_NOREF_PV(bRm);
6672 IEMOP_MNEMONIC(lfence, "lfence");
6673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6674 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6675 return IEMOP_RAISE_INVALID_OPCODE();
6676
6677 IEM_MC_BEGIN(0, 0);
6678 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6679 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6680 else
6681 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6682 IEM_MC_ADVANCE_RIP();
6683 IEM_MC_END();
6684 return VINF_SUCCESS;
6685}
6686
6687
6688/** Opcode 0x0f 0xae 11b/6. */
6689FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6690{
6691 RT_NOREF_PV(bRm);
6692 IEMOP_MNEMONIC(mfence, "mfence");
6693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6694 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6695 return IEMOP_RAISE_INVALID_OPCODE();
6696
6697 IEM_MC_BEGIN(0, 0);
6698 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6699 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6700 else
6701 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6702 IEM_MC_ADVANCE_RIP();
6703 IEM_MC_END();
6704 return VINF_SUCCESS;
6705}
6706
6707
6708/** Opcode 0x0f 0xae 11b/7. */
6709FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6710{
6711 RT_NOREF_PV(bRm);
6712 IEMOP_MNEMONIC(sfence, "sfence");
6713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6714 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6715 return IEMOP_RAISE_INVALID_OPCODE();
6716
6717 IEM_MC_BEGIN(0, 0);
6718 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6719 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6720 else
6721 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6722 IEM_MC_ADVANCE_RIP();
6723 IEM_MC_END();
6724 return VINF_SUCCESS;
6725}
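
/*
 * All three fences fall back to iemAImpl_alt_mem_fence when the host lacks
 * SSE2. The classic pre-SSE2 stand-in for a full fence is any LOCKed
 * read-modify-write, e.g. a locked add of zero to the stack. Sketch
 * assuming a GCC-style 32-bit host toolchain; illustrative only (the real
 * fallback lives in the assembly helpers) and not part of the build.
 */
#if 0
static void sketchAltMemFence(void)
{
    __asm__ __volatile__("lock; addl $0, (%%esp)" ::: "memory", "cc");
}
#endif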
6726
6727
6728/** Opcode 0xf3 0x0f 0xae 11b/0. */
6729FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6730
6731/** Opcode 0xf3 0x0f 0xae 11b/1. */
6732FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6733
6734/** Opcode 0xf3 0x0f 0xae 11b/2. */
6735FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6736
6737/** Opcode 0xf3 0x0f 0xae 11b/3. */
6738FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6739
6740
6741/**
6742 * Group 15 jump table for register variant.
6743 */
6744IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6745{ /* pfx: none, 066h, 0f3h, 0f2h */
6746 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6747 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6748 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6749 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6750 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6751 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6752 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6753 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6754};
6755AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6756
6757
6758/**
6759 * Group 15 jump table for memory variant.
6760 */
6761IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6762{ /* pfx: none, 066h, 0f3h, 0f2h */
6763 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6764 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6765 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6766 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6767 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6768 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6769 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6770 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6771};
6772AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6773
6774
6775/** Opcode 0x0f 0xae. */
6776FNIEMOP_DEF(iemOp_Grp15)
6777{
6778    IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor strictly needed, but useful for debugging 286 code. */
6779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6781 /* register, register */
6782 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6783 + pVCpu->iem.s.idxPrefix], bRm);
6784 /* memory, register */
6785 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6786 + pVCpu->iem.s.idxPrefix], bRm);
6787}
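
/*
 * Both tables above are indexed as /r * 4 + prefix column (none, 066h,
 * 0f3h, 0f2h); e.g. 0f ae with mod=3 and /5 and no prefix lands on
 * iemOp_Grp15_lfence via the register table. Sketch of the index math
 * (hypothetical helper, not part of the build):
 */
#if 0
static unsigned sketchGrp15Index(uint8_t bRm, unsigned idxPrefix /* 0..3 */)
{
    return ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + idxPrefix;
}
#endif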
6788
6789
6790/** Opcode 0x0f 0xaf. */
6791FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6792{
6793 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6794 IEMOP_HLP_MIN_386();
6795 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6796 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6797}
6798
6799
6800/** Opcode 0x0f 0xb0. */
6801FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6802{
6803 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6804 IEMOP_HLP_MIN_486();
6805 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6806
6807 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6808 {
6809 IEMOP_HLP_DONE_DECODING();
6810 IEM_MC_BEGIN(4, 0);
6811 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6812 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6813 IEM_MC_ARG(uint8_t, u8Src, 2);
6814 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6815
6816 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6817 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6818 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6819 IEM_MC_REF_EFLAGS(pEFlags);
6820 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6821 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6822 else
6823 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6824
6825 IEM_MC_ADVANCE_RIP();
6826 IEM_MC_END();
6827 }
6828 else
6829 {
6830 IEM_MC_BEGIN(4, 3);
6831 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6832 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6833 IEM_MC_ARG(uint8_t, u8Src, 2);
6834 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6836 IEM_MC_LOCAL(uint8_t, u8Al);
6837
6838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6839 IEMOP_HLP_DONE_DECODING();
6840 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6841 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6842 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6843 IEM_MC_FETCH_EFLAGS(EFlags);
6844 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6845 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6846 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6847 else
6848 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6849
6850 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6851 IEM_MC_COMMIT_EFLAGS(EFlags);
6852 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6853 IEM_MC_ADVANCE_RIP();
6854 IEM_MC_END();
6855 }
6856 return VINF_SUCCESS;
6857}
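
/*
 * CMPXCHG in one breath: compare the accumulator with the destination; on a
 * match set ZF and store the source into the destination, otherwise clear
 * ZF and load the old destination value into the accumulator. The remaining
 * arithmetic flags are set as by CMP and are elided here. Standalone
 * byte-sized sketch (hypothetical helper, no LOCK handling, not part of the
 * build):
 */
#if 0
static void sketchCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
{
    if (*pu8Dst == *pu8Al)
    {
        *pfEFlags |= X86_EFL_ZF;
        *pu8Dst    = u8Src;
    }
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;
        *pu8Al     = *pu8Dst;
    }
}
#endif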
6858
6859/** Opcode 0x0f 0xb1. */
6860FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6861{
6862 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6863 IEMOP_HLP_MIN_486();
6864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6865
6866 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6867 {
6868 IEMOP_HLP_DONE_DECODING();
6869 switch (pVCpu->iem.s.enmEffOpSize)
6870 {
6871 case IEMMODE_16BIT:
6872 IEM_MC_BEGIN(4, 0);
6873 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6874 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6875 IEM_MC_ARG(uint16_t, u16Src, 2);
6876 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6877
6878 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6879 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6880 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6881 IEM_MC_REF_EFLAGS(pEFlags);
6882 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6883 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6884 else
6885 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6886
6887 IEM_MC_ADVANCE_RIP();
6888 IEM_MC_END();
6889 return VINF_SUCCESS;
6890
6891 case IEMMODE_32BIT:
6892 IEM_MC_BEGIN(4, 0);
6893 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6894 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6895 IEM_MC_ARG(uint32_t, u32Src, 2);
6896 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6897
6898 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6899 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6900 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6901 IEM_MC_REF_EFLAGS(pEFlags);
6902 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6903 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6904 else
6905 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6906
6907 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6908 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6909 IEM_MC_ADVANCE_RIP();
6910 IEM_MC_END();
6911 return VINF_SUCCESS;
6912
6913 case IEMMODE_64BIT:
6914 IEM_MC_BEGIN(4, 0);
6915 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6916 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6917#ifdef RT_ARCH_X86
6918 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6919#else
6920 IEM_MC_ARG(uint64_t, u64Src, 2);
6921#endif
6922 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6923
6924 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6925 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6926 IEM_MC_REF_EFLAGS(pEFlags);
6927#ifdef RT_ARCH_X86
6928 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6929 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6930 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6931 else
6932 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6933#else
6934 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6935 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6936 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6937 else
6938 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6939#endif
6940
6941 IEM_MC_ADVANCE_RIP();
6942 IEM_MC_END();
6943 return VINF_SUCCESS;
6944
6945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6946 }
6947 }
6948 else
6949 {
6950 switch (pVCpu->iem.s.enmEffOpSize)
6951 {
6952 case IEMMODE_16BIT:
6953 IEM_MC_BEGIN(4, 3);
6954 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6955 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6956 IEM_MC_ARG(uint16_t, u16Src, 2);
6957 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6958 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6959 IEM_MC_LOCAL(uint16_t, u16Ax);
6960
6961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6962 IEMOP_HLP_DONE_DECODING();
6963 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6964 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6965 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6966 IEM_MC_FETCH_EFLAGS(EFlags);
6967 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6968 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6969 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6970 else
6971 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6972
6973 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6974 IEM_MC_COMMIT_EFLAGS(EFlags);
6975 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6976 IEM_MC_ADVANCE_RIP();
6977 IEM_MC_END();
6978 return VINF_SUCCESS;
6979
6980 case IEMMODE_32BIT:
6981 IEM_MC_BEGIN(4, 3);
6982 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6983 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6984 IEM_MC_ARG(uint32_t, u32Src, 2);
6985 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6987 IEM_MC_LOCAL(uint32_t, u32Eax);
6988
6989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6990 IEMOP_HLP_DONE_DECODING();
6991 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6992 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6993 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6994 IEM_MC_FETCH_EFLAGS(EFlags);
6995 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6996 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6997 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6998 else
6999 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7000
7001 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7002 IEM_MC_COMMIT_EFLAGS(EFlags);
7003 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7004 IEM_MC_ADVANCE_RIP();
7005 IEM_MC_END();
7006 return VINF_SUCCESS;
7007
7008 case IEMMODE_64BIT:
7009 IEM_MC_BEGIN(4, 3);
7010 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7011 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7012#ifdef RT_ARCH_X86
7013 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7014#else
7015 IEM_MC_ARG(uint64_t, u64Src, 2);
7016#endif
7017 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7019 IEM_MC_LOCAL(uint64_t, u64Rax);
7020
7021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7022 IEMOP_HLP_DONE_DECODING();
7023 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7024 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7025 IEM_MC_FETCH_EFLAGS(EFlags);
7026 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7027#ifdef RT_ARCH_X86
7028 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7029 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7030 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7031 else
7032 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7033#else
7034 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7035 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7036 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7037 else
7038 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7039#endif
7040
7041 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7042 IEM_MC_COMMIT_EFLAGS(EFlags);
7043 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7044 IEM_MC_ADVANCE_RIP();
7045 IEM_MC_END();
7046 return VINF_SUCCESS;
7047
7048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7049 }
7050 }
7051}
7052
7053
7054FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7055{
7056 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7057 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7058
7059 switch (pVCpu->iem.s.enmEffOpSize)
7060 {
7061 case IEMMODE_16BIT:
7062 IEM_MC_BEGIN(5, 1);
7063 IEM_MC_ARG(uint16_t, uSel, 0);
7064 IEM_MC_ARG(uint16_t, offSeg, 1);
7065 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7066 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7067 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7068 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7071 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7072 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7073 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7074 IEM_MC_END();
7075 return VINF_SUCCESS;
7076
7077 case IEMMODE_32BIT:
7078 IEM_MC_BEGIN(5, 1);
7079 IEM_MC_ARG(uint16_t, uSel, 0);
7080 IEM_MC_ARG(uint32_t, offSeg, 1);
7081 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7082 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7083 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7084 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7087 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7088 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7089 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7090 IEM_MC_END();
7091 return VINF_SUCCESS;
7092
7093 case IEMMODE_64BIT:
7094 IEM_MC_BEGIN(5, 1);
7095 IEM_MC_ARG(uint16_t, uSel, 0);
7096 IEM_MC_ARG(uint64_t, offSeg, 1);
7097 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7098 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7099 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7100 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7103 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7104 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7105 else
7106 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7107 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7108 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7109 IEM_MC_END();
7110 return VINF_SUCCESS;
7111
7112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7113 }
7114}
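
/* Note! Sketch of the far pointer (Mp) operand layout consumed above: the
   offset comes first, followed by the 16-bit selector.  For the 32-bit form
   this amounts to roughly the following (read_u32/read_u16 being
   hypothetical stand-ins for the IEM_MC_FETCH_MEM_* guest memory fetches):

       uint32_t offSeg = read_u32(GCPtrEff + 0);   // offset part
       uint16_t uSel   = read_u16(GCPtrEff + 4);   // selector part

   The 16-bit and 64-bit forms fetch the selector at displacement 2 and 8
   respectively, matching the operand size. */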
7115
7116
7117/** Opcode 0x0f 0xb2. */
7118FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7119{
7120 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7121 IEMOP_HLP_MIN_386();
7122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7123 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7124 return IEMOP_RAISE_INVALID_OPCODE();
7125 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7126}
7127
7128
7129/** Opcode 0x0f 0xb3. */
7130FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7131{
7132 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7133 IEMOP_HLP_MIN_386();
7134 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7135}
7136
7137
7138/** Opcode 0x0f 0xb4. */
7139FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7140{
7141 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7142 IEMOP_HLP_MIN_386();
7143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7144 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7145 return IEMOP_RAISE_INVALID_OPCODE();
7146 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7147}
7148
7149
7150/** Opcode 0x0f 0xb5. */
7151FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7152{
7153 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7154 IEMOP_HLP_MIN_386();
7155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7156 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7157 return IEMOP_RAISE_INVALID_OPCODE();
7158 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7159}
7160
7161
7162/** Opcode 0x0f 0xb6. */
7163FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7164{
7165 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7166 IEMOP_HLP_MIN_386();
7167
7168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7169
7170 /*
7171 * If rm is denoting a register, no more instruction bytes.
7172 */
7173 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7174 {
7175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7176 switch (pVCpu->iem.s.enmEffOpSize)
7177 {
7178 case IEMMODE_16BIT:
7179 IEM_MC_BEGIN(0, 1);
7180 IEM_MC_LOCAL(uint16_t, u16Value);
7181 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7182 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7183 IEM_MC_ADVANCE_RIP();
7184 IEM_MC_END();
7185 return VINF_SUCCESS;
7186
7187 case IEMMODE_32BIT:
7188 IEM_MC_BEGIN(0, 1);
7189 IEM_MC_LOCAL(uint32_t, u32Value);
7190 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7191 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7192 IEM_MC_ADVANCE_RIP();
7193 IEM_MC_END();
7194 return VINF_SUCCESS;
7195
7196 case IEMMODE_64BIT:
7197 IEM_MC_BEGIN(0, 1);
7198 IEM_MC_LOCAL(uint64_t, u64Value);
7199 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7200 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7201 IEM_MC_ADVANCE_RIP();
7202 IEM_MC_END();
7203 return VINF_SUCCESS;
7204
7205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7206 }
7207 }
7208 else
7209 {
7210 /*
7211 * We're loading a register from memory.
7212 */
7213 switch (pVCpu->iem.s.enmEffOpSize)
7214 {
7215 case IEMMODE_16BIT:
7216 IEM_MC_BEGIN(0, 2);
7217 IEM_MC_LOCAL(uint16_t, u16Value);
7218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7221 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7222 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7223 IEM_MC_ADVANCE_RIP();
7224 IEM_MC_END();
7225 return VINF_SUCCESS;
7226
7227 case IEMMODE_32BIT:
7228 IEM_MC_BEGIN(0, 2);
7229 IEM_MC_LOCAL(uint32_t, u32Value);
7230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7233 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7234 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7235 IEM_MC_ADVANCE_RIP();
7236 IEM_MC_END();
7237 return VINF_SUCCESS;
7238
7239 case IEMMODE_64BIT:
7240 IEM_MC_BEGIN(0, 2);
7241 IEM_MC_LOCAL(uint64_t, u64Value);
7242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7245 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7246 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7247 IEM_MC_ADVANCE_RIP();
7248 IEM_MC_END();
7249 return VINF_SUCCESS;
7250
7251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7252 }
7253 }
7254}
7255
7256
7257/** Opcode 0x0f 0xb7. */
7258FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7259{
7260 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7261 IEMOP_HLP_MIN_386();
7262
7263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7264
7265 /** @todo Not entirely sure how the operand size prefix is handled here,
7266 * assuming that it will be ignored. Would be nice to have a few
7267 * tests for this. */
7268 /*
7269 * If rm is denoting a register, no more instruction bytes.
7270 */
7271 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7272 {
7273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7274 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7275 {
7276 IEM_MC_BEGIN(0, 1);
7277 IEM_MC_LOCAL(uint32_t, u32Value);
7278 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7279 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7280 IEM_MC_ADVANCE_RIP();
7281 IEM_MC_END();
7282 }
7283 else
7284 {
7285 IEM_MC_BEGIN(0, 1);
7286 IEM_MC_LOCAL(uint64_t, u64Value);
7287 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7288 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7289 IEM_MC_ADVANCE_RIP();
7290 IEM_MC_END();
7291 }
7292 }
7293 else
7294 {
7295 /*
7296 * We're loading a register from memory.
7297 */
7298 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7299 {
7300 IEM_MC_BEGIN(0, 2);
7301 IEM_MC_LOCAL(uint32_t, u32Value);
7302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7305 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7306 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7307 IEM_MC_ADVANCE_RIP();
7308 IEM_MC_END();
7309 }
7310 else
7311 {
7312 IEM_MC_BEGIN(0, 2);
7313 IEM_MC_LOCAL(uint64_t, u64Value);
7314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7317 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7318 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7319 IEM_MC_ADVANCE_RIP();
7320 IEM_MC_END();
7321 }
7322 }
7323 return VINF_SUCCESS;
7324}
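
/* Note! Since the non-64-bit path above does a 32-bit register store, movzx
   in 64-bit mode clears bits 63:32 of the destination just like any other
   32-bit GPR write, e.g. (illustrative):

       movzx eax, cx        ; rax = (uint64_t)(uint16_t)cx
 */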
7325
7326
7327/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7328FNIEMOP_UD_STUB(iemOp_jmpe);
7329/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7330FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7331
7332
7333/**
7334 * @opcode 0xb9
7335 * @opinvalid intel-modrm
7336 * @optest ->
7337 */
7338FNIEMOP_DEF(iemOp_Grp10)
7339{
7340 /*
7341 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
7342 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7343 */
7344 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7345 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
7346 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7347}
7348
7349
7350/** Opcode 0x0f 0xba. */
7351FNIEMOP_DEF(iemOp_Grp8)
7352{
7353 IEMOP_HLP_MIN_386();
7354 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7355 PCIEMOPBINSIZES pImpl;
7356 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7357 {
7358 case 0: case 1: case 2: case 3:
7359 /* Both AMD and Intel want full modr/m decoding and imm8. */
7360 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7361 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7362 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7363 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7364 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7366 }
7367 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7368
7369 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7370 {
7371 /* register destination. */
7372 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7374
7375 switch (pVCpu->iem.s.enmEffOpSize)
7376 {
7377 case IEMMODE_16BIT:
7378 IEM_MC_BEGIN(3, 0);
7379 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7380 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7381 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7382
7383 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7384 IEM_MC_REF_EFLAGS(pEFlags);
7385 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7386
7387 IEM_MC_ADVANCE_RIP();
7388 IEM_MC_END();
7389 return VINF_SUCCESS;
7390
7391 case IEMMODE_32BIT:
7392 IEM_MC_BEGIN(3, 0);
7393 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7394 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7395 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7396
7397 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7398 IEM_MC_REF_EFLAGS(pEFlags);
7399 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7400
7401 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7402 IEM_MC_ADVANCE_RIP();
7403 IEM_MC_END();
7404 return VINF_SUCCESS;
7405
7406 case IEMMODE_64BIT:
7407 IEM_MC_BEGIN(3, 0);
7408 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7409 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7410 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7411
7412 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7413 IEM_MC_REF_EFLAGS(pEFlags);
7414 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7415
7416 IEM_MC_ADVANCE_RIP();
7417 IEM_MC_END();
7418 return VINF_SUCCESS;
7419
7420 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7421 }
7422 }
7423 else
7424 {
7425 /* memory destination. */
7426
7427 uint32_t fAccess;
7428 if (pImpl->pfnLockedU16)
7429 fAccess = IEM_ACCESS_DATA_RW;
7430 else /* BT */
7431 fAccess = IEM_ACCESS_DATA_R;
7432
7433 /** @todo test negative bit offsets! */
7434 switch (pVCpu->iem.s.enmEffOpSize)
7435 {
7436 case IEMMODE_16BIT:
7437 IEM_MC_BEGIN(3, 1);
7438 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7439 IEM_MC_ARG(uint16_t, u16Src, 1);
7440 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7442
7443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7444 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7445 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7446 if (pImpl->pfnLockedU16)
7447 IEMOP_HLP_DONE_DECODING();
7448 else
7449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7450 IEM_MC_FETCH_EFLAGS(EFlags);
7451 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7452 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7453 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7454 else
7455 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7456 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7457
7458 IEM_MC_COMMIT_EFLAGS(EFlags);
7459 IEM_MC_ADVANCE_RIP();
7460 IEM_MC_END();
7461 return VINF_SUCCESS;
7462
7463 case IEMMODE_32BIT:
7464 IEM_MC_BEGIN(3, 1);
7465 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7466 IEM_MC_ARG(uint32_t, u32Src, 1);
7467 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7469
7470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7471 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7472 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7473 if (pImpl->pfnLockedU16)
7474 IEMOP_HLP_DONE_DECODING();
7475 else
7476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7477 IEM_MC_FETCH_EFLAGS(EFlags);
7478 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7479 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7480 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7481 else
7482 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7483 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7484
7485 IEM_MC_COMMIT_EFLAGS(EFlags);
7486 IEM_MC_ADVANCE_RIP();
7487 IEM_MC_END();
7488 return VINF_SUCCESS;
7489
7490 case IEMMODE_64BIT:
7491 IEM_MC_BEGIN(3, 1);
7492 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7493 IEM_MC_ARG(uint64_t, u64Src, 1);
7494 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7496
7497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7498 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7499 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7500 if (pImpl->pfnLockedU16)
7501 IEMOP_HLP_DONE_DECODING();
7502 else
7503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7504 IEM_MC_FETCH_EFLAGS(EFlags);
7505 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7506 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7507 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7508 else
7509 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7510 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7511
7512 IEM_MC_COMMIT_EFLAGS(EFlags);
7513 IEM_MC_ADVANCE_RIP();
7514 IEM_MC_END();
7515 return VINF_SUCCESS;
7516
7517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7518 }
7519 }
7520}
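
/* Note! Rough sketch of the immediate bit offset handling above: the Ib
   operand is masked down to the operand size (the u8Bit & 0x0f/0x1f/0x3f
   expressions), so for example:

       mov  ax, 0
       bts  ax, 17          ; 17 & 0x0f = 1  ->  ax = 0x0002, CF = 0
 */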
7521
7522
7523/** Opcode 0x0f 0xbb. */
7524FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7525{
7526 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7527 IEMOP_HLP_MIN_386();
7528 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7529}
7530
7531
7532/** Opcode 0x0f 0xbc. */
7533FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7534{
7535 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7536 IEMOP_HLP_MIN_386();
7537 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7538 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7539}
7540
7541
7542/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7543FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7544
7545
7546/** Opcode 0x0f 0xbd. */
7547FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7548{
7549 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7550 IEMOP_HLP_MIN_386();
7551 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7552 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7553}
7554
7555
7556/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7557FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7558
7559
7560/** Opcode 0x0f 0xbe. */
7561FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7562{
7563 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7564 IEMOP_HLP_MIN_386();
7565
7566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7567
7568 /*
7569 * If rm is denoting a register, no more instruction bytes.
7570 */
7571 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7572 {
7573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7574 switch (pVCpu->iem.s.enmEffOpSize)
7575 {
7576 case IEMMODE_16BIT:
7577 IEM_MC_BEGIN(0, 1);
7578 IEM_MC_LOCAL(uint16_t, u16Value);
7579 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7580 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7581 IEM_MC_ADVANCE_RIP();
7582 IEM_MC_END();
7583 return VINF_SUCCESS;
7584
7585 case IEMMODE_32BIT:
7586 IEM_MC_BEGIN(0, 1);
7587 IEM_MC_LOCAL(uint32_t, u32Value);
7588 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7589 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7590 IEM_MC_ADVANCE_RIP();
7591 IEM_MC_END();
7592 return VINF_SUCCESS;
7593
7594 case IEMMODE_64BIT:
7595 IEM_MC_BEGIN(0, 1);
7596 IEM_MC_LOCAL(uint64_t, u64Value);
7597 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7598 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7599 IEM_MC_ADVANCE_RIP();
7600 IEM_MC_END();
7601 return VINF_SUCCESS;
7602
7603 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7604 }
7605 }
7606 else
7607 {
7608 /*
7609 * We're loading a register from memory.
7610 */
7611 switch (pVCpu->iem.s.enmEffOpSize)
7612 {
7613 case IEMMODE_16BIT:
7614 IEM_MC_BEGIN(0, 2);
7615 IEM_MC_LOCAL(uint16_t, u16Value);
7616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7619 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7620 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7621 IEM_MC_ADVANCE_RIP();
7622 IEM_MC_END();
7623 return VINF_SUCCESS;
7624
7625 case IEMMODE_32BIT:
7626 IEM_MC_BEGIN(0, 2);
7627 IEM_MC_LOCAL(uint32_t, u32Value);
7628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7631 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7632 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7633 IEM_MC_ADVANCE_RIP();
7634 IEM_MC_END();
7635 return VINF_SUCCESS;
7636
7637 case IEMMODE_64BIT:
7638 IEM_MC_BEGIN(0, 2);
7639 IEM_MC_LOCAL(uint64_t, u64Value);
7640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7643 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7644 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7645 IEM_MC_ADVANCE_RIP();
7646 IEM_MC_END();
7647 return VINF_SUCCESS;
7648
7649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7650 }
7651 }
7652}
7653
7654
7655/** Opcode 0x0f 0xbf. */
7656FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7657{
7658 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7659 IEMOP_HLP_MIN_386();
7660
7661 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7662
7663 /** @todo Not entirely sure how the operand size prefix is handled here,
7664 * assuming that it will be ignored. Would be nice to have a few
7665 * tests for this. */
7666 /*
7667 * If rm is denoting a register, no more instruction bytes.
7668 */
7669 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7670 {
7671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7672 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7673 {
7674 IEM_MC_BEGIN(0, 1);
7675 IEM_MC_LOCAL(uint32_t, u32Value);
7676 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7677 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7678 IEM_MC_ADVANCE_RIP();
7679 IEM_MC_END();
7680 }
7681 else
7682 {
7683 IEM_MC_BEGIN(0, 1);
7684 IEM_MC_LOCAL(uint64_t, u64Value);
7685 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7686 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7687 IEM_MC_ADVANCE_RIP();
7688 IEM_MC_END();
7689 }
7690 }
7691 else
7692 {
7693 /*
7694 * We're loading a register from memory.
7695 */
7696 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7697 {
7698 IEM_MC_BEGIN(0, 2);
7699 IEM_MC_LOCAL(uint32_t, u32Value);
7700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7703 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7704 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7705 IEM_MC_ADVANCE_RIP();
7706 IEM_MC_END();
7707 }
7708 else
7709 {
7710 IEM_MC_BEGIN(0, 2);
7711 IEM_MC_LOCAL(uint64_t, u64Value);
7712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7715 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7716 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7717 IEM_MC_ADVANCE_RIP();
7718 IEM_MC_END();
7719 }
7720 }
7721 return VINF_SUCCESS;
7722}
7723
7724
7725/** Opcode 0x0f 0xc0. */
7726FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7727{
7728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7729 IEMOP_HLP_MIN_486();
7730 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7731
7732 /*
7733 * If rm is denoting a register, no more instruction bytes.
7734 */
7735 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7736 {
7737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7738
7739 IEM_MC_BEGIN(3, 0);
7740 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7741 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7742 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7743
7744 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7745 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7746 IEM_MC_REF_EFLAGS(pEFlags);
7747 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7748
7749 IEM_MC_ADVANCE_RIP();
7750 IEM_MC_END();
7751 }
7752 else
7753 {
7754 /*
7755 * We're accessing memory.
7756 */
7757 IEM_MC_BEGIN(3, 3);
7758 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7759 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7760 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7761 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7763
7764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7765 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7766 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7767 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7768 IEM_MC_FETCH_EFLAGS(EFlags);
7769 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7770 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7771 else
7772 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7773
7774 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7775 IEM_MC_COMMIT_EFLAGS(EFlags);
7776 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7777 IEM_MC_ADVANCE_RIP();
7778 IEM_MC_END();
7779 return VINF_SUCCESS;
7780 }
7781 return VINF_SUCCESS;
7782}
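
/* Note! For reference, the xadd helpers invoked above implement roughly this
   exchange-and-add (a sketch, not the actual iemAImpl code):

       uint8_t const uTmp = *pu8Dst;   // original destination value
       *pu8Dst = uTmp + *pu8Reg;       // destination receives the sum
       *pu8Reg = uTmp;                 // register receives the old value
       // EFLAGS are set as for an ADD of the two original values.
 */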
7783
7784
7785/** Opcode 0x0f 0xc1. */
7786FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7787{
7788 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7789 IEMOP_HLP_MIN_486();
7790 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7791
7792 /*
7793 * If rm is denoting a register, no more instruction bytes.
7794 */
7795 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7796 {
7797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7798
7799 switch (pVCpu->iem.s.enmEffOpSize)
7800 {
7801 case IEMMODE_16BIT:
7802 IEM_MC_BEGIN(3, 0);
7803 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7804 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7805 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7806
7807 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7808 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7809 IEM_MC_REF_EFLAGS(pEFlags);
7810 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7811
7812 IEM_MC_ADVANCE_RIP();
7813 IEM_MC_END();
7814 return VINF_SUCCESS;
7815
7816 case IEMMODE_32BIT:
7817 IEM_MC_BEGIN(3, 0);
7818 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7819 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7820 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7821
7822 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7823 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7824 IEM_MC_REF_EFLAGS(pEFlags);
7825 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7826
7827 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7828 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7829 IEM_MC_ADVANCE_RIP();
7830 IEM_MC_END();
7831 return VINF_SUCCESS;
7832
7833 case IEMMODE_64BIT:
7834 IEM_MC_BEGIN(3, 0);
7835 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7836 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7837 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7838
7839 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7840 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7841 IEM_MC_REF_EFLAGS(pEFlags);
7842 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7843
7844 IEM_MC_ADVANCE_RIP();
7845 IEM_MC_END();
7846 return VINF_SUCCESS;
7847
7848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7849 }
7850 }
7851 else
7852 {
7853 /*
7854 * We're accessing memory.
7855 */
7856 switch (pVCpu->iem.s.enmEffOpSize)
7857 {
7858 case IEMMODE_16BIT:
7859 IEM_MC_BEGIN(3, 3);
7860 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7861 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7862 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7863 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7865
7866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7867 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7868 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7869 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7870 IEM_MC_FETCH_EFLAGS(EFlags);
7871 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7872 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7873 else
7874 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7875
7876 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7877 IEM_MC_COMMIT_EFLAGS(EFlags);
7878 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7879 IEM_MC_ADVANCE_RIP();
7880 IEM_MC_END();
7881 return VINF_SUCCESS;
7882
7883 case IEMMODE_32BIT:
7884 IEM_MC_BEGIN(3, 3);
7885 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7886 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7887 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7888 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7890
7891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7892 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7893 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7894 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7895 IEM_MC_FETCH_EFLAGS(EFlags);
7896 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7897 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7898 else
7899 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7900
7901 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7902 IEM_MC_COMMIT_EFLAGS(EFlags);
7903 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7904 IEM_MC_ADVANCE_RIP();
7905 IEM_MC_END();
7906 return VINF_SUCCESS;
7907
7908 case IEMMODE_64BIT:
7909 IEM_MC_BEGIN(3, 3);
7910 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7911 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7912 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7913 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7915
7916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7917 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7918 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7919 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7920 IEM_MC_FETCH_EFLAGS(EFlags);
7921 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7922 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7923 else
7924 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7925
7926 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7927 IEM_MC_COMMIT_EFLAGS(EFlags);
7928 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7929 IEM_MC_ADVANCE_RIP();
7930 IEM_MC_END();
7931 return VINF_SUCCESS;
7932
7933 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7934 }
7935 }
7936}
7937
7938
7939/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7940FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7941/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7942FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7943/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7944FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7945/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7946FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7947
7948
7949/** Opcode 0x0f 0xc3. */
7950FNIEMOP_DEF(iemOp_movnti_My_Gy)
7951{
7952 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7953
7954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7955
7956 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7957 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7958 {
7959 switch (pVCpu->iem.s.enmEffOpSize)
7960 {
7961 case IEMMODE_32BIT:
7962 IEM_MC_BEGIN(0, 2);
7963 IEM_MC_LOCAL(uint32_t, u32Value);
7964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7965
7966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7968 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7969 return IEMOP_RAISE_INVALID_OPCODE();
7970
7971 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7972 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7973 IEM_MC_ADVANCE_RIP();
7974 IEM_MC_END();
7975 break;
7976
7977 case IEMMODE_64BIT:
7978 IEM_MC_BEGIN(0, 2);
7979 IEM_MC_LOCAL(uint64_t, u64Value);
7980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7981
7982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7984 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7985 return IEMOP_RAISE_INVALID_OPCODE();
7986
7987 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7988 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7989 IEM_MC_ADVANCE_RIP();
7990 IEM_MC_END();
7991 break;
7992
7993 case IEMMODE_16BIT:
7994 /** @todo check this form. */
7995 return IEMOP_RAISE_INVALID_OPCODE();
7996 }
7997 }
7998 else
7999 return IEMOP_RAISE_INVALID_OPCODE();
8000 return VINF_SUCCESS;
8001}
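
/* Note! Architecturally movnti is just a store with a non-temporal cache
   hint, so apart from the SSE2 CPUID check the emulation above is the same
   as a plain mov to memory, e.g. (illustrative):

       movnti [rdi], eax    ; same architectural result as: mov [rdi], eax

   Only the cache behaviour differs on real hardware, which IEM does not
   model anyway. */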
8002/* Opcode 0x66 0x0f 0xc3 - invalid */
8003/* Opcode 0xf3 0x0f 0xc3 - invalid */
8004/* Opcode 0xf2 0x0f 0xc3 - invalid */
8005
8006/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8007FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8008/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8009FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8010/* Opcode 0xf3 0x0f 0xc4 - invalid */
8011/* Opcode 0xf2 0x0f 0xc4 - invalid */
8012
8013/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8014FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8015/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8016FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8017/* Opcode 0xf3 0x0f 0xc5 - invalid */
8018/* Opcode 0xf2 0x0f 0xc5 - invalid */
8019
8020/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8021FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8022/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8023FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8024/* Opcode 0xf3 0x0f 0xc6 - invalid */
8025/* Opcode 0xf2 0x0f 0xc6 - invalid */
8026
8027
8028/** Opcode 0x0f 0xc7 !11/1. */
8029FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8030{
8031 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8032
8033 IEM_MC_BEGIN(4, 3);
8034 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8035 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8036 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8037 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8038 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8039 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8041
8042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8043 IEMOP_HLP_DONE_DECODING();
8044 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8045
8046 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8047 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8048 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8049
8050 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8051 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8052 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8053
8054 IEM_MC_FETCH_EFLAGS(EFlags);
8055 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8056 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8057 else
8058 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8059
8060 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8061 IEM_MC_COMMIT_EFLAGS(EFlags);
8062 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8063 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8064 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8065 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8066 IEM_MC_ENDIF();
8067 IEM_MC_ADVANCE_RIP();
8068
8069 IEM_MC_END();
8070 return VINF_SUCCESS;
8071}
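
/* Note! The cmpxchg8b flow above boils down to this sketch (illustrative
   only, ignoring the LOCK prefix handling):

       if (*pu64Mem == ((uint64_t)EDX << 32 | EAX))
       {
           *pu64Mem = (uint64_t)ECX << 32 | EBX;
           ZF = 1;
       }
       else
       {
           EDX = (uint32_t)(*pu64Mem >> 32);
           EAX = (uint32_t)*pu64Mem;
           ZF = 0;
       }
 */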
8072
8073
8074/** Opcode REX.W 0x0f 0xc7 !11/1. */
8075FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8076{
8077 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8078 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8079 {
8080#if 0
8081 RT_NOREF(bRm);
8082 IEMOP_BITCH_ABOUT_STUB();
8083 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8084#else
8085 IEM_MC_BEGIN(4, 3);
8086 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8087 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8088 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8089 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8090 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8091 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8093
8094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8095 IEMOP_HLP_DONE_DECODING();
8096 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8097 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8098
8099 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8100 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8101 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8102
8103 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8104 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8105 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8106
8107 IEM_MC_FETCH_EFLAGS(EFlags);
8108# ifdef RT_ARCH_AMD64
8109 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8110 {
8111 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8112 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8113 else
8114 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8115 }
8116 else
8117# endif
8118 {
8119 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8120 accesses and not at all atomic, which works fine in a uni-CPU guest
8121 configuration (ignoring DMA). If guest SMP is active we have no choice
8122 but to use a rendezvous callback here. Sigh. */
8123 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8124 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8125 else
8126 {
8127 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8128 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8129 }
8130 }
8131
8132 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8133 IEM_MC_COMMIT_EFLAGS(EFlags);
8134 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8135 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8136 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8137 IEM_MC_ENDIF();
8138 IEM_MC_ADVANCE_RIP();
8139
8140 IEM_MC_END();
8141 return VINF_SUCCESS;
8142#endif
8143 }
8144 Log(("cmpxchg16b -> #UD\n"));
8145 return IEMOP_RAISE_INVALID_OPCODE();
8146}
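
/* Note! Unlike cmpxchg8b, cmpxchg16b raises #GP(0) on a misaligned operand,
   which is what the IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16)
   above checks, i.e. (illustrative):

       lock cmpxchg16b [rbx]    ; rbx must be 16 byte aligned or #GP(0)
 */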
8147
8148FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8149{
8150 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8151 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8152 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8153}
8154
8155/** Opcode 0x0f 0xc7 11/6. */
8156FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8157
8158/** Opcode 0x0f 0xc7 !11/6. */
8159FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8160
8161/** Opcode 0x66 0x0f 0xc7 !11/6. */
8162FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8163
8164/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8165FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8166
8167/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8168FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8169
8170/** Opcode 0x0f 0xc7 11/7. */
8171FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8172
8173
8174/**
8175 * Group 9 jump table for register variant.
8176 */
8177IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8178{ /* pfx: none, 066h, 0f3h, 0f2h */
8179 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8180 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8181 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8182 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8183 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8184 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8185 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8186 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8187};
8188AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8189
8190
8191/**
8192 * Group 9 jump table for memory variant.
8193 */
8194IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8195{ /* pfx: none, 066h, 0f3h, 0f2h */
8196 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8197 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8198 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8199 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8200 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8201 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8202 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8203 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8204};
8205AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8206
8207
8208/** Opcode 0x0f 0xc7. */
8209FNIEMOP_DEF(iemOp_Grp9)
8210{
8211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8212 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8213 /* register, register */
8214 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8215 + pVCpu->iem.s.idxPrefix], bRm);
8216 /* memory, register */
8217 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8218 + pVCpu->iem.s.idxPrefix], bRm);
8219}
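
/* Note! The group 9 tables above are indexed by the modr/m reg field times
   four plus the prefix index, roughly:

       idx = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + idxPrefix;
       // idxPrefix: 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2

   so e.g. a 0x66 prefixed /6 register form lands on entry 6*4 + 1, the
   second rdrand column. */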
8220
8221
8222/**
8223 * Common 'bswap register' helper.
8224 */
8225FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8226{
8227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8228 switch (pVCpu->iem.s.enmEffOpSize)
8229 {
8230 case IEMMODE_16BIT:
8231 IEM_MC_BEGIN(1, 0);
8232 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8233 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8234 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8235 IEM_MC_ADVANCE_RIP();
8236 IEM_MC_END();
8237 return VINF_SUCCESS;
8238
8239 case IEMMODE_32BIT:
8240 IEM_MC_BEGIN(1, 0);
8241 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8242 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8243 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8244 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8245 IEM_MC_ADVANCE_RIP();
8246 IEM_MC_END();
8247 return VINF_SUCCESS;
8248
8249 case IEMMODE_64BIT:
8250 IEM_MC_BEGIN(1, 0);
8251 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8252 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8253 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8254 IEM_MC_ADVANCE_RIP();
8255 IEM_MC_END();
8256 return VINF_SUCCESS;
8257
8258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8259 }
8260}
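
/* Note! bswap with a 16-bit operand is undefined according to the manuals,
   which is why the 16-bit case above simply hands the full 32-bit register
   to iemAImpl_bswap_u16 without clearing the high dword, e.g. (result
   undefined on real hardware):

       mov   eax, 0x11223344
       bswap ax             ; value of ax afterwards is undefined
 */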
8261
8262
8263/** Opcode 0x0f 0xc8. */
8264FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8265{
8266 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8267 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8268 prefix. It appears REX.B is actually the correct prefix. For a parallel
8269 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8270 IEMOP_HLP_MIN_486();
8271 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8272}
8273
8274
8275/** Opcode 0x0f 0xc9. */
8276FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8277{
8278 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8279 IEMOP_HLP_MIN_486();
8280 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8281}
8282
8283
8284/** Opcode 0x0f 0xca. */
8285FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8286{
8287 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8288 IEMOP_HLP_MIN_486();
8289 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8290}
8291
8292
8293/** Opcode 0x0f 0xcb. */
8294FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8295{
8296 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8297 IEMOP_HLP_MIN_486();
8298 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8299}
8300
8301
8302/** Opcode 0x0f 0xcc. */
8303FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8304{
8305 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8306 IEMOP_HLP_MIN_486();
8307 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8308}
8309
8310
8311/** Opcode 0x0f 0xcd. */
8312FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8313{
8314 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8315 IEMOP_HLP_MIN_486();
8316 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8317}
8318
8319
8320/** Opcode 0x0f 0xce. */
8321FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8322{
8323 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8324 IEMOP_HLP_MIN_486();
8325 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8326}
8327
8328
8329/** Opcode 0x0f 0xcf. */
8330FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8331{
8332 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8333 IEMOP_HLP_MIN_486();
8334 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8335}
8336
8337
8338/* Opcode 0x0f 0xd0 - invalid */
8339/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8340FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8341/* Opcode 0xf3 0x0f 0xd0 - invalid */
8342/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8343FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8344
8345/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8346FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8347/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8348FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8349/* Opcode 0xf3 0x0f 0xd1 - invalid */
8350/* Opcode 0xf2 0x0f 0xd1 - invalid */
8351
8352/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8353FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8354/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8355FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8356/* Opcode 0xf3 0x0f 0xd2 - invalid */
8357/* Opcode 0xf2 0x0f 0xd2 - invalid */
8358
8359/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8360FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8361/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8362FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8363/* Opcode 0xf3 0x0f 0xd3 - invalid */
8364/* Opcode 0xf2 0x0f 0xd3 - invalid */
8365
8366/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8367FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8368/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8369FNIEMOP_STUB(iemOp_paddq_Vx_W);
8370/* Opcode 0xf3 0x0f 0xd4 - invalid */
8371/* Opcode 0xf2 0x0f 0xd4 - invalid */
8372
8373/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8374FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8375/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8376FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8377/* Opcode 0xf3 0x0f 0xd5 - invalid */
8378/* Opcode 0xf2 0x0f 0xd5 - invalid */
8379
8380/* Opcode 0x0f 0xd6 - invalid */
8381
8382/**
8383 * @opcode 0xd6
8384 * @oppfx 0x66
8385 * @opcpuid sse2
8386 * @opgroup og_sse2_pcksclr_datamove
8387 * @opxcpttype none
8388 * @optest op1=-1 op2=2 -> op1=2
8389 * @optest op1=0 op2=-42 -> op1=-42
8390 */
8391FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8392{
8393 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8395 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8396 {
8397 /*
8398 * Register, register.
8399 */
8400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8401 IEM_MC_BEGIN(0, 2);
8402 IEM_MC_LOCAL(uint64_t, uSrc);
8403
8404 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8405 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8406
8407 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8408 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8409
8410 IEM_MC_ADVANCE_RIP();
8411 IEM_MC_END();
8412 }
8413 else
8414 {
8415 /*
8416 * Memory, register.
8417 */
8418 IEM_MC_BEGIN(0, 2);
8419 IEM_MC_LOCAL(uint64_t, uSrc);
8420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8421
8422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8424 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8425 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8426
8427 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8428 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8429
8430 IEM_MC_ADVANCE_RIP();
8431 IEM_MC_END();
8432 }
8433 return VINF_SUCCESS;
8434}
8435
8436
8437/**
8438 * @opcode 0xd6
8439 * @opcodesub 11 mr/reg
8440 * @oppfx f3
8441 * @opcpuid sse2
8442 * @opgroup og_sse2_simdint_datamove
8443 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8444 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8445 */
8446FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8447{
8448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8449 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8450 {
8451 /*
8452 * Register, register.
8453 */
8454 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8456 IEM_MC_BEGIN(0, 1);
8457 IEM_MC_LOCAL(uint64_t, uSrc);
8458
8459 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8460 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8461
8462 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8463 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8464 IEM_MC_FPU_TO_MMX_MODE();
8465
8466 IEM_MC_ADVANCE_RIP();
8467 IEM_MC_END();
8468 return VINF_SUCCESS;
8469 }
8470
8471 /**
8472 * @opdone
8473 * @opmnemonic udf30fd6mem
8474 * @opcode 0xd6
8475 * @opcodesub !11 mr/reg
8476 * @oppfx f3
8477 * @opunused intel-modrm
8478 * @opcpuid sse
8479 * @optest ->
8480 */
8481 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8482}
8483
8484
8485/**
8486 * @opcode 0xd6
8487 * @opcodesub 11 mr/reg
8488 * @oppfx f2
8489 * @opcpuid sse2
8490 * @opgroup og_sse2_simdint_datamove
8491 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8492 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8493 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8494 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8495 * @optest op1=-42 op2=0xfedcba9876543210
8496 * -> op1=0xfedcba9876543210 ftw=0xff
8497 */
8498FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8499{
8500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8501 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8502 {
8503 /*
8504 * Register, register.
8505 */
8506 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8508 IEM_MC_BEGIN(0, 1);
8509 IEM_MC_LOCAL(uint64_t, uSrc);
8510
8511 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8512 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8513
8514 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8515 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8516 IEM_MC_FPU_TO_MMX_MODE();
8517
8518 IEM_MC_ADVANCE_RIP();
8519 IEM_MC_END();
8520 return VINF_SUCCESS;
8521 }
8522
8523 /**
8524 * @opdone
8525 * @opmnemonic udf20fd6mem
8526 * @opcode 0xd6
8527 * @opcodesub !11 mr/reg
8528 * @oppfx f2
8529 * @opunused intel-modrm
8530 * @opcpuid sse
8531 * @optest ->
8532 */
8533 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8534}
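
/* Note! Both movq2dq and movdq2q above count as MMX instructions, so they
   switch the x87 unit into MMX mode, roughly FTW = all valid and TOP = 0,
   hence the ftw=0xff in the test annotations; that is what
   IEM_MC_FPU_TO_MMX_MODE() expresses. */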
8535
8536/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8537FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8538{
8539 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8540 /** @todo testcase: Check that the instruction implicitly clears the high
8541 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8542 * and the opcode is modified to work with the whole width (not
8543 * just 128). */
8544 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8545 /* Docs say register only. */
8546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8547 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8548 {
8549 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8550 IEM_MC_BEGIN(2, 0);
8551 IEM_MC_ARG(uint64_t *, pDst, 0);
8552 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8553 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8554 IEM_MC_PREPARE_FPU_USAGE();
8555 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8556 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8557 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8558 IEM_MC_ADVANCE_RIP();
8559 IEM_MC_END();
8560 return VINF_SUCCESS;
8561 }
8562 return IEMOP_RAISE_INVALID_OPCODE();
8563}
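
/* Note! pmovmskb gathers the most significant bit of each source byte into
   the low bits of the destination, e.g. for the MMX form above
   (illustrative):

       mm1 = 0x80_00_ff_00_7f_01_90_00  ->  eax = 0xa2 (10100010b)
 */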
8564
8565/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8566FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8567{
8568 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8569 /** @todo testcase: Check that the instruction implicitly clears the high
8570 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8571 * and the opcode is modified to work with the whole width (not
8572 * just 128). */
8573 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8574 /* Docs say register only. */
8575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8576 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8577 {
8578 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8579 IEM_MC_BEGIN(2, 0);
8580 IEM_MC_ARG(uint64_t *, pDst, 0);
8581 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8582 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8583 IEM_MC_PREPARE_SSE_USAGE();
8584 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8585 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8586 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8587 IEM_MC_ADVANCE_RIP();
8588 IEM_MC_END();
8589 return VINF_SUCCESS;
8590 }
8591 return IEMOP_RAISE_INVALID_OPCODE();
8592}
8593
8594/* Opcode 0xf3 0x0f 0xd7 - invalid */
8595/* Opcode 0xf2 0x0f 0xd7 - invalid */
8596
8597
8598/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8599FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8600/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8601FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8602/* Opcode 0xf3 0x0f 0xd8 - invalid */
8603/* Opcode 0xf2 0x0f 0xd8 - invalid */
8604
8605/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8606FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8607/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8608FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8609/* Opcode 0xf3 0x0f 0xd9 - invalid */
8610/* Opcode 0xf2 0x0f 0xd9 - invalid */
8611
8612/** Opcode 0x0f 0xda - pminub Pq, Qq */
8613FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8614/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8615FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8616/* Opcode 0xf3 0x0f 0xda - invalid */
8617/* Opcode 0xf2 0x0f 0xda - invalid */
8618
8619/** Opcode 0x0f 0xdb - pand Pq, Qq */
8620FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8621/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8622FNIEMOP_STUB(iemOp_pand_Vx_W);
8623/* Opcode 0xf3 0x0f 0xdb - invalid */
8624/* Opcode 0xf2 0x0f 0xdb - invalid */
8625
8626/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8627FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8628/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8629FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8630/* Opcode 0xf3 0x0f 0xdc - invalid */
8631/* Opcode 0xf2 0x0f 0xdc - invalid */
8632
8633/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8634FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8635/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8636FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8637/* Opcode 0xf3 0x0f 0xdd - invalid */
8638/* Opcode 0xf2 0x0f 0xdd - invalid */
8639
8640/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8641FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8642/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8643FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8644/* Opcode 0xf3 0x0f 0xde - invalid */
8645/* Opcode 0xf2 0x0f 0xde - invalid */
8646
8647/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8648FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8649/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8650FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8651/* Opcode 0xf3 0x0f 0xdf - invalid */
8652/* Opcode 0xf2 0x0f 0xdf - invalid */
8653
8654/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8655FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8656/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8657FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8658/* Opcode 0xf3 0x0f 0xe0 - invalid */
8659/* Opcode 0xf2 0x0f 0xe0 - invalid */
8660
8661/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8662FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8663/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8664FNIEMOP_STUB(iemOp_psraw_Vx_W);
8665/* Opcode 0xf3 0x0f 0xe1 - invalid */
8666/* Opcode 0xf2 0x0f 0xe1 - invalid */
8667
8668/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8669FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8670/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8671FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8672/* Opcode 0xf3 0x0f 0xe2 - invalid */
8673/* Opcode 0xf2 0x0f 0xe2 - invalid */
8674
8675/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8676FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8677/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8678FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8679/* Opcode 0xf3 0x0f 0xe3 - invalid */
8680/* Opcode 0xf2 0x0f 0xe3 - invalid */
8681
8682/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8683FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8684/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8685FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8686/* Opcode 0xf3 0x0f 0xe4 - invalid */
8687/* Opcode 0xf2 0x0f 0xe4 - invalid */
8688
8689/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8690FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8691/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8692FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8693/* Opcode 0xf3 0x0f 0xe5 - invalid */
8694/* Opcode 0xf2 0x0f 0xe5 - invalid */
8695
8696/* Opcode 0x0f 0xe6 - invalid */
8697/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8698FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8699/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8700FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8701/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8702FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8703
8704
/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
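
/*
 * Note on the two decoders above: both movntq and movntdq accept only the
 * memory destination form (mod=3 raises #UD), and the 128-bit variant
 * additionally enforces 16-byte alignment via IEM_MC_STORE_MEM_U128_ALIGN_SSE.
 * As an illustration only (an assumed guest-code example, not taken from this
 * file or its tests), non-temporal stores are typically paired with a store
 * fence so the weakly-ordered writes become globally visible:
 *
 *      movntdq [rdi], xmm0     ; rdi must be 16-byte aligned, else #GP
 *      sfence                  ; order the non-temporal stores
 */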

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

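/*
 * For orientation: both pxor decoders defer to a common "full register,
 * full register" worker defined earlier in this file.  A minimal sketch of
 * the register,register path of such a worker -- assuming it follows the
 * same IEM_MC conventions as the decoders above; see the actual definition
 * for the authoritative version, including the memory-operand path:
 *
 *      uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 *      if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 *      {
 *          // MMX register, MMX register: reference both registers and
 *          // invoke the supplied arithmetic worker (pImpl) on them.
 *          IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 *          IEM_MC_BEGIN(2, 0);
 *          IEM_MC_ARG(uint64_t *,       pDst, 0);
 *          IEM_MC_ARG(uint64_t const *, pSrc, 1);
 *          IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
 *          IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
 *          IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
 *          IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
 *          IEM_MC_ADVANCE_RIP();
 *          IEM_MC_END();
 *          return VINF_SUCCESS;
 *      }
 */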
/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
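
/*
 * Note on the vendor check above: Intel CPUs decode a ModR/M byte (plus any
 * SIB and displacement bytes it implies) for UD0 before raising #UD, so the
 * reported instruction length includes those bytes; AMD CPUs raise #UD after
 * just the two opcode bytes.  Illustrative encodings (assumed examples, not
 * taken from this file):
 *
 *      0F FF           ; complete UD0 on AMD
 *      0F FF C0        ; UD0 with a mod=3 ModR/M byte -- 3 bytes on Intel
 */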



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*         no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
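
/*
 * Illustrative dispatch sketch (not this file's actual caller): the table
 * holds four entries per opcode byte, one per mandatory-prefix column as
 * labelled above (none, 0x66, 0xf3, 0xf2), giving 256 * 4 = 1024 entries as
 * the AssertCompile checks.  Assuming the decoder tracks the active
 * mandatory prefix in a 0..3 index -- the field name idxPrefix is an
 * assumption here; see the real dispatch loop for the authoritative code --
 * the lookup for opcode byte b would be roughly:
 *
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 */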

/** @} */
