VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@66975

Last change on this file since 66975 was 66975, checked in by vboxsync, 8 years ago

IEM: Added docs and tests to movaps and movapd.

1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66975 2017-05-19 12:02:35Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
38 switch (pVCpu->iem.s.enmEffOpSize)
39 {
40 case IEMMODE_16BIT:
41 IEM_MC_BEGIN(0, 1);
42 IEM_MC_LOCAL(uint16_t, u16Ldtr);
43 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
44 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
45 IEM_MC_ADVANCE_RIP();
46 IEM_MC_END();
47 break;
48
49 case IEMMODE_32BIT:
50 IEM_MC_BEGIN(0, 1);
51 IEM_MC_LOCAL(uint32_t, u32Ldtr);
52 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
53 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
54 IEM_MC_ADVANCE_RIP();
55 IEM_MC_END();
56 break;
57
58 case IEMMODE_64BIT:
59 IEM_MC_BEGIN(0, 1);
60 IEM_MC_LOCAL(uint64_t, u64Ldtr);
61 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
62 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
63 IEM_MC_ADVANCE_RIP();
64 IEM_MC_END();
65 break;
66
67 IEM_NOT_REACHED_DEFAULT_CASE_RET();
68 }
69 }
70 else
71 {
72 IEM_MC_BEGIN(0, 2);
73 IEM_MC_LOCAL(uint16_t, u16Ldtr);
74 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
75 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
76 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
77 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
78 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
79 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
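/*
 * Illustrative note (not part of IEM): SLDT to memory always stores 16 bits
 * regardless of operand size, while the register forms above widen the 16-bit
 * LDTR selector to the effective operand size.  Minimal sketch of that
 * widening, using a hypothetical helper name:
 */
#if 0 /* illustrative sketch only */
static uint64_t iemEdSldtWiden(uint16_t u16LdtrSel, IEMMODE enmEffOpSize)
{
    switch (enmEffOpSize)
    {
        case IEMMODE_16BIT: return u16LdtrSel;              /* low word only  */
        case IEMMODE_32BIT: return (uint32_t)u16LdtrSel;    /* zero extended  */
        default:            return (uint64_t)u16LdtrSel;    /* zero extended  */
    }
}
#endif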
85
86
87/** Opcode 0x0f 0x00 /1. */
88FNIEMOPRM_DEF(iemOp_Grp6_str)
89{
90 IEMOP_MNEMONIC(str, "str Rv/Mw");
91 IEMOP_HLP_MIN_286();
92 IEMOP_HLP_NO_REAL_OR_V86_MODE();
93
94 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
95 {
96 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
97 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
98 switch (pVCpu->iem.s.enmEffOpSize)
99 {
100 case IEMMODE_16BIT:
101 IEM_MC_BEGIN(0, 1);
102 IEM_MC_LOCAL(uint16_t, u16Tr);
103 IEM_MC_FETCH_TR_U16(u16Tr);
104 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
105 IEM_MC_ADVANCE_RIP();
106 IEM_MC_END();
107 break;
108
109 case IEMMODE_32BIT:
110 IEM_MC_BEGIN(0, 1);
111 IEM_MC_LOCAL(uint32_t, u32Tr);
112 IEM_MC_FETCH_TR_U32(u32Tr);
113 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
114 IEM_MC_ADVANCE_RIP();
115 IEM_MC_END();
116 break;
117
118 case IEMMODE_64BIT:
119 IEM_MC_BEGIN(0, 1);
120 IEM_MC_LOCAL(uint64_t, u64Tr);
121 IEM_MC_FETCH_TR_U64(u64Tr);
122 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
123 IEM_MC_ADVANCE_RIP();
124 IEM_MC_END();
125 break;
126
127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
128 }
129 }
130 else
131 {
132 IEM_MC_BEGIN(0, 2);
133 IEM_MC_LOCAL(uint16_t, u16Tr);
134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
136 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
137 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
138 IEM_MC_FETCH_TR_U16(u16Tr);
139 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
140 IEM_MC_ADVANCE_RIP();
141 IEM_MC_END();
142 }
143 return VINF_SUCCESS;
144}
145
146
147/** Opcode 0x0f 0x00 /2. */
148FNIEMOPRM_DEF(iemOp_Grp6_lldt)
149{
150 IEMOP_MNEMONIC(lldt, "lldt Ew");
151 IEMOP_HLP_MIN_286();
152 IEMOP_HLP_NO_REAL_OR_V86_MODE();
153
154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
155 {
156 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
157 IEM_MC_BEGIN(1, 0);
158 IEM_MC_ARG(uint16_t, u16Sel, 0);
159 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
160 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
161 IEM_MC_END();
162 }
163 else
164 {
165 IEM_MC_BEGIN(1, 1);
166 IEM_MC_ARG(uint16_t, u16Sel, 0);
167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
169 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
170 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
171 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
172 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
173 IEM_MC_END();
174 }
175 return VINF_SUCCESS;
176}
177
178
179/** Opcode 0x0f 0x00 /3. */
180FNIEMOPRM_DEF(iemOp_Grp6_ltr)
181{
182 IEMOP_MNEMONIC(ltr, "ltr Ew");
183 IEMOP_HLP_MIN_286();
184 IEMOP_HLP_NO_REAL_OR_V86_MODE();
185
186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
187 {
188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
189 IEM_MC_BEGIN(1, 0);
190 IEM_MC_ARG(uint16_t, u16Sel, 0);
191 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
192 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
193 IEM_MC_END();
194 }
195 else
196 {
197 IEM_MC_BEGIN(1, 1);
198 IEM_MC_ARG(uint16_t, u16Sel, 0);
199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
202 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
203 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
204 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
205 IEM_MC_END();
206 }
207 return VINF_SUCCESS;
208}
209
210
211/** Opcode 0x0f 0x00 /4 and /5, common worker for verr and verw. */
212FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
213{
214 IEMOP_HLP_MIN_286();
215 IEMOP_HLP_NO_REAL_OR_V86_MODE();
216
217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
218 {
219 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
220 IEM_MC_BEGIN(2, 0);
221 IEM_MC_ARG(uint16_t, u16Sel, 0);
222 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
223 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
224 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
225 IEM_MC_END();
226 }
227 else
228 {
229 IEM_MC_BEGIN(2, 1);
230 IEM_MC_ARG(uint16_t, u16Sel, 0);
231 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
234 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
235 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
236 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
237 IEM_MC_END();
238 }
239 return VINF_SUCCESS;
240}
241
242
243/** Opcode 0x0f 0x00 /4. */
244FNIEMOPRM_DEF(iemOp_Grp6_verr)
245{
246 IEMOP_MNEMONIC(verr, "verr Ew");
247 IEMOP_HLP_MIN_286();
248 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
249}
250
251
252/** Opcode 0x0f 0x00 /5. */
253FNIEMOPRM_DEF(iemOp_Grp6_verw)
254{
255 IEMOP_MNEMONIC(verw, "verw Ew");
256 IEMOP_HLP_MIN_286();
257 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
258}
259
260
261/**
262 * Group 6 jump table.
263 */
264IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
265{
266 iemOp_Grp6_sldt,
267 iemOp_Grp6_str,
268 iemOp_Grp6_lldt,
269 iemOp_Grp6_ltr,
270 iemOp_Grp6_verr,
271 iemOp_Grp6_verw,
272 iemOp_InvalidWithRM,
273 iemOp_InvalidWithRM
274};
275
276/** Opcode 0x0f 0x00. */
277FNIEMOP_DEF(iemOp_Grp6)
278{
279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
280 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
281}
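/*
 * Illustrative note (not part of IEM): the dispatcher above indexes the jump
 * table with the ModR/M reg field.  The field layout is architectural:
 * mod = bits 7:6 (11b selects the register form), reg = bits 5:3 (the /0../7
 * group index), r/m = bits 2:0.  Minimal sketch with a hypothetical helper:
 */
#if 0 /* illustrative sketch only */
static void iemEdDecodeModRM(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = bRm >> 6;          /* 3 means register operand, else memory.   */
    *pbReg = (bRm >> 3) & 7;    /* selects the g_apfnGroup6[reg] entry.     */
    *pbRm  = bRm & 7;           /* register number / addressing form.       */
}
#endif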
282
283
284/** Opcode 0x0f 0x01 /0. */
285FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
286{
287 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
288 IEMOP_HLP_MIN_286();
289 IEMOP_HLP_64BIT_OP_SIZE();
290 IEM_MC_BEGIN(2, 1);
291 IEM_MC_ARG(uint8_t, iEffSeg, 0);
292 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
295 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
296 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
297 IEM_MC_END();
298 return VINF_SUCCESS;
299}
300
301
302/** Opcode 0x0f 0x01 0xc1. */
303FNIEMOP_DEF(iemOp_Grp7_vmcall)
304{
305 IEMOP_BITCH_ABOUT_STUB();
306 return IEMOP_RAISE_INVALID_OPCODE();
307}
308
309
310/** Opcode 0x0f 0x01 0xc2. */
311FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
312{
313 IEMOP_BITCH_ABOUT_STUB();
314 return IEMOP_RAISE_INVALID_OPCODE();
315}
316
317
318/** Opcode 0x0f 0x01 0xc3. */
319FNIEMOP_DEF(iemOp_Grp7_vmresume)
320{
321 IEMOP_BITCH_ABOUT_STUB();
322 return IEMOP_RAISE_INVALID_OPCODE();
323}
324
325
326/** Opcode 0x0f 0x01 0xc4. */
327FNIEMOP_DEF(iemOp_Grp7_vmxoff)
328{
329 IEMOP_BITCH_ABOUT_STUB();
330 return IEMOP_RAISE_INVALID_OPCODE();
331}
332
333
334/** Opcode 0x0f 0x01 /1. */
335FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
336{
337 IEMOP_MNEMONIC(sidt, "sidt Ms");
338 IEMOP_HLP_MIN_286();
339 IEMOP_HLP_64BIT_OP_SIZE();
340 IEM_MC_BEGIN(2, 1);
341 IEM_MC_ARG(uint8_t, iEffSeg, 0);
342 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
345 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
346 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
347 IEM_MC_END();
348 return VINF_SUCCESS;
349}
350
351
352/** Opcode 0x0f 0x01 0xc8. */
353FNIEMOP_DEF(iemOp_Grp7_monitor)
354{
355 IEMOP_MNEMONIC(monitor, "monitor");
356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
357 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
358}
359
360
361/** Opcode 0x0f 0x01 0xc9. */
362FNIEMOP_DEF(iemOp_Grp7_mwait)
363{
364 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
366 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
367}
368
369
370/** Opcode 0x0f 0x01 /2. */
371FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
372{
373 IEMOP_MNEMONIC(lgdt, "lgdt");
374 IEMOP_HLP_64BIT_OP_SIZE();
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint8_t, iEffSeg, 0);
377 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
378 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
381 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
382 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
383 IEM_MC_END();
384 return VINF_SUCCESS;
385}
386
387
388/** Opcode 0x0f 0x01 0xd0. */
389FNIEMOP_DEF(iemOp_Grp7_xgetbv)
390{
391 IEMOP_MNEMONIC(xgetbv, "xgetbv");
392 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
393 {
394 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
395 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
396 }
397 return IEMOP_RAISE_INVALID_OPCODE();
398}
399
400
401/** Opcode 0x0f 0x01 0xd1. */
402FNIEMOP_DEF(iemOp_Grp7_xsetbv)
403{
404 IEMOP_MNEMONIC(xsetbv, "xsetbv");
405 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
406 {
407 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
408 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
409 }
410 return IEMOP_RAISE_INVALID_OPCODE();
411}
412
413
414/** Opcode 0x0f 0x01 /3. */
415FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
416{
417 IEMOP_MNEMONIC(lidt, "lidt");
418 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
419 ? IEMMODE_64BIT
420 : pVCpu->iem.s.enmEffOpSize;
421 IEM_MC_BEGIN(3, 1);
422 IEM_MC_ARG(uint8_t, iEffSeg, 0);
423 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
424 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
427 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
428 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
429 IEM_MC_END();
430 return VINF_SUCCESS;
431}
432
433
434#ifdef VBOX_WITH_NESTED_HWVIRT
435/** Opcode 0x0f 0x01 0xd8. */
436FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
437{
438 IEMOP_MNEMONIC(vmrun, "vmrun");
439 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
440}
441
442/** Opcode 0x0f 0x01 0xd9. */
443FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
444{
445 IEMOP_MNEMONIC(vmmcall, "vmmcall");
446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
447}
448
449
450/** Opcode 0x0f 0x01 0xda. */
451FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
452{
453 IEMOP_MNEMONIC(vmload, "vmload");
454 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
455}
456
457
458/** Opcode 0x0f 0x01 0xdb. */
459FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
460{
461 IEMOP_MNEMONIC(vmsave, "vmsave");
462 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
463}
464
465
466/** Opcode 0x0f 0x01 0xdc. */
467FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
468{
469 IEMOP_MNEMONIC(stgi, "stgi");
470 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
471}
472
473
474/** Opcode 0x0f 0x01 0xdd. */
475FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
476{
477 IEMOP_MNEMONIC(clgi, "clgi");
478 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
479}
480
481
482/** Opcode 0x0f 0x01 0xdf. */
483FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
484{
485 IEMOP_MNEMONIC(invlpga, "invlpga");
486 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
487}
488
489
490/** Opcode 0x0f 0x01 0xde. */
491FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
492{
493 IEMOP_MNEMONIC(skinit, "skinit");
494 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
495}
496#else
497/** Opcode 0x0f 0x01 0xd8. */
498FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
499
500/** Opcode 0x0f 0x01 0xd9. */
501FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
502
503/** Opcode 0x0f 0x01 0xda. */
504FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
505
506/** Opcode 0x0f 0x01 0xdb. */
507FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
508
509/** Opcode 0x0f 0x01 0xdc. */
510FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
511
512/** Opcode 0x0f 0x01 0xdd. */
513FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
514
515/** Opcode 0x0f 0x01 0xdf. */
516FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
517
518/** Opcode 0x0f 0x01 0xde. */
519FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
520#endif /* VBOX_WITH_NESTED_HWVIRT */
521
522/** Opcode 0x0f 0x01 /4. */
523FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
524{
525 IEMOP_MNEMONIC(smsw, "smsw");
526 IEMOP_HLP_MIN_286();
527 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
528 {
529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
530 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
531 switch (pVCpu->iem.s.enmEffOpSize)
532 {
533 case IEMMODE_16BIT:
534 IEM_MC_BEGIN(0, 1);
535 IEM_MC_LOCAL(uint16_t, u16Tmp);
536 IEM_MC_FETCH_CR0_U16(u16Tmp);
537 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
538 { /* likely */ }
539 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
540 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
541 else
542 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
543 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
544 IEM_MC_ADVANCE_RIP();
545 IEM_MC_END();
546 return VINF_SUCCESS;
547
548 case IEMMODE_32BIT:
549 IEM_MC_BEGIN(0, 1);
550 IEM_MC_LOCAL(uint32_t, u32Tmp);
551 IEM_MC_FETCH_CR0_U32(u32Tmp);
552 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
553 IEM_MC_ADVANCE_RIP();
554 IEM_MC_END();
555 return VINF_SUCCESS;
556
557 case IEMMODE_64BIT:
558 IEM_MC_BEGIN(0, 1);
559 IEM_MC_LOCAL(uint64_t, u64Tmp);
560 IEM_MC_FETCH_CR0_U64(u64Tmp);
561 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
562 IEM_MC_ADVANCE_RIP();
563 IEM_MC_END();
564 return VINF_SUCCESS;
565
566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
567 }
568 }
569 else
570 {
571 /* Ignore operand size here, memory refs are always 16-bit. */
572 IEM_MC_BEGIN(0, 2);
573 IEM_MC_LOCAL(uint16_t, u16Tmp);
574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
577 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
578 IEM_MC_FETCH_CR0_U16(u16Tmp);
579 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
580 { /* likely */ }
581 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
582 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
583 else
584 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
585 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
586 IEM_MC_ADVANCE_RIP();
587 IEM_MC_END();
588 return VINF_SUCCESS;
589 }
590}
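/*
 * Illustrative note (not part of IEM): the CPU dependent OR masks above model
 * how older CPUs report the machine status word.  A 286 implements only MSW
 * bits 0..3 (PE/MP/EM/TS) and reads the rest as ones (0xfff0); a 386 also
 * implements bit 4 (ET), leaving bits 5..15 to read as ones (0xffe0); 486 and
 * later return the low word of CR0 unmodified.  Sketch:
 */
#if 0 /* illustrative sketch only */
static uint16_t iemEdComposeMsw(uint16_t u16Cr0, uint16_t uTargetCpu)
{
    if (uTargetCpu > IEMTARGETCPU_386)
        return u16Cr0;                          /* 486+: raw CR0 bits 0..15 */
    if (uTargetCpu == IEMTARGETCPU_386)
        return u16Cr0 | UINT16_C(0xffe0);       /* 386: ET implemented too  */
    return u16Cr0 | UINT16_C(0xfff0);           /* 286: only PE/MP/EM/TS    */
}
#endif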
591
592
593/** Opcode 0x0f 0x01 /6. */
594FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
595{
596 /* The operand size is effectively ignored, all is 16-bit and only the
597 lower 4 bits (PE, MP, EM and TS) are used. */
598 IEMOP_MNEMONIC(lmsw, "lmsw");
599 IEMOP_HLP_MIN_286();
600 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
601 {
602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
603 IEM_MC_BEGIN(1, 0);
604 IEM_MC_ARG(uint16_t, u16Tmp, 0);
605 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
606 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
607 IEM_MC_END();
608 }
609 else
610 {
611 IEM_MC_BEGIN(1, 1);
612 IEM_MC_ARG(uint16_t, u16Tmp, 0);
613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
616 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
617 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
618 IEM_MC_END();
619 }
620 return VINF_SUCCESS;
621}
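/*
 * Illustrative note (not part of IEM): iemCImpl_lmsw (defined elsewhere)
 * applies only the low four MSW bits -- PE, MP, EM and TS -- and the
 * architecture specifies that LMSW can set PE but never clear it.  Minimal
 * sketch of that update rule, using a hypothetical helper:
 */
#if 0 /* illustrative sketch only */
static uint32_t iemEdApplyLmsw(uint32_t uCr0, uint16_t u16NewMsw)
{
    uint32_t const fMask   = X86_CR0_PE | X86_CR0_MP | X86_CR0_EM | X86_CR0_TS;
    uint32_t       uNewCr0 = (uCr0 & ~fMask) | (u16NewMsw & fMask);
    uNewCr0 |= uCr0 & X86_CR0_PE;   /* PE is sticky: LMSW cannot clear it. */
    return uNewCr0;
}
#endif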
622
623
624/** Opcode 0x0f 0x01 /7. */
625FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
626{
627 IEMOP_MNEMONIC(invlpg, "invlpg");
628 IEMOP_HLP_MIN_486();
629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
630 IEM_MC_BEGIN(1, 1);
631 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
633 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
634 IEM_MC_END();
635 return VINF_SUCCESS;
636}
637
638
639/** Opcode 0x0f 0x01 0xf8. */
640FNIEMOP_DEF(iemOp_Grp7_swapgs)
641{
642 IEMOP_MNEMONIC(swapgs, "swapgs");
643 IEMOP_HLP_ONLY_64BIT();
644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
646}
647
648
649/** Opcode 0x0f 0x01 0xf9. */
650FNIEMOP_DEF(iemOp_Grp7_rdtscp)
651{
652 IEMOP_MNEMONIC(rdtscp, "rdtscp");
653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
654 /** @todo SVM intercept removal from here. */
655 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
656 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
657}
658
659
660/**
661 * Group 7 jump table, memory variant.
662 */
663IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
664{
665 iemOp_Grp7_sgdt,
666 iemOp_Grp7_sidt,
667 iemOp_Grp7_lgdt,
668 iemOp_Grp7_lidt,
669 iemOp_Grp7_smsw,
670 iemOp_InvalidWithRM,
671 iemOp_Grp7_lmsw,
672 iemOp_Grp7_invlpg
673};
674
675
676/** Opcode 0x0f 0x01. */
677FNIEMOP_DEF(iemOp_Grp7)
678{
679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
680 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
681 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
682
683 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
684 {
685 case 0:
686 switch (bRm & X86_MODRM_RM_MASK)
687 {
688 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
689 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
690 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
691 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
692 }
693 return IEMOP_RAISE_INVALID_OPCODE();
694
695 case 1:
696 switch (bRm & X86_MODRM_RM_MASK)
697 {
698 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
699 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
700 }
701 return IEMOP_RAISE_INVALID_OPCODE();
702
703 case 2:
704 switch (bRm & X86_MODRM_RM_MASK)
705 {
706 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
707 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
708 }
709 return IEMOP_RAISE_INVALID_OPCODE();
710
711 case 3:
712 switch (bRm & X86_MODRM_RM_MASK)
713 {
714 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
715 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
716 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
717 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
718 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
719 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
720 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
721 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
723 }
724
725 case 4:
726 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
727
728 case 5:
729 return IEMOP_RAISE_INVALID_OPCODE();
730
731 case 6:
732 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
733
734 case 7:
735 switch (bRm & X86_MODRM_RM_MASK)
736 {
737 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
738 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
739 }
740 return IEMOP_RAISE_INVALID_OPCODE();
741
742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
743 }
744}
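/*
 * Illustrative note (not part of IEM): for the register forms dispatched
 * above, the whole ModR/M byte is effectively part of the opcode.  E.g. vmrun
 * is 0f 01 d8 where 0xd8 = 11 011 000b (mod=3, reg=3, r/m=0), vmcall is
 * 0f 01 c1 (mod=3, reg=0, r/m=1), and rdtscp is 0f 01 f9 (mod=3, reg=7,
 * r/m=1).  Sketch verifying the arithmetic:
 */
#if 0 /* illustrative sketch only */
static void iemEdGrp7ByteExamples(void)
{
    Assert((0xd8 >> 6) == 3 && ((0xd8 >> 3) & 7) == 3 && (0xd8 & 7) == 0); /* vmrun  */
    Assert((0xc1 >> 6) == 3 && ((0xc1 >> 3) & 7) == 0 && (0xc1 & 7) == 1); /* vmcall */
    Assert((0xf9 >> 6) == 3 && ((0xf9 >> 3) & 7) == 7 && (0xf9 & 7) == 1); /* rdtscp */
}
#endif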
745
746/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
747FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
748{
749 IEMOP_HLP_NO_REAL_OR_V86_MODE();
750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
751
752 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
753 {
754 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
755 switch (pVCpu->iem.s.enmEffOpSize)
756 {
757 case IEMMODE_16BIT:
758 {
759 IEM_MC_BEGIN(3, 0);
760 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
761 IEM_MC_ARG(uint16_t, u16Sel, 1);
762 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
763
764 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
765 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
766 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
767
768 IEM_MC_END();
769 return VINF_SUCCESS;
770 }
771
772 case IEMMODE_32BIT:
773 case IEMMODE_64BIT:
774 {
775 IEM_MC_BEGIN(3, 0);
776 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
777 IEM_MC_ARG(uint16_t, u16Sel, 1);
778 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
779
780 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
781 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
782 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
783
784 IEM_MC_END();
785 return VINF_SUCCESS;
786 }
787
788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
789 }
790 }
791 else
792 {
793 switch (pVCpu->iem.s.enmEffOpSize)
794 {
795 case IEMMODE_16BIT:
796 {
797 IEM_MC_BEGIN(3, 1);
798 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
799 IEM_MC_ARG(uint16_t, u16Sel, 1);
800 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
802
803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
804 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
805
806 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
807 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
808 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
809
810 IEM_MC_END();
811 return VINF_SUCCESS;
812 }
813
814 case IEMMODE_32BIT:
815 case IEMMODE_64BIT:
816 {
817 IEM_MC_BEGIN(3, 1);
818 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
819 IEM_MC_ARG(uint16_t, u16Sel, 1);
820 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
822
823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
824 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
825/** @todo testcase: make sure it's a 16-bit read. */
826
827 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
828 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
829 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
830
831 IEM_MC_END();
832 return VINF_SUCCESS;
833 }
834
835 IEM_NOT_REACHED_DEFAULT_CASE_RET();
836 }
837 }
838}
839
840
841
842/** Opcode 0x0f 0x02. */
843FNIEMOP_DEF(iemOp_lar_Gv_Ew)
844{
845 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
846 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
847}
848
849
850/** Opcode 0x0f 0x03. */
851FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
852{
853 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
854 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
855}
856
857
858/** Opcode 0x0f 0x05. */
859FNIEMOP_DEF(iemOp_syscall)
860{
861 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
863 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
864}
865
866
867/** Opcode 0x0f 0x06. */
868FNIEMOP_DEF(iemOp_clts)
869{
870 IEMOP_MNEMONIC(clts, "clts");
871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
872 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
873}
874
875
876/** Opcode 0x0f 0x07. */
877FNIEMOP_DEF(iemOp_sysret)
878{
879 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
881 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
882}
883
884
885/** Opcode 0x0f 0x08. */
886FNIEMOP_DEF(iemOp_invd)
887{
888 IEMOP_MNEMONIC(invd, "invd");
889#ifdef VBOX_WITH_NESTED_HWVIRT
890 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
891 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
892#else
893 RT_NOREF_PV(pVCpu);
894#endif
895 /** @todo implement invd for the regular case (above only handles nested SVM
896 * exits). */
897 IEMOP_BITCH_ABOUT_STUB();
898 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
899}
900
901// IEMOP_HLP_MIN_486();
902
903
904/** Opcode 0x0f 0x09. */
905FNIEMOP_DEF(iemOp_wbinvd)
906{
907 IEMOP_MNEMONIC(wbinvd, "wbinvd");
908 IEMOP_HLP_MIN_486();
909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
910 IEM_MC_BEGIN(0, 0);
911 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
912 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
913 IEM_MC_ADVANCE_RIP();
914 IEM_MC_END();
915 return VINF_SUCCESS; /* ignore for now */
916}
917
918
919/** Opcode 0x0f 0x0b. */
920FNIEMOP_DEF(iemOp_ud2)
921{
922 IEMOP_MNEMONIC(ud2, "ud2");
923 return IEMOP_RAISE_INVALID_OPCODE();
924}
925
926/** Opcode 0x0f 0x0d. */
927FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
928{
929 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
930 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
931 {
932 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
933 return IEMOP_RAISE_INVALID_OPCODE();
934 }
935
936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
938 {
939 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
940 return IEMOP_RAISE_INVALID_OPCODE();
941 }
942
943 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
944 {
945 case 2: /* Aliased to /0 for the time being. */
946 case 4: /* Aliased to /0 for the time being. */
947 case 5: /* Aliased to /0 for the time being. */
948 case 6: /* Aliased to /0 for the time being. */
949 case 7: /* Aliased to /0 for the time being. */
950 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
951 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
952 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
954 }
955
956 IEM_MC_BEGIN(0, 1);
957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
960 /* Currently a NOP. */
961 NOREF(GCPtrEffSrc);
962 IEM_MC_ADVANCE_RIP();
963 IEM_MC_END();
964 return VINF_SUCCESS;
965}
966
967
968/** Opcode 0x0f 0x0e. */
969FNIEMOP_STUB(iemOp_femms);
970
971
972/** Opcode 0x0f 0x0f. */
973FNIEMOP_DEF(iemOp_3Dnow)
974{
975 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
976 {
977 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
978 return IEMOP_RAISE_INVALID_OPCODE();
979 }
980
981#ifdef IEM_WITH_3DNOW
982 /* This is pretty sparse, use switch instead of table. */
983 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
984 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
985#else
986 IEMOP_BITCH_ABOUT_STUB();
987 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
988#endif
989}
990
991
992/**
993 * @opcode 0x10
994 * @oppfx none
995 * @opcpuid sse
996 * @opgroup og_sse_simdfp_datamove
997 * @opxcpttype 4UA
998 * @optest op1=1 op2=2 -> op1=2
999 * @optest op1=0 op2=-22 -> op1=-22
1000 */
1001FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1002{
1003 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1004 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1006 {
1007 /*
1008 * Register, register.
1009 */
1010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1011 IEM_MC_BEGIN(0, 0);
1012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1013 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1014 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1015 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1016 IEM_MC_ADVANCE_RIP();
1017 IEM_MC_END();
1018 }
1019 else
1020 {
1021 /*
1022 * Register, memory.
1023 */
1024 IEM_MC_BEGIN(0, 2);
1025 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1027
1028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1030 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1032
1033 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1034 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1035
1036 IEM_MC_ADVANCE_RIP();
1037 IEM_MC_END();
1038 }
1039 return VINF_SUCCESS;
1040
1041}
1042
1043
1044/**
1045 * @opcode 0x10
1046 * @oppfx 0x66
1047 * @opcpuid sse2
1048 * @opgroup og_sse2_pcksclr_datamove
1049 * @opxcpttype 4UA
1050 * @optest op1=1 op2=2 -> op1=2
1051 * @optest op1=0 op2=-42 -> op1=-42
1052 */
1053FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1054{
1055 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1058 {
1059 /*
1060 * Register, register.
1061 */
1062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1063 IEM_MC_BEGIN(0, 0);
1064 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1066 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1067 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1068 IEM_MC_ADVANCE_RIP();
1069 IEM_MC_END();
1070 }
1071 else
1072 {
1073 /*
1074 * Register, memory.
1075 */
1076 IEM_MC_BEGIN(0, 2);
1077 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1079
1080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1082 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1083 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1084
1085 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1086 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1087
1088 IEM_MC_ADVANCE_RIP();
1089 IEM_MC_END();
1090 }
1091 return VINF_SUCCESS;
1092}
1093
1094
1095/**
1096 * @opcode 0x10
1097 * @oppfx 0xf3
1098 * @opcpuid sse
1099 * @opgroup og_sse_simdfp_datamove
1100 * @opxcpttype 5
1101 * @optest op1=1 op2=2 -> op1=2
1102 * @optest op1=0 op2=-22 -> op1=-22
1103 */
1104FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1105{
1106 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1108 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1109 {
1110 /*
1111 * Register, register.
1112 */
1113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1114 IEM_MC_BEGIN(0, 1);
1115 IEM_MC_LOCAL(uint32_t, uSrc);
1116
1117 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1118 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1119 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1120 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1121
1122 IEM_MC_ADVANCE_RIP();
1123 IEM_MC_END();
1124 }
1125 else
1126 {
1127 /*
1128 * Register, memory.
1129 */
1130 IEM_MC_BEGIN(0, 2);
1131 IEM_MC_LOCAL(uint32_t, uSrc);
1132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1133
1134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1136 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1137 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1138
1139 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1140 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1141
1142 IEM_MC_ADVANCE_RIP();
1143 IEM_MC_END();
1144 }
1145 return VINF_SUCCESS;
1146}
1147
1148
1149/**
1150 * @opcode 0x10
1151 * @oppfx 0xf2
1152 * @opcpuid sse2
1153 * @opgroup og_sse2_pcksclr_datamove
1154 * @opxcpttype 5
1155 * @optest op1=1 op2=2 -> op1=2
1156 * @optest op1=0 op2=-42 -> op1=-42
1157 */
1158FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1159{
1160 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1162 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1163 {
1164 /*
1165 * Register, register.
1166 */
1167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1168 IEM_MC_BEGIN(0, 1);
1169 IEM_MC_LOCAL(uint64_t, uSrc);
1170
1171 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1172 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1173 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1174 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1175
1176 IEM_MC_ADVANCE_RIP();
1177 IEM_MC_END();
1178 }
1179 else
1180 {
1181 /*
1182 * Register, memory.
1183 */
1184 IEM_MC_BEGIN(0, 2);
1185 IEM_MC_LOCAL(uint64_t, uSrc);
1186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1187
1188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1190 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1191 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1192
1193 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1194 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1195
1196 IEM_MC_ADVANCE_RIP();
1197 IEM_MC_END();
1198 }
1199 return VINF_SUCCESS;
1200}
1201
1202
1203/**
1204 * @opcode 0x11
1205 * @oppfx none
1206 * @opcpuid sse
1207 * @opgroup og_sse_simdfp_datamove
1208 * @opxcpttype 4UA
1209 * @optest op1=1 op2=2 -> op1=2
1210 * @optest op1=0 op2=-42 -> op1=-42
1211 */
1212FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1213{
1214 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1216 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1217 {
1218 /*
1219 * Register, register.
1220 */
1221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1222 IEM_MC_BEGIN(0, 0);
1223 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1224 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1225 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1226 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1227 IEM_MC_ADVANCE_RIP();
1228 IEM_MC_END();
1229 }
1230 else
1231 {
1232 /*
1233 * Memory, register.
1234 */
1235 IEM_MC_BEGIN(0, 2);
1236 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1238
1239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1243
1244 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1245 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1246
1247 IEM_MC_ADVANCE_RIP();
1248 IEM_MC_END();
1249 }
1250 return VINF_SUCCESS;
1251}
1252
1253
1254/**
1255 * @opcode 0x11
1256 * @oppfx 0x66
1257 * @opcpuid sse2
1258 * @opgroup og_sse2_pcksclr_datamove
1259 * @opxcpttype 4UA
1260 * @optest op1=1 op2=2 -> op1=2
1261 * @optest op1=0 op2=-42 -> op1=-42
1262 */
1263FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1264{
1265 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1267 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1268 {
1269 /*
1270 * Register, register.
1271 */
1272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1273 IEM_MC_BEGIN(0, 0);
1274 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1275 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1276 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1277 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1278 IEM_MC_ADVANCE_RIP();
1279 IEM_MC_END();
1280 }
1281 else
1282 {
1283 /*
1284 * Memory, register.
1285 */
1286 IEM_MC_BEGIN(0, 2);
1287 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1289
1290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1292 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1294
1295 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1296 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1297
1298 IEM_MC_ADVANCE_RIP();
1299 IEM_MC_END();
1300 }
1301 return VINF_SUCCESS;
1302}
1303
1304
1305/**
1306 * @opcode 0x11
1307 * @oppfx 0xf3
1308 * @opcpuid sse
1309 * @opgroup og_sse_simdfp_datamove
1310 * @opxcpttype 5
1311 * @optest op1=1 op2=2 -> op1=2
1312 * @optest op1=0 op2=-22 -> op1=-22
1313 */
1314FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1315{
1316 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1319 {
1320 /*
1321 * Register, register.
1322 */
1323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1324 IEM_MC_BEGIN(0, 1);
1325 IEM_MC_LOCAL(uint32_t, uSrc);
1326
1327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1328 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1329 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1330 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1331
1332 IEM_MC_ADVANCE_RIP();
1333 IEM_MC_END();
1334 }
1335 else
1336 {
1337 /*
1338 * Memory, register.
1339 */
1340 IEM_MC_BEGIN(0, 2);
1341 IEM_MC_LOCAL(uint32_t, uSrc);
1342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1343
1344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1346 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1348
1349 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1350 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1351
1352 IEM_MC_ADVANCE_RIP();
1353 IEM_MC_END();
1354 }
1355 return VINF_SUCCESS;
1356}
1357
1358
1359/**
1360 * @opcode 0x11
1361 * @oppfx 0xf2
1362 * @opcpuid sse2
1363 * @opgroup og_sse2_pcksclr_datamove
1364 * @opxcpttype 5
1365 * @optest op1=1 op2=2 -> op1=2
1366 * @optest op1=0 op2=-42 -> op1=-42
1367 */
1368FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1369{
1370 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1373 {
1374 /*
1375 * Register, register.
1376 */
1377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1378 IEM_MC_BEGIN(0, 1);
1379 IEM_MC_LOCAL(uint64_t, uSrc);
1380
1381 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1382 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1383 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1384 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1385
1386 IEM_MC_ADVANCE_RIP();
1387 IEM_MC_END();
1388 }
1389 else
1390 {
1391 /*
1392 * Memory, register.
1393 */
1394 IEM_MC_BEGIN(0, 2);
1395 IEM_MC_LOCAL(uint64_t, uSrc);
1396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1397
1398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1400 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1402
1403 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1404 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1405
1406 IEM_MC_ADVANCE_RIP();
1407 IEM_MC_END();
1408 }
1409 return VINF_SUCCESS;
1410}
1411
1412
1413FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1414{
1415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1416 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1417 {
1418 /**
1419 * @opcode 0x12
1420 * @opcodesub 11 mr/reg
1421 * @oppfx none
1422 * @opcpuid sse
1423 * @opgroup og_sse_simdfp_datamove
1424 * @opxcpttype 5
1425 * @optest op1=1 op2=2 -> op1=2
1426 * @optest op1=0 op2=-42 -> op1=-42
1427 */
1428 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1429
1430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1431 IEM_MC_BEGIN(0, 1);
1432 IEM_MC_LOCAL(uint64_t, uSrc);
1433
1434 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1436 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1437 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1438
1439 IEM_MC_ADVANCE_RIP();
1440 IEM_MC_END();
1441 }
1442 else
1443 {
1444 /**
1445 * @opdone
1446 * @opcode 0x12
1447 * @opcodesub !11 mr/reg
1448 * @oppfx none
1449 * @opcpuid sse
1450 * @opgroup og_sse_simdfp_datamove
1451 * @opxcpttype 5
1452 * @optest op1=1 op2=2 -> op1=2
1453 * @optest op1=0 op2=-42 -> op1=-42
1454 * @opfunction iemOp_movlps_Vq_Mq__movhlps
1455 */
1456 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1457
1458 IEM_MC_BEGIN(0, 2);
1459 IEM_MC_LOCAL(uint64_t, uSrc);
1460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1461
1462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1466
1467 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1468 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1469
1470 IEM_MC_ADVANCE_RIP();
1471 IEM_MC_END();
1472 }
1473 return VINF_SUCCESS;
1474}
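/*
 * Illustrative note (not part of IEM): both forms above move a single qword
 * and preserve the destination's high qword.  The register form (MOVHLPS)
 * copies the HIGH qword of the source into the LOW qword of the destination;
 * the memory form (MOVLPS) loads the low qword from memory.  Sketch of the
 * register form:
 */
#if 0 /* illustrative sketch only */
static void iemEdMovhlps(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    puDst->au64[0] = puSrc->au64[1];    /* low(dst) = high(src)          */
    /* puDst->au64[1] is deliberately left untouched.                    */
}
#endif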
1475
1476
1477/**
1478 * @opcode 0x12
1479 * @opcodesub !11 mr/reg
1480 * @oppfx 0x66
1481 * @opcpuid sse2
1482 * @opgroup og_sse2_pcksclr_datamove
1483 * @opxcpttype 5
1484 * @optest op1=1 op2=2 -> op1=2
1485 * @optest op1=0 op2=-42 -> op1=-42
1486 */
1487FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1488{
1489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1490 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1491 {
1492 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1493
1494 IEM_MC_BEGIN(0, 2);
1495 IEM_MC_LOCAL(uint64_t, uSrc);
1496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1497
1498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1500 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1502
1503 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1504 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1505
1506 IEM_MC_ADVANCE_RIP();
1507 IEM_MC_END();
1508 return VINF_SUCCESS;
1509 }
1510
1511 /**
1512 * @opdone
1513 * @opmnemonic ud660f12m3
1514 * @opcode 0x12
1515 * @opcodesub 11 mr/reg
1516 * @oppfx 0x66
1517 * @opunused immediate
1518 * @opcpuid sse
1519 * @optest ->
1520 */
1521 return IEMOP_RAISE_INVALID_OPCODE();
1522}
1523
1524
1525/**
1526 * @opcode 0x12
1527 * @oppfx 0xf3
1528 * @opcpuid sse3
1529 * @opgroup og_sse3_pcksclr_datamove
1530 * @opxcpttype 4
1531 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1532 * op1=0x00000002000000020000000100000001
1533 */
1534FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1535{
1536 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1538 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1539 {
1540 /*
1541 * Register, register.
1542 */
1543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1544 IEM_MC_BEGIN(2, 0);
1545 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1546 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1547
1548 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1549 IEM_MC_PREPARE_SSE_USAGE();
1550
1551 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1552 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1553 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1554
1555 IEM_MC_ADVANCE_RIP();
1556 IEM_MC_END();
1557 }
1558 else
1559 {
1560 /*
1561 * Register, memory.
1562 */
1563 IEM_MC_BEGIN(2, 2);
1564 IEM_MC_LOCAL(RTUINT128U, uSrc);
1565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1566 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1567 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1568
1569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1571 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1572 IEM_MC_PREPARE_SSE_USAGE();
1573
1574 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1575 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1576 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1577
1578 IEM_MC_ADVANCE_RIP();
1579 IEM_MC_END();
1580 }
1581 return VINF_SUCCESS;
1582}
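/*
 * Illustrative note (not part of IEM): the @optest results above follow from
 * the shuffle MOVSLDUP performs -- it duplicates the even (low) dwords of the
 * source.  Sketch of what the iemAImpl_movsldup worker (defined elsewhere)
 * computes:
 */
#if 0 /* illustrative sketch only */
static void iemEdMovsldup(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    uint32_t const uDw0 = puSrc->au32[0];
    uint32_t const uDw2 = puSrc->au32[2];
    puDst->au32[0] = uDw0;
    puDst->au32[1] = uDw0;
    puDst->au32[2] = uDw2;
    puDst->au32[3] = uDw2;
}
#endif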
1583
1584
1585/**
1586 * @opcode 0x12
1587 * @oppfx 0xf2
1588 * @opcpuid sse3
1589 * @opgroup og_sse3_pcksclr_datamove
1590 * @opxcpttype 5
1591 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1592 * op1=0x22222222111111112222222211111111
1593 */
1594FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1595{
1596 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1599 {
1600 /*
1601 * Register, register.
1602 */
1603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1604 IEM_MC_BEGIN(2, 0);
1605 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1606 IEM_MC_ARG(uint64_t, uSrc, 1);
1607
1608 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1609 IEM_MC_PREPARE_SSE_USAGE();
1610
1611 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1612 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1613 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1614
1615 IEM_MC_ADVANCE_RIP();
1616 IEM_MC_END();
1617 }
1618 else
1619 {
1620 /*
1621 * Register, memory.
1622 */
1623 IEM_MC_BEGIN(2, 2);
1624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1625 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1626 IEM_MC_ARG(uint64_t, uSrc, 1);
1627
1628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1630 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1631 IEM_MC_PREPARE_SSE_USAGE();
1632
1633 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1634 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1635 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1636
1637 IEM_MC_ADVANCE_RIP();
1638 IEM_MC_END();
1639 }
1640 return VINF_SUCCESS;
1641}
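/*
 * Illustrative note (not part of IEM): MOVDDUP broadcasts the low qword of
 * the source into both halves of the destination, matching the @optest above.
 * Sketch:
 */
#if 0 /* illustrative sketch only */
static void iemEdMovddup(RTUINT128U *puDst, uint64_t uSrc)
{
    puDst->au64[0] = uSrc;
    puDst->au64[1] = uSrc;
}
#endif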
1642
1643
1644/**
1645 * @opcode 0x13
1646 * @opcodesub !11 mr/reg
1647 * @oppfx none
1648 * @opcpuid sse
1649 * @opgroup og_sse_simdfp_datamove
1650 * @opxcpttype 5
1651 * @optest op1=1 op2=2 -> op1=2
1652 * @optest op1=0 op2=-42 -> op1=-42
1653 */
1654FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1655{
1656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1657 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1658 {
1659 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1660
1661 IEM_MC_BEGIN(0, 2);
1662 IEM_MC_LOCAL(uint64_t, uSrc);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664
1665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1669
1670 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1671 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1672
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 return VINF_SUCCESS;
1676 }
1677
1678 /**
1679 * @opdone
1680 * @opmnemonic ud0f13m3
1681 * @opcode 0x13
1682 * @opcodesub 11 mr/reg
1683 * @oppfx none
1684 * @opunused immediate
1685 * @opcpuid sse
1686 * @optest ->
1687 */
1688 return IEMOP_RAISE_INVALID_OPCODE();
1689}
1690
1691
1692/**
1693 * @opcode 0x13
1694 * @opcodesub !11 mr/reg
1695 * @oppfx 0x66
1696 * @opcpuid sse2
1697 * @opgroup og_sse2_pcksclr_datamove
1698 * @opxcpttype 5
1699 * @optest op1=1 op2=2 -> op1=2
1700 * @optest op1=0 op2=-42 -> op1=-42
1701 */
1702FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1703{
1704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1705 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1706 {
1707 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1708 IEM_MC_BEGIN(0, 2);
1709 IEM_MC_LOCAL(uint64_t, uSrc);
1710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1711
1712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1716
1717 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1718 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1719
1720 IEM_MC_ADVANCE_RIP();
1721 IEM_MC_END();
1722 return VINF_SUCCESS;
1723 }
1724
1725 /**
1726 * @opdone
1727 * @opmnemonic ud660f13m3
1728 * @opcode 0x13
1729 * @opcodesub 11 mr/reg
1730 * @oppfx 0x66
1731 * @opunused immediate
1732 * @opcpuid sse
1733 * @optest ->
1734 */
1735 return IEMOP_RAISE_INVALID_OPCODE();
1736}
1737
1738
1739/**
1740 * @opmnemonic udf30f13
1741 * @opcode 0x13
1742 * @oppfx 0xf3
1743 * @opunused intel-modrm
1744 * @opcpuid sse
1745 * @optest ->
1746 * @opdone
1747 */
1748
1749/**
1750 * @opmnemonic udf20f13
1751 * @opcode 0x13
1752 * @oppfx 0xf2
1753 * @opunused intel-modrm
1754 * @opcpuid sse
1755 * @optest ->
1756 * @opdone
1757 */
1758
1759/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1760FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1761/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1762FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1763
1764/**
1765 * @opdone
1766 * @opmnemonic udf30f14
1767 * @opcode 0x14
1768 * @oppfx 0xf3
1769 * @opunused intel-modrm
1770 * @opcpuid sse
1771 * @optest ->
1772 * @opdone
1773 */
1774
1775/**
1776 * @opmnemonic udf20f14
1777 * @opcode 0x14
1778 * @oppfx 0xf2
1779 * @opunused intel-modrm
1780 * @opcpuid sse
1781 * @optest ->
1782 * @opdone
1783 */
1784
1785/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1786FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1787/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1788FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1789/* Opcode 0xf3 0x0f 0x15 - invalid */
1790/* Opcode 0xf2 0x0f 0x15 - invalid */
1791
1792/**
1793 * @opdone
1794 * @opmnemonic udf30f15
1795 * @opcode 0x15
1796 * @oppfx 0xf3
1797 * @opunused intel-modrm
1798 * @opcpuid sse
1799 * @optest ->
1800 * @opdone
1801 */
1802
1803/**
1804 * @opmnemonic udf20f15
1805 * @opcode 0x15
1806 * @oppfx 0xf2
1807 * @opunused intel-modrm
1808 * @opcpuid sse
1809 * @optest ->
1810 * @opdone
1811 */
1812
1813FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1814{
1815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1817 {
1818 /**
1819 * @opcode 0x16
1820 * @opcodesub 11 mr/reg
1821 * @oppfx none
1822 * @opcpuid sse
1823 * @opgroup og_sse_simdfp_datamove
1824 * @opxcpttype 5
1825 * @optest op1=1 op2=2 -> op1=2
1826 * @optest op1=0 op2=-42 -> op1=-42
1827 */
1828 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1829
1830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1831 IEM_MC_BEGIN(0, 1);
1832 IEM_MC_LOCAL(uint64_t, uSrc);
1833
1834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1836 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1837 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 /**
1845 * @opdone
1846 * @opcode 0x16
1847 * @opcodesub !11 mr/reg
1848 * @oppfx none
1849 * @opcpuid sse
1850 * @opgroup og_sse_simdfp_datamove
1851 * @opxcpttype 5
1852 * @optest op1=1 op2=2 -> op1=2
1853 * @optest op1=0 op2=-42 -> op1=-42
1854 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1855 */
1856 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1857
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(uint64_t, uSrc);
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}
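/*
 * Illustrative note (not part of IEM): the mirror image of the MOVHLPS sketch
 * earlier -- MOVLHPS copies the LOW qword of the source into the HIGH qword
 * of the destination, and the memory form (MOVHPS) loads the high qword from
 * memory; the low qword is preserved in both cases.  Sketch of the register
 * form:
 */
#if 0 /* illustrative sketch only */
static void iemEdMovlhps(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    puDst->au64[1] = puSrc->au64[0];    /* high(dst) = low(src) */
}
#endif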
1875
1876
1877/**
1878 * @opcode 0x16
1879 * @opcodesub !11 mr/reg
1880 * @oppfx 0x66
1881 * @opcpuid sse2
1882 * @opgroup og_sse2_pcksclr_datamove
1883 * @opxcpttype 5
1884 * @optest op1=1 op2=2 -> op1=2
1885 * @optest op1=0 op2=-42 -> op1=-42
1886 */
1887FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1888{
1889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1890 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1891 {
1892 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1893 IEM_MC_BEGIN(0, 2);
1894 IEM_MC_LOCAL(uint64_t, uSrc);
1895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1896
1897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1899 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1900 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1901
1902 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1903 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1904
1905 IEM_MC_ADVANCE_RIP();
1906 IEM_MC_END();
1907 return VINF_SUCCESS;
1908 }
1909
1910 /**
1911 * @opdone
1912 * @opmnemonic ud660f16m3
1913 * @opcode 0x16
1914 * @opcodesub 11 mr/reg
1915 * @oppfx 0x66
1916 * @opunused immediate
1917 * @opcpuid sse
1918 * @optest ->
1919 */
1920 return IEMOP_RAISE_INVALID_OPCODE();
1921}
1922
1923
1924/**
1925 * @opcode 0x16
1926 * @oppfx 0xf3
1927 * @opcpuid sse3
1928 * @opgroup og_sse3_pcksclr_datamove
1929 * @opxcpttype 4
1930 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1931 * op1=0x00000002000000020000000100000001
1932 */
1933FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1934{
1935 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1938 {
1939 /*
1940 * Register, register.
1941 */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_BEGIN(2, 0);
1944 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1945 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1946
1947 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1948 IEM_MC_PREPARE_SSE_USAGE();
1949
1950 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1951 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1952 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1953
1954 IEM_MC_ADVANCE_RIP();
1955 IEM_MC_END();
1956 }
1957 else
1958 {
1959 /*
1960 * Register, memory.
1961 */
1962 IEM_MC_BEGIN(2, 2);
1963 IEM_MC_LOCAL(RTUINT128U, uSrc);
1964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1965 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1966 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1967
1968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1971 IEM_MC_PREPARE_SSE_USAGE();
1972
1973 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1974 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1975 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1976
1977 IEM_MC_ADVANCE_RIP();
1978 IEM_MC_END();
1979 }
1980 return VINF_SUCCESS;
1981}
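
/*
 * Illustrative sketch (not part of the build): MOVSHDUP duplicates the odd
 * (high) dword of each qword lane into the even (low) position, which is
 * exactly what the @optest above encodes: the source dwords 0x00000001 and
 * 0x00000002 each end up doubled in the result.  Hypothetical helper name.
 */
#if 0
static void exampleMovShDup(uint32_t au32Dst[4], uint32_t const au32Src[4])
{
    au32Dst[0] = au32Src[1];
    au32Dst[1] = au32Src[1];
    au32Dst[2] = au32Src[3];
    au32Dst[3] = au32Src[3];
}
#endif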
1982
1983/**
1984 * @opdone
1985 * @opmnemonic udf20f16
1986 * @opcode 0x16
1987 * @oppfx 0xf2
1988 * @opunused intel-modrm
1989 * @opcpuid sse
1990 * @optest ->
1991 * @opdone
1992 */
1993
1994
1995/**
1996 * @opcode 0x17
1997 * @opcodesub !11 mr/reg
1998 * @oppfx none
1999 * @opcpuid sse
2000 * @opgroup og_sse_simdfp_datamove
2001 * @opxcpttype 5
2002 * @optest op1=1 op2=2 -> op1=2
2003 * @optest op1=0 op2=-42 -> op1=-42
2004 */
2005FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2006{
2007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2008 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2009 {
2010 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2011
2012 IEM_MC_BEGIN(0, 2);
2013 IEM_MC_LOCAL(uint64_t, uSrc);
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015
2016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2020
2021 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2022 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 return VINF_SUCCESS;
2027 }
2028
2029 /**
2030 * @opdone
2031 * @opmnemonic ud0f17m3
2032 * @opcode 0x17
2033 * @opcodesub 11 mr/reg
2034 * @oppfx none
2035 * @opunused immediate
2036 * @opcpuid sse
2037 * @optest ->
2038 */
2039 return IEMOP_RAISE_INVALID_OPCODE();
2040}
2041
2042
2043/**
2044 * @opcode 0x17
2045 * @opcodesub !11 mr/reg
2046 * @oppfx 0x66
2047 * @opcpuid sse2
2048 * @opgroup og_sse2_pcksclr_datamove
2049 * @opxcpttype 5
2050 * @optest op1=1 op2=2 -> op1=2
2051 * @optest op1=0 op2=-42 -> op1=-42
2052 */
2053FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2054{
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2059
2060 IEM_MC_BEGIN(0, 2);
2061 IEM_MC_LOCAL(uint64_t, uSrc);
2062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2063
2064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2068
2069 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2070 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2071
2072 IEM_MC_ADVANCE_RIP();
2073 IEM_MC_END();
2074 return VINF_SUCCESS;
2075 }
2076
2077 /**
2078 * @opdone
2079 * @opmnemonic ud660f17m3
2080 * @opcode 0x17
2081 * @opcodesub 11 mr/reg
2082 * @oppfx 0x66
2083 * @opunused immediate
2084 * @opcpuid sse
2085 * @optest ->
2086 */
2087 return IEMOP_RAISE_INVALID_OPCODE();
2088}
2089
2090
2091/**
2092 * @opdone
2093 * @opmnemonic udf30f17
2094 * @opcode 0x17
2095 * @oppfx 0xf3
2096 * @opunused intel-modrm
2097 * @opcpuid sse
2098 * @optest ->
2099 * @opdone
2100 */
2101
2102/**
2103 * @opmnemonic udf20f17
2104 * @opcode 0x17
2105 * @oppfx 0xf2
2106 * @opunused intel-modrm
2107 * @opcpuid sse
2108 * @optest ->
2109 * @opdone
2110 */
2111
2112
2113/** Opcode 0x0f 0x18. */
2114FNIEMOP_DEF(iemOp_prefetch_Grp16)
2115{
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2118 {
2119 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2120 {
2121 case 4: /* Aliased to /0 for the time being according to AMD. */
2122 case 5: /* Aliased to /0 for the time being according to AMD. */
2123 case 6: /* Aliased to /0 for the time being according to AMD. */
2124 case 7: /* Aliased to /0 for the time being according to AMD. */
2125 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2126 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2127 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2128 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2130 }
2131
2132 IEM_MC_BEGIN(0, 1);
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2136 /* Currently a NOP. */
2137 NOREF(GCPtrEffSrc);
2138 IEM_MC_ADVANCE_RIP();
2139 IEM_MC_END();
2140 return VINF_SUCCESS;
2141 }
2142
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144}
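
/*
 * Illustrative sketch (not part of the build): how the ModRM reg field picks
 * the prefetch hint in the switch above.  The mask/shift values mirror the
 * X86_MODRM_* constants; the helper name is just for illustration.
 */
#if 0
static const char *exampleGrp16HintName(uint8_t bRm)
{
    switch ((bRm >> 3) & 7) /* (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK */
    {
        case 1:  return "prefetcht0";
        case 2:  return "prefetcht1";
        case 3:  return "prefetcht2";
        default: return "prefetchnta"; /* /0 and the AMD-aliased /4../7 */
    }
}
#endif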
2145
2146
2147/** Opcode 0x0f 0x19..0x1f. */
2148FNIEMOP_DEF(iemOp_nop_Ev)
2149{
2150 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2153 {
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155 IEM_MC_BEGIN(0, 0);
2156 IEM_MC_ADVANCE_RIP();
2157 IEM_MC_END();
2158 }
2159 else
2160 {
2161 IEM_MC_BEGIN(0, 1);
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 /* Currently a NOP. */
2166 NOREF(GCPtrEffSrc);
2167 IEM_MC_ADVANCE_RIP();
2168 IEM_MC_END();
2169 }
2170 return VINF_SUCCESS;
2171}
2172
2173
2174/** Opcode 0x0f 0x20. */
2175FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2176{
2177 /* mod is ignored, as are operand size overrides. */
2178 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2179 IEMOP_HLP_MIN_386();
2180 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2181 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2182 else
2183 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2184
2185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2186 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2187 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2188 {
2189 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2190 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2191 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2192 iCrReg |= 8;
2193 }
2194 switch (iCrReg)
2195 {
2196 case 0: case 2: case 3: case 4: case 8:
2197 break;
2198 default:
2199 return IEMOP_RAISE_INVALID_OPCODE();
2200 }
2201 IEMOP_HLP_DONE_DECODING();
2202
2203 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2204}
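
/*
 * Illustrative sketch (not part of the build): the LOCK-as-CR8 trick handled
 * above.  On CPUs with the AMD alternative encoding, a LOCK prefix on
 * 0F 20/22 adds 8 to the control register index, so F0 0F 20 C0 reads CR8
 * into EAX in 32-bit code.  Hypothetical helper name.
 */
#if 0
static uint8_t exampleCrRegFromModRm(uint8_t bRm, bool fLockPrefix)
{
    uint8_t iCrReg = (bRm >> 3) & 7;    /* ModRM.reg */
    if (fLockPrefix)
        iCrReg |= 8;                    /* LOCK 0F 20/22 /r -> CR8..CR15 */
    return iCrReg;
}
#endif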
2205
2206
2207/** Opcode 0x0f 0x21. */
2208FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2209{
2210 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2211 IEMOP_HLP_MIN_386();
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2215 return IEMOP_RAISE_INVALID_OPCODE();
2216 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2217 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2218 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2219}
2220
2221
2222/** Opcode 0x0f 0x22. */
2223FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2224{
2225 /* mod is ignored, as are operand size overrides. */
2226 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2227 IEMOP_HLP_MIN_386();
2228 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2229 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2230 else
2231 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2232
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2235 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2236 {
2237 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2238 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2239 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2240 iCrReg |= 8;
2241 }
2242 switch (iCrReg)
2243 {
2244 case 0: case 2: case 3: case 4: case 8:
2245 break;
2246 default:
2247 return IEMOP_RAISE_INVALID_OPCODE();
2248 }
2249 IEMOP_HLP_DONE_DECODING();
2250
2251 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2252}
2253
2254
2255/** Opcode 0x0f 0x23. */
2256FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2257{
2258 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2259 IEMOP_HLP_MIN_386();
2260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2263 return IEMOP_RAISE_INVALID_OPCODE();
2264 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2265 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2266 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2267}
2268
2269
2270/** Opcode 0x0f 0x24. */
2271FNIEMOP_DEF(iemOp_mov_Rd_Td)
2272{
2273 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2274 /** @todo works on 386 and 486. */
2275 /* The RM byte is not considered, see testcase. */
2276 return IEMOP_RAISE_INVALID_OPCODE();
2277}
2278
2279
2280/** Opcode 0x0f 0x26. */
2281FNIEMOP_DEF(iemOp_mov_Td_Rd)
2282{
2283 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2284 /** @todo works on 386 and 486. */
2285 /* The RM byte is not considered, see testcase. */
2286 return IEMOP_RAISE_INVALID_OPCODE();
2287}
2288
2289
2290/**
2291 * @opcode 0x28
2292 * @oppfx none
2293 * @opcpuid sse
2294 * @opgroup og_sse_simdfp_datamove
2295 * @opxcpttype 1
2296 * @optest op1=1 op2=2 -> op1=2
2297 * @optest op1=0 op2=-42 -> op1=-42
2298 */
2299FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2300{
2301 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2304 {
2305 /*
2306 * Register, register.
2307 */
2308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2309 IEM_MC_BEGIN(0, 0);
2310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2313 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2314 IEM_MC_ADVANCE_RIP();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 IEM_MC_BEGIN(0, 2);
2323 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2328 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2330
2331 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2332 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2333
2334 IEM_MC_ADVANCE_RIP();
2335 IEM_MC_END();
2336 }
2337 return VINF_SUCCESS;
2338}
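
/*
 * Illustrative sketch (not part of the build): the 16-byte alignment rule
 * that IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces for MOVAPS/MOVAPD above.  A
 * misaligned effective address yields \#GP(0) rather than a slow access.
 * Hypothetical helper name; exception details beyond the basic check vary
 * by CPU and mode.
 */
#if 0
static bool exampleIsMovApsAddrOk(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* 128-bit operands must be 16-byte aligned */
}
#endif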
2339
2340/**
2341 * @opcode 0x28
2342 * @oppfx 0x66
2343 * @opcpuid sse2
2344 * @opgroup og_sse2_pcksclr_datamove
2345 * @opxcpttype 1
2346 * @optest op1=1 op2=2 -> op1=2
2347 * @optest op1=0 op2=-42 -> op1=-42
2348 */
2349FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2350{
2351 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2354 {
2355 /*
2356 * Register, register.
2357 */
2358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2359 IEM_MC_BEGIN(0, 0);
2360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2361 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2362 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2363 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2364 IEM_MC_ADVANCE_RIP();
2365 IEM_MC_END();
2366 }
2367 else
2368 {
2369 /*
2370 * Register, memory.
2371 */
2372 IEM_MC_BEGIN(0, 2);
2373 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2375
2376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2378 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2380
2381 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2382 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2383
2384 IEM_MC_ADVANCE_RIP();
2385 IEM_MC_END();
2386 }
2387 return VINF_SUCCESS;
2388}
2389
2390/* Opcode 0xf3 0x0f 0x28 - invalid */
2391/* Opcode 0xf2 0x0f 0x28 - invalid */
2392
2393/**
2394 * @opcode 0x29
2395 * @oppfx none
2396 * @opcpuid sse
2397 * @opgroup og_sse_simdfp_datamove
2398 * @opxcpttype 1
2399 * @optest op1=1 op2=2 -> op1=2
2400 * @optest op1=0 op2=-42 -> op1=-42
2401 */
2402FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2403{
2404 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2407 {
2408 /*
2409 * Register, register.
2410 */
2411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2412 IEM_MC_BEGIN(0, 0);
2413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2415 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2416 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2417 IEM_MC_ADVANCE_RIP();
2418 IEM_MC_END();
2419 }
2420 else
2421 {
2422 /*
2423 * Memory, register.
2424 */
2425 IEM_MC_BEGIN(0, 2);
2426 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428
2429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2431 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2433
2434 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2435 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 return VINF_SUCCESS;
2441}
2442
2443/**
2444 * @opcode 0x29
2445 * @oppfx 0x66
2446 * @opcpuid sse2
2447 * @opgroup og_sse2_pcksclr_datamove
2448 * @opxcpttype 1
2449 * @optest op1=1 op2=2 -> op1=2
2450 * @optest op1=0 op2=-42 -> op1=-42
2451 */
2452FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2453{
2454 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2457 {
2458 /*
2459 * Register, register.
2460 */
2461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2462 IEM_MC_BEGIN(0, 0);
2463 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2464 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2465 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2466 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2467 IEM_MC_ADVANCE_RIP();
2468 IEM_MC_END();
2469 }
2470 else
2471 {
2472 /*
2473 * Memory, register.
2474 */
2475 IEM_MC_BEGIN(0, 2);
2476 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2478
2479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2483
2484 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2485 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP();
2488 IEM_MC_END();
2489 }
2490 return VINF_SUCCESS;
2491}
2492
2493/* Opcode 0xf3 0x0f 0x29 - invalid */
2494/* Opcode 0xf2 0x0f 0x29 - invalid */
2495
2496
2497/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2498FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2499/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2500FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2501/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2502FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2503/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2504FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2505
2506
2507/** Opcode 0x0f 0x2b - movntps Mps, Vps */
2508FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2509{
2510 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2512 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2513 {
2514 /*
2515 * memory, register.
2516 */
2517 IEM_MC_BEGIN(0, 2);
2518 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2520
2521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2523 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2524 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2525
2526 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2527 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2528
2529 IEM_MC_ADVANCE_RIP();
2530 IEM_MC_END();
2531 }
2532 /* The register, register encoding is invalid. */
2533 else
2534 return IEMOP_RAISE_INVALID_OPCODE();
2535 return VINF_SUCCESS;
2536}
2537
2538/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2539FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2540{
2541 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2543 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2544 {
2545 /*
2546 * memory, register.
2547 */
2548 IEM_MC_BEGIN(0, 2);
2549 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2551
2552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2554 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2555 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2556
2557 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2558 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2559
2560 IEM_MC_ADVANCE_RIP();
2561 IEM_MC_END();
2562 }
2563 /* The register, register encoding is invalid. */
2564 else
2565 return IEMOP_RAISE_INVALID_OPCODE();
2566 return VINF_SUCCESS;
2567}
2568/* Opcode 0xf3 0x0f 0x2b - invalid */
2569/* Opcode 0xf2 0x0f 0x2b - invalid */
2570
2571
2572/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2573FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2574/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2575FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2576/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2577FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2578/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2579FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2580
2581/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2582FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2583/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2584FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2585/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2586FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2587/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2588FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2589
2590/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2591FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2592/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2593FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2594/* Opcode 0xf3 0x0f 0x2e - invalid */
2595/* Opcode 0xf2 0x0f 0x2e - invalid */
2596
2597/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2598FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2599/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2600FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2601/* Opcode 0xf3 0x0f 0x2f - invalid */
2602/* Opcode 0xf2 0x0f 0x2f - invalid */
2603
2604/** Opcode 0x0f 0x30. */
2605FNIEMOP_DEF(iemOp_wrmsr)
2606{
2607 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2609 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2610}
2611
2612
2613/** Opcode 0x0f 0x31. */
2614FNIEMOP_DEF(iemOp_rdtsc)
2615{
2616 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2618 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2619}
2620
2621
2622/** Opcode 0x0f 0x32. */
2623FNIEMOP_DEF(iemOp_rdmsr)
2624{
2625 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2627 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2628}
2629
2630
2631/** Opcode 0x0f 0x33. */
2632FNIEMOP_DEF(iemOp_rdpmc)
2633{
2634 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2636 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2637}
2638
2639
2640/** Opcode 0x0f 0x34. */
2641FNIEMOP_STUB(iemOp_sysenter);
2642/** Opcode 0x0f 0x35. */
2643FNIEMOP_STUB(iemOp_sysexit);
2644/** Opcode 0x0f 0x37. */
2645FNIEMOP_STUB(iemOp_getsec);
2646
2647
2648/** Opcode 0x0f 0x38. */
2649FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2650{
2651#ifdef IEM_WITH_THREE_0F_38
2652 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2653 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2654#else
2655 IEMOP_BITCH_ABOUT_STUB();
2656 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2657#endif
2658}
2659
2660
2661/** Opcode 0x0f 0x3a. */
2662FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2663{
2664#ifdef IEM_WITH_THREE_0F_3A
2665 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2666 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2667#else
2668 IEMOP_BITCH_ABOUT_STUB();
2669 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2670#endif
2671}
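
/*
 * Illustrative sketch (not part of the build): the `b * 4 + idxPrefix`
 * indexing used by both escape handlers above.  Each opcode byte owns four
 * consecutive table slots, one per mandatory-prefix column.  The column
 * numbering in the comment below is an assumption for illustration; the
 * authoritative mapping is whatever pVCpu->iem.s.idxPrefix encodes.
 */
#if 0
static size_t exampleThreeByteTableIndex(uint8_t bOpcode, unsigned idxPrefix)
{
    /* idxPrefix: 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 (assumed). */
    return (size_t)bOpcode * 4 + idxPrefix;
}
#endif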
2672
2673
2674/**
2675 * Implements a conditional move.
2676 *
2677 * Wish there was an obvious way to do this where we could share and reduce
2678 * code bloat.
2679 *
2680 * @param a_Cnd The conditional "microcode" operation.
2681 */
2682#define CMOV_X(a_Cnd) \
2683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2684 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2685 { \
2686 switch (pVCpu->iem.s.enmEffOpSize) \
2687 { \
2688 case IEMMODE_16BIT: \
2689 IEM_MC_BEGIN(0, 1); \
2690 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2691 a_Cnd { \
2692 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2693 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2694 } IEM_MC_ENDIF(); \
2695 IEM_MC_ADVANCE_RIP(); \
2696 IEM_MC_END(); \
2697 return VINF_SUCCESS; \
2698 \
2699 case IEMMODE_32BIT: \
2700 IEM_MC_BEGIN(0, 1); \
2701 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2702 a_Cnd { \
2703 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2704 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2705 } IEM_MC_ELSE() { \
2706 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2707 } IEM_MC_ENDIF(); \
2708 IEM_MC_ADVANCE_RIP(); \
2709 IEM_MC_END(); \
2710 return VINF_SUCCESS; \
2711 \
2712 case IEMMODE_64BIT: \
2713 IEM_MC_BEGIN(0, 1); \
2714 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2715 a_Cnd { \
2716 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2717 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2718 } IEM_MC_ENDIF(); \
2719 IEM_MC_ADVANCE_RIP(); \
2720 IEM_MC_END(); \
2721 return VINF_SUCCESS; \
2722 \
2723 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2724 } \
2725 } \
2726 else \
2727 { \
2728 switch (pVCpu->iem.s.enmEffOpSize) \
2729 { \
2730 case IEMMODE_16BIT: \
2731 IEM_MC_BEGIN(0, 2); \
2732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2733 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2735 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2736 a_Cnd { \
2737 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2738 } IEM_MC_ENDIF(); \
2739 IEM_MC_ADVANCE_RIP(); \
2740 IEM_MC_END(); \
2741 return VINF_SUCCESS; \
2742 \
2743 case IEMMODE_32BIT: \
2744 IEM_MC_BEGIN(0, 2); \
2745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2746 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2748 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2749 a_Cnd { \
2750 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2751 } IEM_MC_ELSE() { \
2752 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2753 } IEM_MC_ENDIF(); \
2754 IEM_MC_ADVANCE_RIP(); \
2755 IEM_MC_END(); \
2756 return VINF_SUCCESS; \
2757 \
2758 case IEMMODE_64BIT: \
2759 IEM_MC_BEGIN(0, 2); \
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2761 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2763 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2764 a_Cnd { \
2765 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2766 } IEM_MC_ENDIF(); \
2767 IEM_MC_ADVANCE_RIP(); \
2768 IEM_MC_END(); \
2769 return VINF_SUCCESS; \
2770 \
2771 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2772 } \
2773 } do {} while (0)
2774
2775
2776
2777/** Opcode 0x0f 0x40. */
2778FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2779{
2780 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2781 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2782}
2783
2784
2785/** Opcode 0x0f 0x41. */
2786FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2787{
2788 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2789 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2790}
2791
2792
2793/** Opcode 0x0f 0x42. */
2794FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2795{
2796 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2797 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2798}
2799
2800
2801/** Opcode 0x0f 0x43. */
2802FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2803{
2804 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2805 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2806}
2807
2808
2809/** Opcode 0x0f 0x44. */
2810FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2811{
2812 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2813 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2814}
2815
2816
2817/** Opcode 0x0f 0x45. */
2818FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2819{
2820 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2821 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2822}
2823
2824
2825/** Opcode 0x0f 0x46. */
2826FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2827{
2828 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2829 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2830}
2831
2832
2833/** Opcode 0x0f 0x47. */
2834FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2835{
2836 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2837 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2838}
2839
2840
2841/** Opcode 0x0f 0x48. */
2842FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2843{
2844 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2845 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2846}
2847
2848
2849/** Opcode 0x0f 0x49. */
2850FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2851{
2852 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2853 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2854}
2855
2856
2857/** Opcode 0x0f 0x4a. */
2858FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2859{
2860 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2861 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2862}
2863
2864
2865/** Opcode 0x0f 0x4b. */
2866FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2867{
2868 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2869 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2870}
2871
2872
2873/** Opcode 0x0f 0x4c. */
2874FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2875{
2876 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2877 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2878}
2879
2880
2881/** Opcode 0x0f 0x4d. */
2882FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2883{
2884 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2885 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2886}
2887
2888
2889/** Opcode 0x0f 0x4e. */
2890FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2891{
2892 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2893 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2894}
2895
2896
2897/** Opcode 0x0f 0x4f. */
2898FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2899{
2900 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2901 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2902}
2903
2904#undef CMOV_X
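
/*
 * Illustrative sketch (not part of the build): how the signed "less"
 * condition used by CMOVL above falls out of EFLAGS, i.e. SF != OF.  The
 * double negation normalises the masked bits to booleans before comparing.
 */
#if 0
static bool exampleCmovlConditionMet(uint32_t fEFlags)
{
    return !!(fEFlags & X86_EFL_SF) != !!(fEFlags & X86_EFL_OF);
}
#endif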
2905
2906/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2907FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2908/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2909FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2910/* Opcode 0xf3 0x0f 0x50 - invalid */
2911/* Opcode 0xf2 0x0f 0x50 - invalid */
2912
2913/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2914FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2915/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2916FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2917/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2918FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2919/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2920FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2921
2922/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2923FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2924/* Opcode 0x66 0x0f 0x52 - invalid */
2925/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2926FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2927/* Opcode 0xf2 0x0f 0x52 - invalid */
2928
2929/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2930FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2931/* Opcode 0x66 0x0f 0x53 - invalid */
2932/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2933FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2934/* Opcode 0xf2 0x0f 0x53 - invalid */
2935
2936/** Opcode 0x0f 0x54 - andps Vps, Wps */
2937FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2938/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2939FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2940/* Opcode 0xf3 0x0f 0x54 - invalid */
2941/* Opcode 0xf2 0x0f 0x54 - invalid */
2942
2943/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2944FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2945/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2946FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2947/* Opcode 0xf3 0x0f 0x55 - invalid */
2948/* Opcode 0xf2 0x0f 0x55 - invalid */
2949
2950/** Opcode 0x0f 0x56 - orps Vps, Wps */
2951FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2952/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2953FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2954/* Opcode 0xf3 0x0f 0x56 - invalid */
2955/* Opcode 0xf2 0x0f 0x56 - invalid */
2956
2957/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2958FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2959/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2960FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2961/* Opcode 0xf3 0x0f 0x57 - invalid */
2962/* Opcode 0xf2 0x0f 0x57 - invalid */
2963
2964/** Opcode 0x0f 0x58 - addps Vps, Wps */
2965FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2966/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2967FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2968/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2969FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2970/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2971FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2972
2973/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2974FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2975/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2976FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2977/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2978FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2979/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2980FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2981
2982/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2983FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2984/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2985FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2986/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2987FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2988/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2989FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2990
2991/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2992FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2993/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2994FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2995/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2996FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2997/* Opcode 0xf2 0x0f 0x5b - invalid */
2998
2999/** Opcode 0x0f 0x5c - subps Vps, Wps */
3000FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3001/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3002FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3003/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3004FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3005/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3006FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3007
3008/** Opcode 0x0f 0x5d - minps Vps, Wps */
3009FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3010/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3011FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3012/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3013FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3014/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3015FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3016
3017/** Opcode 0x0f 0x5e - divps Vps, Wps */
3018FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3019/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3020FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3021/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3022FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3023/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3024FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3025
3026/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3027FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3028/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3029FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3030/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3031FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3032/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3033FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3034
3035/**
3036 * Common worker for SSE2 instructions on the forms:
3037 * pxxxx xmm1, xmm2/mem128
3038 *
3039 * The 2nd operand is the low half of a register, which in the memory case
3040 * means a 64-bit access that must be 128-bit aligned (the body below
3041 * fetches 64 bits with a 128-bit alignment check).
3042 *
3043 * Exceptions type 4.
3044 */
3045FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3046{
3047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3048 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3049 {
3050 /*
3051 * Register, register.
3052 */
3053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3054 IEM_MC_BEGIN(2, 0);
3055 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3056 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3057 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3058 IEM_MC_PREPARE_SSE_USAGE();
3059 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3060 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3061 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3062 IEM_MC_ADVANCE_RIP();
3063 IEM_MC_END();
3064 }
3065 else
3066 {
3067 /*
3068 * Register, memory.
3069 */
3070 IEM_MC_BEGIN(2, 2);
3071 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3072 IEM_MC_LOCAL(uint64_t, uSrc);
3073 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3075
3076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3078 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3079 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3080
3081 IEM_MC_PREPARE_SSE_USAGE();
3082 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3083 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3084
3085 IEM_MC_ADVANCE_RIP();
3086 IEM_MC_END();
3087 }
3088 return VINF_SUCCESS;
3089}
3090
3091
3092/**
3093 * Common worker for MMX instructions on the forms:
3094 * pxxxx mm1, mm2/mem32
3095 *
3096 * The 2nd operand is the low half of a register, which in the memory case
3097 * means a 32-bit memory access. Entries without an MMX implementation
3098 * (NULL pfnU64) raise invalid opcode, see the guard below.
3099 *
3100 * Exceptions type 4.
3101 */
3102FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3103{
3104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3105 if (!pImpl->pfnU64)
3106 return IEMOP_RAISE_INVALID_OPCODE();
3107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3108 {
3109 /*
3110 * Register, register.
3111 */
3112 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3113 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3115 IEM_MC_BEGIN(2, 0);
3116 IEM_MC_ARG(uint64_t *, pDst, 0);
3117 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3118 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3119 IEM_MC_PREPARE_FPU_USAGE();
3120 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3121 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3122 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3123 IEM_MC_ADVANCE_RIP();
3124 IEM_MC_END();
3125 }
3126 else
3127 {
3128 /*
3129 * Register, memory.
3130 */
3131 IEM_MC_BEGIN(2, 2);
3132 IEM_MC_ARG(uint64_t *, pDst, 0);
3133 IEM_MC_LOCAL(uint32_t, uSrc);
3134 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3136
3137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3139 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3140 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3141
3142 IEM_MC_PREPARE_FPU_USAGE();
3143 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3144 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3145
3146 IEM_MC_ADVANCE_RIP();
3147 IEM_MC_END();
3148 }
3149 return VINF_SUCCESS;
3150}
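
/*
 * Illustrative sketch (not part of the build): what a typical LowLow
 * operation (PUNPCKLBW) does with the halves the workers above fetch: the
 * low bytes of destination and source are interleaved into a full result.
 * Shown for the 64-bit MMX case; hypothetical helper name.
 */
#if 0
static uint64_t examplePunpcklbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);      /* even result bytes from dst */
        uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8);  /* odd result bytes from src */
    }
    return uResult;
}
#endif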
3151
3152
3153/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3154FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3155{
3156 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3157 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3158}
3159
3160/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3161FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3162{
3163 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3164 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3165}
3166
3167/* Opcode 0xf3 0x0f 0x60 - invalid */
3168
3169
3170/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3171FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3172{
3173 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
3174 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3175}
3176
3177/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3178FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3179{
3180 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3181 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3182}
3183
3184/* Opcode 0xf3 0x0f 0x61 - invalid */
3185
3186
3187/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3188FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3189{
3190 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3191 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3192}
3193
3194/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3195FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3196{
3197 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3198 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3199}
3200
3201/* Opcode 0xf3 0x0f 0x62 - invalid */
3202
3203
3204
3205/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3206FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3207/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3208FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3209/* Opcode 0xf3 0x0f 0x63 - invalid */
3210
3211/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3212FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3213/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3214FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3215/* Opcode 0xf3 0x0f 0x64 - invalid */
3216
3217/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3218FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3219/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3220FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3221/* Opcode 0xf3 0x0f 0x65 - invalid */
3222
3223/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3224FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3225/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3226FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3227/* Opcode 0xf3 0x0f 0x66 - invalid */
3228
3229/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3230FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3231/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3232FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3233/* Opcode 0xf3 0x0f 0x67 - invalid */
3234
3235
3236/**
3237 * Common worker for MMX instructions on the form:
3238 * pxxxx mm1, mm2/mem64
3239 *
3240 * The 2nd operand is the high half of a register, which in the memory
3241 * case means a full 64-bit memory access of which only the upper half is
3242 * actually used.
3243 *
3244 * Exceptions type 4.
3245 */
3246FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3247{
3248 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3249 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3250 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3251 {
3252 /*
3253 * Register, register.
3254 */
3255 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3256 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3258 IEM_MC_BEGIN(2, 0);
3259 IEM_MC_ARG(uint64_t *, pDst, 0);
3260 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3261 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3262 IEM_MC_PREPARE_FPU_USAGE();
3263 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3264 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3265 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3266 IEM_MC_ADVANCE_RIP();
3267 IEM_MC_END();
3268 }
3269 else
3270 {
3271 /*
3272 * Register, memory.
3273 */
3274 IEM_MC_BEGIN(2, 2);
3275 IEM_MC_ARG(uint64_t *, pDst, 0);
3276 IEM_MC_LOCAL(uint64_t, uSrc);
3277 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3279
3280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3282 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3283 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3284
3285 IEM_MC_PREPARE_FPU_USAGE();
3286 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3287 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3288
3289 IEM_MC_ADVANCE_RIP();
3290 IEM_MC_END();
3291 }
3292 return VINF_SUCCESS;
3293}
3294
3295
3296/**
3297 * Common worker for SSE2 instructions on the form:
3298 * pxxxx xmm1, xmm2/mem128
3299 *
3300 * The 2nd operand is the high half of a register, which in the memory
3301 * case means a 128-bit aligned access; the implementation may read the
3302 * full 128 bits or only the upper 64 bits.
3303 *
3304 * Exceptions type 4.
3305 */
3306FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3307{
3308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3310 {
3311 /*
3312 * Register, register.
3313 */
3314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3315 IEM_MC_BEGIN(2, 0);
3316 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3317 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3318 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3319 IEM_MC_PREPARE_SSE_USAGE();
3320 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3321 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3322 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3323 IEM_MC_ADVANCE_RIP();
3324 IEM_MC_END();
3325 }
3326 else
3327 {
3328 /*
3329 * Register, memory.
3330 */
3331 IEM_MC_BEGIN(2, 2);
3332 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3333 IEM_MC_LOCAL(RTUINT128U, uSrc);
3334 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3336
3337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3339 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3340 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3341
3342 IEM_MC_PREPARE_SSE_USAGE();
3343 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3344 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3345
3346 IEM_MC_ADVANCE_RIP();
3347 IEM_MC_END();
3348 }
3349 return VINF_SUCCESS;
3350}
3351
3352
3353/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3354FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3355{
3356 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3357 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3358}
3359
3360/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3361FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3362{
3363 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3364 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3365}
3366/* Opcode 0xf3 0x0f 0x68 - invalid */
3367
3368
3369/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3370FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3371{
3372 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3373 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3374}
3375
3376/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3377FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3378{
3379 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3380 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3381}
3382
3383/* Opcode 0xf3 0x0f 0x69 - invalid */
3384
3385
3386/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3387FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3388{
3389 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3390 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3391}
3392
3393/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3394FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3395{
3396 IEMOP_MNEMONIC(punpckhdq_Vx_Wx, "punpckhdq Vx, Wx");
3397 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3398}
3399/* Opcode 0xf3 0x0f 0x6a - invalid */
3400
3401
3402/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3403FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3404/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3405FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3406/* Opcode 0xf3 0x0f 0x6b - invalid */
3407
3408
3409/* Opcode 0x0f 0x6c - invalid */
3410
3411/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3412FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3413{
3414 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3415 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3416}
3417
3418/* Opcode 0xf3 0x0f 0x6c - invalid */
3419/* Opcode 0xf2 0x0f 0x6c - invalid */
3420
3421
3422/* Opcode 0x0f 0x6d - invalid */
3423
3424/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3425FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3426{
3427 IEMOP_MNEMONIC(punpckhqdq_Vx_Wx, "punpckhqdq Vx,Wx");
3428 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3429}
3430
3431/* Opcode 0xf3 0x0f 0x6d - invalid */
3432
3433
3434/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3435FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3436{
3437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3438 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3439 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3440 else
3441 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3442 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3443 {
3444 /* MMX, greg */
3445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3446 IEM_MC_BEGIN(0, 1);
3447 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3448 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3449 IEM_MC_LOCAL(uint64_t, u64Tmp);
3450 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3451 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3452 else
3453 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3454 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3455 IEM_MC_ADVANCE_RIP();
3456 IEM_MC_END();
3457 }
3458 else
3459 {
3460 /* MMX, [mem] */
3461 IEM_MC_BEGIN(0, 2);
3462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3463 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3466 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3467 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3468 {
3469 IEM_MC_LOCAL(uint64_t, u64Tmp);
3470 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3471 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3472 }
3473 else
3474 {
3475 IEM_MC_LOCAL(uint32_t, u32Tmp);
3476 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3477 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3478 }
3479 IEM_MC_ADVANCE_RIP();
3480 IEM_MC_END();
3481 }
3482 return VINF_SUCCESS;
3483}
3484
3485/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3486FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3487{
3488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3489 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3490 IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
3491 else
3492 IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
3493 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3494 {
3495 /* XMM, greg*/
3496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3497 IEM_MC_BEGIN(0, 1);
3498 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3499 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3500 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3501 {
3502 IEM_MC_LOCAL(uint64_t, u64Tmp);
3503 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3504 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3505 }
3506 else
3507 {
3508 IEM_MC_LOCAL(uint32_t, u32Tmp);
3509 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3510 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3511 }
3512 IEM_MC_ADVANCE_RIP();
3513 IEM_MC_END();
3514 }
3515 else
3516 {
3517 /* XMM, [mem] */
3518 IEM_MC_BEGIN(0, 2);
3519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3520 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3523 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3524 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3525 {
3526 IEM_MC_LOCAL(uint64_t, u64Tmp);
3527 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3528 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3529 }
3530 else
3531 {
3532 IEM_MC_LOCAL(uint32_t, u32Tmp);
3533 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3534 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3535 }
3536 IEM_MC_ADVANCE_RIP();
3537 IEM_MC_END();
3538 }
3539 return VINF_SUCCESS;
3540}
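
/*
 * Illustrative sketch (not part of the build): the REX.W distinction the two
 * handlers above make.  Without REX.W the 32-bit source is zero-extended,
 * and in both cases the rest of the 128-bit destination is zeroed (that is
 * what the _ZX_U128 stores do).  Hypothetical stand-in types/names.
 */
#if 0
typedef struct EXAMPLEXMM2 { uint64_t au64[2]; } EXAMPLEXMM2;

static void exampleMovdMovq(EXAMPLEXMM2 *pDst, uint64_t uSrc, bool fRexW)
{
    pDst->au64[0] = fRexW ? uSrc : (uint32_t)uSrc; /* movd zero-extends 32->64 */
    pDst->au64[1] = 0;                             /* upper qword always cleared */
}
#endif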
3541
3542/* Opcode 0xf3 0x0f 0x6e - invalid */
3543
3544
3545/** Opcode 0x0f 0x6f - movq Pq, Qq */
3546FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3547{
3548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3549 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3550 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3551 {
3552 /*
3553 * Register, register.
3554 */
3555 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3556 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3558 IEM_MC_BEGIN(0, 1);
3559 IEM_MC_LOCAL(uint64_t, u64Tmp);
3560 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3561 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3562 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3563 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3564 IEM_MC_ADVANCE_RIP();
3565 IEM_MC_END();
3566 }
3567 else
3568 {
3569 /*
3570 * Register, memory.
3571 */
3572 IEM_MC_BEGIN(0, 2);
3573 IEM_MC_LOCAL(uint64_t, u64Tmp);
3574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3575
3576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3578 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3579 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3580 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3581 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3582
3583 IEM_MC_ADVANCE_RIP();
3584 IEM_MC_END();
3585 }
3586 return VINF_SUCCESS;
3587}
3588
3589/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3590FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3591{
3592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3593 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3595 {
3596 /*
3597 * Register, register.
3598 */
3599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3600 IEM_MC_BEGIN(0, 0);
3601 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3602 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3603 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3604 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3605 IEM_MC_ADVANCE_RIP();
3606 IEM_MC_END();
3607 }
3608 else
3609 {
3610 /*
3611 * Register, memory.
3612 */
3613 IEM_MC_BEGIN(0, 2);
3614 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3616
3617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3619 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3620 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3621 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3622 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3623
3624 IEM_MC_ADVANCE_RIP();
3625 IEM_MC_END();
3626 }
3627 return VINF_SUCCESS;
3628}
3629
3630/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3631FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3632{
3633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3634 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3635 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3636 {
3637 /*
3638 * Register, register.
3639 */
3640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3641 IEM_MC_BEGIN(0, 0);
3642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3643 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3644 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3645 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3646 IEM_MC_ADVANCE_RIP();
3647 IEM_MC_END();
3648 }
3649 else
3650 {
3651 /*
3652 * Register, memory.
3653 */
3654 IEM_MC_BEGIN(0, 2);
3655 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3657
3658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3660 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3661 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3662 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3663 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3664
3665 IEM_MC_ADVANCE_RIP();
3666 IEM_MC_END();
3667 }
3668 return VINF_SUCCESS;
3669}
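
/*
 * Illustrative sketch (not part of the build): the only difference between
 * MOVDQA and MOVDQU above is the alignment contract of the memory access;
 * the register-to-register forms are identical.  Hypothetical helper name.
 */
#if 0
static int exampleMovdqMemCheck(uint64_t GCPtrEff, bool fAligned /* movdqa? */)
{
    if (fAligned && (GCPtrEff & 15))
        return -1; /* movdqa on a misaligned address -> \#GP(0) */
    return 0;      /* movdqu accepts any alignment */
}
#endif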
3670
3671
3672/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3673FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3674{
3675 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3678 {
3679 /*
3680 * Register, register.
3681 */
3682 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3684
3685 IEM_MC_BEGIN(3, 0);
3686 IEM_MC_ARG(uint64_t *, pDst, 0);
3687 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3688 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3689 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3690 IEM_MC_PREPARE_FPU_USAGE();
3691 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3692 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3693 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3694 IEM_MC_ADVANCE_RIP();
3695 IEM_MC_END();
3696 }
3697 else
3698 {
3699 /*
3700 * Register, memory.
3701 */
3702 IEM_MC_BEGIN(3, 2);
3703 IEM_MC_ARG(uint64_t *, pDst, 0);
3704 IEM_MC_LOCAL(uint64_t, uSrc);
3705 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3707
3708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
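/* Note: passing cbImm=1 accounts for the trailing immediate byte when
   calculating the effective address, which matters for RIP-relative
   addressing in 64-bit mode. */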
3709 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3710 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3712 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3713
3714 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3715 IEM_MC_PREPARE_FPU_USAGE();
3716 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3717 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3718
3719 IEM_MC_ADVANCE_RIP();
3720 IEM_MC_END();
3721 }
3722 return VINF_SUCCESS;
3723}
3724
3725/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3726FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3727{
3728 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3731 {
3732 /*
3733 * Register, register.
3734 */
3735 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3737
3738 IEM_MC_BEGIN(3, 0);
3739 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3740 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3741 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3742 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3743 IEM_MC_PREPARE_SSE_USAGE();
3744 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3745 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3746 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3747 IEM_MC_ADVANCE_RIP();
3748 IEM_MC_END();
3749 }
3750 else
3751 {
3752 /*
3753 * Register, memory.
3754 */
3755 IEM_MC_BEGIN(3, 2);
3756 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3757 IEM_MC_LOCAL(RTUINT128U, uSrc);
3758 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3760
3761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3762 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3763 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3765 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3766
3767 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3768 IEM_MC_PREPARE_SSE_USAGE();
3769 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3770 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3771
3772 IEM_MC_ADVANCE_RIP();
3773 IEM_MC_END();
3774 }
3775 return VINF_SUCCESS;
3776}
3777
3778/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3779FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3780{
3781 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3782 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3783 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3784 {
3785 /*
3786 * Register, register.
3787 */
3788 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3790
3791 IEM_MC_BEGIN(3, 0);
3792 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3793 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3794 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3795 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3796 IEM_MC_PREPARE_SSE_USAGE();
3797 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3798 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3799 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3800 IEM_MC_ADVANCE_RIP();
3801 IEM_MC_END();
3802 }
3803 else
3804 {
3805 /*
3806 * Register, memory.
3807 */
3808 IEM_MC_BEGIN(3, 2);
3809 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3810 IEM_MC_LOCAL(RTUINT128U, uSrc);
3811 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3813
3814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3815 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3816 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3818 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3819
3820 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3821 IEM_MC_PREPARE_SSE_USAGE();
3822 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3823 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3824
3825 IEM_MC_ADVANCE_RIP();
3826 IEM_MC_END();
3827 }
3828 return VINF_SUCCESS;
3829}
3830
3831/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3832FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3833{
3834 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3836 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3837 {
3838 /*
3839 * Register, register.
3840 */
3841 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3843
3844 IEM_MC_BEGIN(3, 0);
3845 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3846 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3847 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3848 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3849 IEM_MC_PREPARE_SSE_USAGE();
3850 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3851 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3852 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3853 IEM_MC_ADVANCE_RIP();
3854 IEM_MC_END();
3855 }
3856 else
3857 {
3858 /*
3859 * Register, memory.
3860 */
3861 IEM_MC_BEGIN(3, 2);
3862 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3863 IEM_MC_LOCAL(RTUINT128U, uSrc);
3864 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3866
3867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3868 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3869 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3871 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3872
3873 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3874 IEM_MC_PREPARE_SSE_USAGE();
3875 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3876 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3877
3878 IEM_MC_ADVANCE_RIP();
3879 IEM_MC_END();
3880 }
3881 return VINF_SUCCESS;
3882}
3883
3884
3885/** Opcode 0x0f 0x71 11/2. */
3886FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3887
3888/** Opcode 0x66 0x0f 0x71 11/2. */
3889FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3890
3891/** Opcode 0x0f 0x71 11/4. */
3892FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3893
3894/** Opcode 0x66 0x0f 0x71 11/4. */
3895FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3896
3897/** Opcode 0x0f 0x71 11/6. */
3898FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3899
3900/** Opcode 0x66 0x0f 0x71 11/6. */
3901FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3902
3903
3904/**
3905 * Group 12 jump table for register variant.
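*
* 8 rows (ModR/M.reg, /0../7) by 4 columns (pVCpu->iem.s.idxPrefix). A
* worked lookup, assuming the columns follow the no-prefix/66/F3/F2 order
* suggested by the entries: 66 0F 71 /2 ib (psrlw Ux,Ib) selects row 2,
* column 1, i.e. g_apfnGroup12RegReg[2 * 4 + 1] == iemOp_Grp12_psrlw_Ux_Ib.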
3906 */
3907IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3908{
3909 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3910 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3911 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3912 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3913 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3914 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3915 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3916 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3917};
3918AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3919
3920
3921/** Opcode 0x0f 0x71. */
3922FNIEMOP_DEF(iemOp_Grp12)
3923{
3924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3926 /* register, register */
3927 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3928 + pVCpu->iem.s.idxPrefix], bRm);
3929 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3930}
3931
3932
3933/** Opcode 0x0f 0x72 11/2. */
3934FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3935
3936/** Opcode 0x66 0x0f 0x72 11/2. */
3937FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3938
3939/** Opcode 0x0f 0x72 11/4. */
3940FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3941
3942/** Opcode 0x66 0x0f 0x72 11/4. */
3943FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3944
3945/** Opcode 0x0f 0x72 11/6. */
3946FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3947
3948/** Opcode 0x66 0x0f 0x72 11/6. */
3949FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3950
3951
3952/**
3953 * Group 13 jump table for register variant.
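*
* Same 8 x 4 (reg by prefix) layout as g_apfnGroup12RegReg above.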
3954 */
3955IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3956{
3957 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3958 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3959 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3960 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3961 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3962 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3963 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3964 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3965};
3966AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3967
3968/** Opcode 0x0f 0x72. */
3969FNIEMOP_DEF(iemOp_Grp13)
3970{
3971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3973 /* register, register */
3974 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3975 + pVCpu->iem.s.idxPrefix], bRm);
3976 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3977}
3978
3979
3980/** Opcode 0x0f 0x73 11/2. */
3981FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3982
3983/** Opcode 0x66 0x0f 0x73 11/2. */
3984FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3985
3986/** Opcode 0x66 0x0f 0x73 11/3. */
3987FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3988
3989/** Opcode 0x0f 0x73 11/6. */
3990FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3991
3992/** Opcode 0x66 0x0f 0x73 11/6. */
3993FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3994
3995/** Opcode 0x66 0x0f 0x73 11/7. */
3996FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3997
3998/**
3999 * Group 14 jump table for register variant.
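*
* Same 8 x 4 (reg by prefix) layout as g_apfnGroup12RegReg above.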
4000 */
4001IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4002{
4003 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4004 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4005 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4006 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4007 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4008 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4009 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4010 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4011};
4012AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4013
4014
4015/** Opcode 0x0f 0x73. */
4016FNIEMOP_DEF(iemOp_Grp14)
4017{
4018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4019 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4020 /* register, register */
4021 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4022 + pVCpu->iem.s.idxPrefix], bRm);
4023 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4024}
4025
4026
4027/**
4028 * Common worker for MMX instructions on the form:
4029 * pxxx mm1, mm2/mem64
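*
* A typical caller just forwards the matching implementation table entry,
* exactly as iemOp_pcmpeqb_Pq_Qq does below:
*     return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);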
4030 */
4031FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4032{
4033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4034 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4035 {
4036 /*
4037 * Register, register.
4038 */
4039 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4040 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4042 IEM_MC_BEGIN(2, 0);
4043 IEM_MC_ARG(uint64_t *, pDst, 0);
4044 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4045 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4046 IEM_MC_PREPARE_FPU_USAGE();
4047 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4048 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4049 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4050 IEM_MC_ADVANCE_RIP();
4051 IEM_MC_END();
4052 }
4053 else
4054 {
4055 /*
4056 * Register, memory.
4057 */
4058 IEM_MC_BEGIN(2, 2);
4059 IEM_MC_ARG(uint64_t *, pDst, 0);
4060 IEM_MC_LOCAL(uint64_t, uSrc);
4061 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4063
4064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4066 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4067 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4068
4069 IEM_MC_PREPARE_FPU_USAGE();
4070 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4071 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4072
4073 IEM_MC_ADVANCE_RIP();
4074 IEM_MC_END();
4075 }
4076 return VINF_SUCCESS;
4077}
4078
4079
4080/**
4081 * Common worker for SSE2 instructions on the forms:
4082 * pxxx xmm1, xmm2/mem128
4083 *
4084 * Proper alignment of the 128-bit operand is enforced.
4085 * Exceptions type 4. SSE2 cpuid checks.
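*
* Note: the memory path fetches via IEM_MC_FETCH_MEM_U128_ALIGN_SSE, so a
* misaligned 128-bit operand raises an exception instead of being loaded
* unaligned.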
4086 */
4087FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4088{
4089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4090 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4091 {
4092 /*
4093 * Register, register.
4094 */
4095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4096 IEM_MC_BEGIN(2, 0);
4097 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4098 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4099 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4100 IEM_MC_PREPARE_SSE_USAGE();
4101 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4102 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4103 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4104 IEM_MC_ADVANCE_RIP();
4105 IEM_MC_END();
4106 }
4107 else
4108 {
4109 /*
4110 * Register, memory.
4111 */
4112 IEM_MC_BEGIN(2, 2);
4113 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4114 IEM_MC_LOCAL(RTUINT128U, uSrc);
4115 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4117
4118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4121 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4122
4123 IEM_MC_PREPARE_SSE_USAGE();
4124 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4125 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4126
4127 IEM_MC_ADVANCE_RIP();
4128 IEM_MC_END();
4129 }
4130 return VINF_SUCCESS;
4131}
4132
4133
4134/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4135FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4136{
4137 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4138 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4139}
4140
4141/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4142FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4143{
4144 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4145 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4146}
4147
4148/* Opcode 0xf3 0x0f 0x74 - invalid */
4149/* Opcode 0xf2 0x0f 0x74 - invalid */
4150
4151
4152/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4153FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4154{
4155 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4156 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4157}
4158
4159/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4160FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4161{
4162 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4163 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4164}
4165
4166/* Opcode 0xf3 0x0f 0x75 - invalid */
4167/* Opcode 0xf2 0x0f 0x75 - invalid */
4168
4169
4170/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4171FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4172{
4173 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4174 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4175}
4176
4177/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4178FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4179{
4180 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4181 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4182}
4183
4184/* Opcode 0xf3 0x0f 0x76 - invalid */
4185/* Opcode 0xf2 0x0f 0x76 - invalid */
4186
4187
4188/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4189FNIEMOP_STUB(iemOp_emms);
4190/* Opcode 0x66 0x0f 0x77 - invalid */
4191/* Opcode 0xf3 0x0f 0x77 - invalid */
4192/* Opcode 0xf2 0x0f 0x77 - invalid */
4193
4194/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4195FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4196/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4197FNIEMOP_STUB(iemOp_AmdGrp17);
4198/* Opcode 0xf3 0x0f 0x78 - invalid */
4199/* Opcode 0xf2 0x0f 0x78 - invalid */
4200
4201/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4202FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4203/* Opcode 0x66 0x0f 0x79 - invalid */
4204/* Opcode 0xf3 0x0f 0x79 - invalid */
4205/* Opcode 0xf2 0x0f 0x79 - invalid */
4206
4207/* Opcode 0x0f 0x7a - invalid */
4208/* Opcode 0x66 0x0f 0x7a - invalid */
4209/* Opcode 0xf3 0x0f 0x7a - invalid */
4210/* Opcode 0xf2 0x0f 0x7a - invalid */
4211
4212/* Opcode 0x0f 0x7b - invalid */
4213/* Opcode 0x66 0x0f 0x7b - invalid */
4214/* Opcode 0xf3 0x0f 0x7b - invalid */
4215/* Opcode 0xf2 0x0f 0x7b - invalid */
4216
4217/* Opcode 0x0f 0x7c - invalid */
4218/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4219FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4220/* Opcode 0xf3 0x0f 0x7c - invalid */
4221/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4222FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4223
4224/* Opcode 0x0f 0x7d - invalid */
4225/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4226FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4227/* Opcode 0xf3 0x0f 0x7d - invalid */
4228/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4229FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4230
4231
4232/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4233FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4234{
4235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4236 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4237 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4238 else
4239 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4240 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4241 {
4242 /* greg, MMX */
4243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4244 IEM_MC_BEGIN(0, 1);
4245 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4246 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4247 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4248 {
4249 IEM_MC_LOCAL(uint64_t, u64Tmp);
4250 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4251 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4252 }
4253 else
4254 {
4255 IEM_MC_LOCAL(uint32_t, u32Tmp);
4256 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4257 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4258 }
4259 IEM_MC_ADVANCE_RIP();
4260 IEM_MC_END();
4261 }
4262 else
4263 {
4264 /* [mem], MMX */
4265 IEM_MC_BEGIN(0, 2);
4266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4269 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4270 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4271 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4272 {
4273 IEM_MC_LOCAL(uint64_t, u64Tmp);
4274 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4275 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4276 }
4277 else
4278 {
4279 IEM_MC_LOCAL(uint32_t, u32Tmp);
4280 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4281 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4282 }
4283 IEM_MC_ADVANCE_RIP();
4284 IEM_MC_END();
4285 }
4286 return VINF_SUCCESS;
4287}
4288
4289/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4290FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4291{
4292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4293 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4294 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4295 else
4296 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4297 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4298 {
4299 /* greg, XMM */
4300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4301 IEM_MC_BEGIN(0, 1);
4302 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4303 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4304 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4305 {
4306 IEM_MC_LOCAL(uint64_t, u64Tmp);
4307 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4308 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4309 }
4310 else
4311 {
4312 IEM_MC_LOCAL(uint32_t, u32Tmp);
4313 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4314 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4315 }
4316 IEM_MC_ADVANCE_RIP();
4317 IEM_MC_END();
4318 }
4319 else
4320 {
4321 /* [mem], XMM */
4322 IEM_MC_BEGIN(0, 2);
4323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4326 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4327 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4328 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4329 {
4330 IEM_MC_LOCAL(uint64_t, u64Tmp);
4331 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4332 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4333 }
4334 else
4335 {
4336 IEM_MC_LOCAL(uint32_t, u32Tmp);
4337 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4338 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4339 }
4340 IEM_MC_ADVANCE_RIP();
4341 IEM_MC_END();
4342 }
4343 return VINF_SUCCESS;
4344}
4345
4346
4347/**
4348 * @opcode 0x7e
4349 * @opcodesub !11 mr/reg
4350 * @oppfx 0xf3
4351 * @opcpuid sse2
4352 * @opgroup og_sse2_pcksclr_datamove
4353 * @opxcpttype 5
4354 * @optest op1=1 op2=2 -> op1=2
4355 * @optest op1=0 op2=-42 -> op1=-42
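*
* (Each @optest line is presumably a generated testcase: operand values
* going in on the left of the arrow, the expected destination value on
* the right.)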
4356 */
4357FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4358{
4359 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
4360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4361 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4362 {
4363 /*
4364 * Register, register.
4365 */
4366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4367 IEM_MC_BEGIN(0, 2);
4368 IEM_MC_LOCAL(uint64_t, uSrc);
4369
4370 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4371 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4372
4373 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4374 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4375
4376 IEM_MC_ADVANCE_RIP();
4377 IEM_MC_END();
4378 }
4379 else
4380 {
4381 /*
4382 * Register, memory.
4383 */
4384 IEM_MC_BEGIN(0, 2);
4385 IEM_MC_LOCAL(uint64_t, uSrc);
4386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4387
4388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4390 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4391 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4392
4393 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4394 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4395
4396 IEM_MC_ADVANCE_RIP();
4397 IEM_MC_END();
4398 }
4399 return VINF_SUCCESS;
4400}
4401
4402/* Opcode 0xf2 0x0f 0x7e - invalid */
4403
4404
4405/** Opcode 0x0f 0x7f - movq Qq, Pq */
4406FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4407{
4408 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4410 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4411 {
4412 /*
4413 * Register, register.
4414 */
4415 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4416 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4418 IEM_MC_BEGIN(0, 1);
4419 IEM_MC_LOCAL(uint64_t, u64Tmp);
4420 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4421 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4422 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4423 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4424 IEM_MC_ADVANCE_RIP();
4425 IEM_MC_END();
4426 }
4427 else
4428 {
4429 /*
4430 * Register, memory.
4431 */
4432 IEM_MC_BEGIN(0, 2);
4433 IEM_MC_LOCAL(uint64_t, u64Tmp);
4434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4435
4436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4438 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4439 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4440
4441 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4442 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4443
4444 IEM_MC_ADVANCE_RIP();
4445 IEM_MC_END();
4446 }
4447 return VINF_SUCCESS;
4448}
4449
4450/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4451FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4452{
4453 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4456 {
4457 /*
4458 * Register, register.
4459 */
4460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4461 IEM_MC_BEGIN(0, 0);
4462 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4463 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4464 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4465 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4466 IEM_MC_ADVANCE_RIP();
4467 IEM_MC_END();
4468 }
4469 else
4470 {
4471 /*
4472 * Register, memory.
4473 */
4474 IEM_MC_BEGIN(0, 2);
4475 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4477
4478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4480 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4481 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4482
4483 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4484 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4485
4486 IEM_MC_ADVANCE_RIP();
4487 IEM_MC_END();
4488 }
4489 return VINF_SUCCESS;
4490}
4491
4492/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4493FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4494{
4495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4496 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4498 {
4499 /*
4500 * Register, register.
4501 */
4502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4503 IEM_MC_BEGIN(0, 0);
4504 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4505 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4506 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4507 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4508 IEM_MC_ADVANCE_RIP();
4509 IEM_MC_END();
4510 }
4511 else
4512 {
4513 /*
4514 * Register, memory.
4515 */
4516 IEM_MC_BEGIN(0, 2);
4517 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4519
4520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4522 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4523 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4524
4525 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4526 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4527
4528 IEM_MC_ADVANCE_RIP();
4529 IEM_MC_END();
4530 }
4531 return VINF_SUCCESS;
4532}
4533
4534/* Opcode 0xf2 0x0f 0x7f - invalid */
4535
4536
4537
4538/** Opcode 0x0f 0x80. */
4539FNIEMOP_DEF(iemOp_jo_Jv)
4540{
4541 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4542 IEMOP_HLP_MIN_386();
4543 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
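/* IEMOP_HLP_DEFAULT_64BIT_OP_SIZE makes 64-bit the default effective
   operand size in 64-bit mode, so unprefixed 64-bit code takes the 32-bit
   displacement branch below. The same pattern holds for all the Jcc
   handlers that follow. */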
4544 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4545 {
4546 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4548
4549 IEM_MC_BEGIN(0, 0);
4550 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4551 IEM_MC_REL_JMP_S16(i16Imm);
4552 } IEM_MC_ELSE() {
4553 IEM_MC_ADVANCE_RIP();
4554 } IEM_MC_ENDIF();
4555 IEM_MC_END();
4556 }
4557 else
4558 {
4559 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4561
4562 IEM_MC_BEGIN(0, 0);
4563 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4564 IEM_MC_REL_JMP_S32(i32Imm);
4565 } IEM_MC_ELSE() {
4566 IEM_MC_ADVANCE_RIP();
4567 } IEM_MC_ENDIF();
4568 IEM_MC_END();
4569 }
4570 return VINF_SUCCESS;
4571}
4572
4573
4574/** Opcode 0x0f 0x81. */
4575FNIEMOP_DEF(iemOp_jno_Jv)
4576{
4577 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4578 IEMOP_HLP_MIN_386();
4579 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4580 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4581 {
4582 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4584
4585 IEM_MC_BEGIN(0, 0);
4586 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4587 IEM_MC_ADVANCE_RIP();
4588 } IEM_MC_ELSE() {
4589 IEM_MC_REL_JMP_S16(i16Imm);
4590 } IEM_MC_ENDIF();
4591 IEM_MC_END();
4592 }
4593 else
4594 {
4595 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4597
4598 IEM_MC_BEGIN(0, 0);
4599 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4600 IEM_MC_ADVANCE_RIP();
4601 } IEM_MC_ELSE() {
4602 IEM_MC_REL_JMP_S32(i32Imm);
4603 } IEM_MC_ENDIF();
4604 IEM_MC_END();
4605 }
4606 return VINF_SUCCESS;
4607}
4608
4609
4610/** Opcode 0x0f 0x82. */
4611FNIEMOP_DEF(iemOp_jc_Jv)
4612{
4613 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4614 IEMOP_HLP_MIN_386();
4615 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4616 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4617 {
4618 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4620
4621 IEM_MC_BEGIN(0, 0);
4622 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4623 IEM_MC_REL_JMP_S16(i16Imm);
4624 } IEM_MC_ELSE() {
4625 IEM_MC_ADVANCE_RIP();
4626 } IEM_MC_ENDIF();
4627 IEM_MC_END();
4628 }
4629 else
4630 {
4631 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4633
4634 IEM_MC_BEGIN(0, 0);
4635 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4636 IEM_MC_REL_JMP_S32(i32Imm);
4637 } IEM_MC_ELSE() {
4638 IEM_MC_ADVANCE_RIP();
4639 } IEM_MC_ENDIF();
4640 IEM_MC_END();
4641 }
4642 return VINF_SUCCESS;
4643}
4644
4645
4646/** Opcode 0x0f 0x83. */
4647FNIEMOP_DEF(iemOp_jnc_Jv)
4648{
4649 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4650 IEMOP_HLP_MIN_386();
4651 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4652 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4653 {
4654 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4656
4657 IEM_MC_BEGIN(0, 0);
4658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4659 IEM_MC_ADVANCE_RIP();
4660 } IEM_MC_ELSE() {
4661 IEM_MC_REL_JMP_S16(i16Imm);
4662 } IEM_MC_ENDIF();
4663 IEM_MC_END();
4664 }
4665 else
4666 {
4667 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4669
4670 IEM_MC_BEGIN(0, 0);
4671 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4672 IEM_MC_ADVANCE_RIP();
4673 } IEM_MC_ELSE() {
4674 IEM_MC_REL_JMP_S32(i32Imm);
4675 } IEM_MC_ENDIF();
4676 IEM_MC_END();
4677 }
4678 return VINF_SUCCESS;
4679}
4680
4681
4682/** Opcode 0x0f 0x84. */
4683FNIEMOP_DEF(iemOp_je_Jv)
4684{
4685 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4686 IEMOP_HLP_MIN_386();
4687 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4688 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4689 {
4690 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692
4693 IEM_MC_BEGIN(0, 0);
4694 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4695 IEM_MC_REL_JMP_S16(i16Imm);
4696 } IEM_MC_ELSE() {
4697 IEM_MC_ADVANCE_RIP();
4698 } IEM_MC_ENDIF();
4699 IEM_MC_END();
4700 }
4701 else
4702 {
4703 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4705
4706 IEM_MC_BEGIN(0, 0);
4707 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4708 IEM_MC_REL_JMP_S32(i32Imm);
4709 } IEM_MC_ELSE() {
4710 IEM_MC_ADVANCE_RIP();
4711 } IEM_MC_ENDIF();
4712 IEM_MC_END();
4713 }
4714 return VINF_SUCCESS;
4715}
4716
4717
4718/** Opcode 0x0f 0x85. */
4719FNIEMOP_DEF(iemOp_jne_Jv)
4720{
4721 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4722 IEMOP_HLP_MIN_386();
4723 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4724 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4725 {
4726 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4728
4729 IEM_MC_BEGIN(0, 0);
4730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4731 IEM_MC_ADVANCE_RIP();
4732 } IEM_MC_ELSE() {
4733 IEM_MC_REL_JMP_S16(i16Imm);
4734 } IEM_MC_ENDIF();
4735 IEM_MC_END();
4736 }
4737 else
4738 {
4739 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4741
4742 IEM_MC_BEGIN(0, 0);
4743 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4744 IEM_MC_ADVANCE_RIP();
4745 } IEM_MC_ELSE() {
4746 IEM_MC_REL_JMP_S32(i32Imm);
4747 } IEM_MC_ENDIF();
4748 IEM_MC_END();
4749 }
4750 return VINF_SUCCESS;
4751}
4752
4753
4754/** Opcode 0x0f 0x86. */
4755FNIEMOP_DEF(iemOp_jbe_Jv)
4756{
4757 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4758 IEMOP_HLP_MIN_386();
4759 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4760 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4761 {
4762 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4764
4765 IEM_MC_BEGIN(0, 0);
4766 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4767 IEM_MC_REL_JMP_S16(i16Imm);
4768 } IEM_MC_ELSE() {
4769 IEM_MC_ADVANCE_RIP();
4770 } IEM_MC_ENDIF();
4771 IEM_MC_END();
4772 }
4773 else
4774 {
4775 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4777
4778 IEM_MC_BEGIN(0, 0);
4779 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4780 IEM_MC_REL_JMP_S32(i32Imm);
4781 } IEM_MC_ELSE() {
4782 IEM_MC_ADVANCE_RIP();
4783 } IEM_MC_ENDIF();
4784 IEM_MC_END();
4785 }
4786 return VINF_SUCCESS;
4787}
4788
4789
4790/** Opcode 0x0f 0x87. */
4791FNIEMOP_DEF(iemOp_jnbe_Jv)
4792{
4793 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4794 IEMOP_HLP_MIN_386();
4795 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4796 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4797 {
4798 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4800
4801 IEM_MC_BEGIN(0, 0);
4802 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4803 IEM_MC_ADVANCE_RIP();
4804 } IEM_MC_ELSE() {
4805 IEM_MC_REL_JMP_S16(i16Imm);
4806 } IEM_MC_ENDIF();
4807 IEM_MC_END();
4808 }
4809 else
4810 {
4811 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4813
4814 IEM_MC_BEGIN(0, 0);
4815 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4816 IEM_MC_ADVANCE_RIP();
4817 } IEM_MC_ELSE() {
4818 IEM_MC_REL_JMP_S32(i32Imm);
4819 } IEM_MC_ENDIF();
4820 IEM_MC_END();
4821 }
4822 return VINF_SUCCESS;
4823}
4824
4825
4826/** Opcode 0x0f 0x88. */
4827FNIEMOP_DEF(iemOp_js_Jv)
4828{
4829 IEMOP_MNEMONIC(js_Jv, "js Jv");
4830 IEMOP_HLP_MIN_386();
4831 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4832 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4833 {
4834 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4836
4837 IEM_MC_BEGIN(0, 0);
4838 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4839 IEM_MC_REL_JMP_S16(i16Imm);
4840 } IEM_MC_ELSE() {
4841 IEM_MC_ADVANCE_RIP();
4842 } IEM_MC_ENDIF();
4843 IEM_MC_END();
4844 }
4845 else
4846 {
4847 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849
4850 IEM_MC_BEGIN(0, 0);
4851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4852 IEM_MC_REL_JMP_S32(i32Imm);
4853 } IEM_MC_ELSE() {
4854 IEM_MC_ADVANCE_RIP();
4855 } IEM_MC_ENDIF();
4856 IEM_MC_END();
4857 }
4858 return VINF_SUCCESS;
4859}
4860
4861
4862/** Opcode 0x0f 0x89. */
4863FNIEMOP_DEF(iemOp_jns_Jv)
4864{
4865 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4866 IEMOP_HLP_MIN_386();
4867 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4868 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4869 {
4870 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4872
4873 IEM_MC_BEGIN(0, 0);
4874 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4875 IEM_MC_ADVANCE_RIP();
4876 } IEM_MC_ELSE() {
4877 IEM_MC_REL_JMP_S16(i16Imm);
4878 } IEM_MC_ENDIF();
4879 IEM_MC_END();
4880 }
4881 else
4882 {
4883 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4885
4886 IEM_MC_BEGIN(0, 0);
4887 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4888 IEM_MC_ADVANCE_RIP();
4889 } IEM_MC_ELSE() {
4890 IEM_MC_REL_JMP_S32(i32Imm);
4891 } IEM_MC_ENDIF();
4892 IEM_MC_END();
4893 }
4894 return VINF_SUCCESS;
4895}
4896
4897
4898/** Opcode 0x0f 0x8a. */
4899FNIEMOP_DEF(iemOp_jp_Jv)
4900{
4901 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4902 IEMOP_HLP_MIN_386();
4903 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4904 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4905 {
4906 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4908
4909 IEM_MC_BEGIN(0, 0);
4910 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4911 IEM_MC_REL_JMP_S16(i16Imm);
4912 } IEM_MC_ELSE() {
4913 IEM_MC_ADVANCE_RIP();
4914 } IEM_MC_ENDIF();
4915 IEM_MC_END();
4916 }
4917 else
4918 {
4919 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4921
4922 IEM_MC_BEGIN(0, 0);
4923 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4924 IEM_MC_REL_JMP_S32(i32Imm);
4925 } IEM_MC_ELSE() {
4926 IEM_MC_ADVANCE_RIP();
4927 } IEM_MC_ENDIF();
4928 IEM_MC_END();
4929 }
4930 return VINF_SUCCESS;
4931}
4932
4933
4934/** Opcode 0x0f 0x8b. */
4935FNIEMOP_DEF(iemOp_jnp_Jv)
4936{
4937 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4938 IEMOP_HLP_MIN_386();
4939 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4940 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4941 {
4942 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4944
4945 IEM_MC_BEGIN(0, 0);
4946 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4947 IEM_MC_ADVANCE_RIP();
4948 } IEM_MC_ELSE() {
4949 IEM_MC_REL_JMP_S16(i16Imm);
4950 } IEM_MC_ENDIF();
4951 IEM_MC_END();
4952 }
4953 else
4954 {
4955 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4957
4958 IEM_MC_BEGIN(0, 0);
4959 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4960 IEM_MC_ADVANCE_RIP();
4961 } IEM_MC_ELSE() {
4962 IEM_MC_REL_JMP_S32(i32Imm);
4963 } IEM_MC_ENDIF();
4964 IEM_MC_END();
4965 }
4966 return VINF_SUCCESS;
4967}
4968
4969
4970/** Opcode 0x0f 0x8c. */
4971FNIEMOP_DEF(iemOp_jl_Jv)
4972{
4973 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4974 IEMOP_HLP_MIN_386();
4975 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4976 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4977 {
4978 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4980
4981 IEM_MC_BEGIN(0, 0);
4982 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4983 IEM_MC_REL_JMP_S16(i16Imm);
4984 } IEM_MC_ELSE() {
4985 IEM_MC_ADVANCE_RIP();
4986 } IEM_MC_ENDIF();
4987 IEM_MC_END();
4988 }
4989 else
4990 {
4991 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4993
4994 IEM_MC_BEGIN(0, 0);
4995 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4996 IEM_MC_REL_JMP_S32(i32Imm);
4997 } IEM_MC_ELSE() {
4998 IEM_MC_ADVANCE_RIP();
4999 } IEM_MC_ENDIF();
5000 IEM_MC_END();
5001 }
5002 return VINF_SUCCESS;
5003}
5004
5005
5006/** Opcode 0x0f 0x8d. */
5007FNIEMOP_DEF(iemOp_jnl_Jv)
5008{
5009 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5010 IEMOP_HLP_MIN_386();
5011 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5012 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5013 {
5014 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5016
5017 IEM_MC_BEGIN(0, 0);
5018 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5019 IEM_MC_ADVANCE_RIP();
5020 } IEM_MC_ELSE() {
5021 IEM_MC_REL_JMP_S16(i16Imm);
5022 } IEM_MC_ENDIF();
5023 IEM_MC_END();
5024 }
5025 else
5026 {
5027 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5029
5030 IEM_MC_BEGIN(0, 0);
5031 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5032 IEM_MC_ADVANCE_RIP();
5033 } IEM_MC_ELSE() {
5034 IEM_MC_REL_JMP_S32(i32Imm);
5035 } IEM_MC_ENDIF();
5036 IEM_MC_END();
5037 }
5038 return VINF_SUCCESS;
5039}
5040
5041
5042/** Opcode 0x0f 0x8e. */
5043FNIEMOP_DEF(iemOp_jle_Jv)
5044{
5045 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5046 IEMOP_HLP_MIN_386();
5047 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5048 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5049 {
5050 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5052
5053 IEM_MC_BEGIN(0, 0);
5054 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5055 IEM_MC_REL_JMP_S16(i16Imm);
5056 } IEM_MC_ELSE() {
5057 IEM_MC_ADVANCE_RIP();
5058 } IEM_MC_ENDIF();
5059 IEM_MC_END();
5060 }
5061 else
5062 {
5063 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5065
5066 IEM_MC_BEGIN(0, 0);
5067 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5068 IEM_MC_REL_JMP_S32(i32Imm);
5069 } IEM_MC_ELSE() {
5070 IEM_MC_ADVANCE_RIP();
5071 } IEM_MC_ENDIF();
5072 IEM_MC_END();
5073 }
5074 return VINF_SUCCESS;
5075}
5076
5077
5078/** Opcode 0x0f 0x8f. */
5079FNIEMOP_DEF(iemOp_jnle_Jv)
5080{
5081 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5082 IEMOP_HLP_MIN_386();
5083 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5084 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5085 {
5086 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5088
5089 IEM_MC_BEGIN(0, 0);
5090 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5091 IEM_MC_ADVANCE_RIP();
5092 } IEM_MC_ELSE() {
5093 IEM_MC_REL_JMP_S16(i16Imm);
5094 } IEM_MC_ENDIF();
5095 IEM_MC_END();
5096 }
5097 else
5098 {
5099 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5101
5102 IEM_MC_BEGIN(0, 0);
5103 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5104 IEM_MC_ADVANCE_RIP();
5105 } IEM_MC_ELSE() {
5106 IEM_MC_REL_JMP_S32(i32Imm);
5107 } IEM_MC_ENDIF();
5108 IEM_MC_END();
5109 }
5110 return VINF_SUCCESS;
5111}
5112
5113
5114/** Opcode 0x0f 0x90. */
5115FNIEMOP_DEF(iemOp_seto_Eb)
5116{
5117 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5118 IEMOP_HLP_MIN_386();
5119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5120
5121 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5122 * any way. AMD says it's "unused", whatever that means. We're
5123 * ignoring for now. */
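/* Note: all the setcc forms store exactly one byte: 1 if the condition
   holds, 0 otherwise; the operand size prefix has no effect on them. */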
5124 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5125 {
5126 /* register target */
5127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5128 IEM_MC_BEGIN(0, 0);
5129 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5130 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5131 } IEM_MC_ELSE() {
5132 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5133 } IEM_MC_ENDIF();
5134 IEM_MC_ADVANCE_RIP();
5135 IEM_MC_END();
5136 }
5137 else
5138 {
5139 /* memory target */
5140 IEM_MC_BEGIN(0, 1);
5141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5144 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5145 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5146 } IEM_MC_ELSE() {
5147 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5148 } IEM_MC_ENDIF();
5149 IEM_MC_ADVANCE_RIP();
5150 IEM_MC_END();
5151 }
5152 return VINF_SUCCESS;
5153}
5154
5155
5156/** Opcode 0x0f 0x91. */
5157FNIEMOP_DEF(iemOp_setno_Eb)
5158{
5159 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5160 IEMOP_HLP_MIN_386();
5161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5162
5163 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5164 * any way. AMD says it's "unused", whatever that means. We're
5165 * ignoring for now. */
5166 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5167 {
5168 /* register target */
5169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5170 IEM_MC_BEGIN(0, 0);
5171 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5172 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5173 } IEM_MC_ELSE() {
5174 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5175 } IEM_MC_ENDIF();
5176 IEM_MC_ADVANCE_RIP();
5177 IEM_MC_END();
5178 }
5179 else
5180 {
5181 /* memory target */
5182 IEM_MC_BEGIN(0, 1);
5183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5186 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5187 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5188 } IEM_MC_ELSE() {
5189 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5190 } IEM_MC_ENDIF();
5191 IEM_MC_ADVANCE_RIP();
5192 IEM_MC_END();
5193 }
5194 return VINF_SUCCESS;
5195}
5196
5197
5198/** Opcode 0x0f 0x92. */
5199FNIEMOP_DEF(iemOp_setc_Eb)
5200{
5201 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5202 IEMOP_HLP_MIN_386();
5203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5204
5205 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5206 * any way. AMD says it's "unused", whatever that means. We're
5207 * ignoring for now. */
5208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5209 {
5210 /* register target */
5211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5212 IEM_MC_BEGIN(0, 0);
5213 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5214 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5215 } IEM_MC_ELSE() {
5216 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5217 } IEM_MC_ENDIF();
5218 IEM_MC_ADVANCE_RIP();
5219 IEM_MC_END();
5220 }
5221 else
5222 {
5223 /* memory target */
5224 IEM_MC_BEGIN(0, 1);
5225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5228 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5229 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5230 } IEM_MC_ELSE() {
5231 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5232 } IEM_MC_ENDIF();
5233 IEM_MC_ADVANCE_RIP();
5234 IEM_MC_END();
5235 }
5236 return VINF_SUCCESS;
5237}
5238
5239
5240/** Opcode 0x0f 0x93. */
5241FNIEMOP_DEF(iemOp_setnc_Eb)
5242{
5243 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5244 IEMOP_HLP_MIN_386();
5245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5246
5247 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5248 * any way. AMD says it's "unused", whatever that means. We're
5249 * ignoring for now. */
5250 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5251 {
5252 /* register target */
5253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5254 IEM_MC_BEGIN(0, 0);
5255 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5256 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5257 } IEM_MC_ELSE() {
5258 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5259 } IEM_MC_ENDIF();
5260 IEM_MC_ADVANCE_RIP();
5261 IEM_MC_END();
5262 }
5263 else
5264 {
5265 /* memory target */
5266 IEM_MC_BEGIN(0, 1);
5267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5270 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5271 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5272 } IEM_MC_ELSE() {
5273 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5274 } IEM_MC_ENDIF();
5275 IEM_MC_ADVANCE_RIP();
5276 IEM_MC_END();
5277 }
5278 return VINF_SUCCESS;
5279}
5280
5281
5282/** Opcode 0x0f 0x94. */
5283FNIEMOP_DEF(iemOp_sete_Eb)
5284{
5285 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5286 IEMOP_HLP_MIN_386();
5287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5288
5289 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5290 * any way. AMD says it's "unused", whatever that means. We're
5291 * ignoring for now. */
5292 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5293 {
5294 /* register target */
5295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5296 IEM_MC_BEGIN(0, 0);
5297 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5298 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5299 } IEM_MC_ELSE() {
5300 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5301 } IEM_MC_ENDIF();
5302 IEM_MC_ADVANCE_RIP();
5303 IEM_MC_END();
5304 }
5305 else
5306 {
5307 /* memory target */
5308 IEM_MC_BEGIN(0, 1);
5309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5312 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5313 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5314 } IEM_MC_ELSE() {
5315 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5316 } IEM_MC_ENDIF();
5317 IEM_MC_ADVANCE_RIP();
5318 IEM_MC_END();
5319 }
5320 return VINF_SUCCESS;
5321}
5322
5323
5324/** Opcode 0x0f 0x95. */
5325FNIEMOP_DEF(iemOp_setne_Eb)
5326{
5327 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5328 IEMOP_HLP_MIN_386();
5329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5330
5331 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5332 * any way. AMD says it's "unused", whatever that means. We're
5333 * ignoring for now. */
5334 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5335 {
5336 /* register target */
5337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5338 IEM_MC_BEGIN(0, 0);
5339 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5340 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5341 } IEM_MC_ELSE() {
5342 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5343 } IEM_MC_ENDIF();
5344 IEM_MC_ADVANCE_RIP();
5345 IEM_MC_END();
5346 }
5347 else
5348 {
5349 /* memory target */
5350 IEM_MC_BEGIN(0, 1);
5351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5354 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5355 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5356 } IEM_MC_ELSE() {
5357 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5358 } IEM_MC_ENDIF();
5359 IEM_MC_ADVANCE_RIP();
5360 IEM_MC_END();
5361 }
5362 return VINF_SUCCESS;
5363}
5364
5365
5366/** Opcode 0x0f 0x96. */
5367FNIEMOP_DEF(iemOp_setbe_Eb)
5368{
5369 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5370 IEMOP_HLP_MIN_386();
5371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5372
5373 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5374 * any way. AMD says it's "unused", whatever that means. We're
5375 * ignoring for now. */
5376 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5377 {
5378 /* register target */
5379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5380 IEM_MC_BEGIN(0, 0);
5381 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5382 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5383 } IEM_MC_ELSE() {
5384 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5385 } IEM_MC_ENDIF();
5386 IEM_MC_ADVANCE_RIP();
5387 IEM_MC_END();
5388 }
5389 else
5390 {
5391 /* memory target */
5392 IEM_MC_BEGIN(0, 1);
5393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5396 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5397 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5398 } IEM_MC_ELSE() {
5399 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5400 } IEM_MC_ENDIF();
5401 IEM_MC_ADVANCE_RIP();
5402 IEM_MC_END();
5403 }
5404 return VINF_SUCCESS;
5405}
5406
5407
5408/** Opcode 0x0f 0x97. */
5409FNIEMOP_DEF(iemOp_setnbe_Eb)
5410{
5411 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5412 IEMOP_HLP_MIN_386();
5413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5414
5415 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5416 * any way. AMD says it's "unused", whatever that means. We're
5417 * ignoring for now. */
5418 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5419 {
5420 /* register target */
5421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5422 IEM_MC_BEGIN(0, 0);
5423 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5424 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5425 } IEM_MC_ELSE() {
5426 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5427 } IEM_MC_ENDIF();
5428 IEM_MC_ADVANCE_RIP();
5429 IEM_MC_END();
5430 }
5431 else
5432 {
5433 /* memory target */
5434 IEM_MC_BEGIN(0, 1);
5435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5438 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5439 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5440 } IEM_MC_ELSE() {
5441 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5442 } IEM_MC_ENDIF();
5443 IEM_MC_ADVANCE_RIP();
5444 IEM_MC_END();
5445 }
5446 return VINF_SUCCESS;
5447}
5448
5449
5450/** Opcode 0x0f 0x98. */
5451FNIEMOP_DEF(iemOp_sets_Eb)
5452{
5453 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5454 IEMOP_HLP_MIN_386();
5455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5456
5457 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5458 * any way. AMD says it's "unused", whatever that means. We're
5459 * ignoring for now. */
5460 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5461 {
5462 /* register target */
5463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5464 IEM_MC_BEGIN(0, 0);
5465 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5466 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5467 } IEM_MC_ELSE() {
5468 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5469 } IEM_MC_ENDIF();
5470 IEM_MC_ADVANCE_RIP();
5471 IEM_MC_END();
5472 }
5473 else
5474 {
5475 /* memory target */
5476 IEM_MC_BEGIN(0, 1);
5477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5480 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5481 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5482 } IEM_MC_ELSE() {
5483 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5484 } IEM_MC_ENDIF();
5485 IEM_MC_ADVANCE_RIP();
5486 IEM_MC_END();
5487 }
5488 return VINF_SUCCESS;
5489}
5490
5491
5492/** Opcode 0x0f 0x99. */
5493FNIEMOP_DEF(iemOp_setns_Eb)
5494{
5495 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5496 IEMOP_HLP_MIN_386();
5497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5498
5499 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5500 * any way. AMD says it's "unused", whatever that means. We're
5501 * ignoring it for now. */
5502 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5503 {
5504 /* register target */
5505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5506 IEM_MC_BEGIN(0, 0);
5507 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5508 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5509 } IEM_MC_ELSE() {
5510 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5511 } IEM_MC_ENDIF();
5512 IEM_MC_ADVANCE_RIP();
5513 IEM_MC_END();
5514 }
5515 else
5516 {
5517 /* memory target */
5518 IEM_MC_BEGIN(0, 1);
5519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5522 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5523 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5524 } IEM_MC_ELSE() {
5525 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5526 } IEM_MC_ENDIF();
5527 IEM_MC_ADVANCE_RIP();
5528 IEM_MC_END();
5529 }
5530 return VINF_SUCCESS;
5531}
5532
5533
5534/** Opcode 0x0f 0x9a. */
5535FNIEMOP_DEF(iemOp_setp_Eb)
5536{
5537 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5538 IEMOP_HLP_MIN_386();
5539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5540
5541 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5542 * any way. AMD says it's "unused", whatever that means. We're
5543 * ignoring it for now. */
5544 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5545 {
5546 /* register target */
5547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5548 IEM_MC_BEGIN(0, 0);
5549 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5550 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5551 } IEM_MC_ELSE() {
5552 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5553 } IEM_MC_ENDIF();
5554 IEM_MC_ADVANCE_RIP();
5555 IEM_MC_END();
5556 }
5557 else
5558 {
5559 /* memory target */
5560 IEM_MC_BEGIN(0, 1);
5561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5564 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5565 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5566 } IEM_MC_ELSE() {
5567 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5568 } IEM_MC_ENDIF();
5569 IEM_MC_ADVANCE_RIP();
5570 IEM_MC_END();
5571 }
5572 return VINF_SUCCESS;
5573}
5574
5575
5576/** Opcode 0x0f 0x9b. */
5577FNIEMOP_DEF(iemOp_setnp_Eb)
5578{
5579 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5580 IEMOP_HLP_MIN_386();
5581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5582
5583 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5584 * any way. AMD says it's "unused", whatever that means. We're
5585 * ignoring it for now. */
5586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5587 {
5588 /* register target */
5589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5590 IEM_MC_BEGIN(0, 0);
5591 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5592 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5593 } IEM_MC_ELSE() {
5594 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5595 } IEM_MC_ENDIF();
5596 IEM_MC_ADVANCE_RIP();
5597 IEM_MC_END();
5598 }
5599 else
5600 {
5601 /* memory target */
5602 IEM_MC_BEGIN(0, 1);
5603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5606 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5607 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5608 } IEM_MC_ELSE() {
5609 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5610 } IEM_MC_ENDIF();
5611 IEM_MC_ADVANCE_RIP();
5612 IEM_MC_END();
5613 }
5614 return VINF_SUCCESS;
5615}
5616
5617
5618/** Opcode 0x0f 0x9c. */
5619FNIEMOP_DEF(iemOp_setl_Eb)
5620{
5621 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5622 IEMOP_HLP_MIN_386();
5623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5624
5625 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5626 * any way. AMD says it's "unused", whatever that means. We're
5627 * ignoring it for now. */
5628 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5629 {
5630 /* register target */
5631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5632 IEM_MC_BEGIN(0, 0);
5633 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5634 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5635 } IEM_MC_ELSE() {
5636 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5637 } IEM_MC_ENDIF();
5638 IEM_MC_ADVANCE_RIP();
5639 IEM_MC_END();
5640 }
5641 else
5642 {
5643 /* memory target */
5644 IEM_MC_BEGIN(0, 1);
5645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5648 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5649 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5650 } IEM_MC_ELSE() {
5651 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5652 } IEM_MC_ENDIF();
5653 IEM_MC_ADVANCE_RIP();
5654 IEM_MC_END();
5655 }
5656 return VINF_SUCCESS;
5657}
5658
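/*
 * Illustrative sketch of the signed 'less' condition (SF != OF) used by the
 * SETL blocks above; hypothetical helper, not referenced by the emulation.
 */
#if 0 /* example only, not compiled */
static uint8_t iemExampleSetl(uint32_t fEFlags)
{
    /* 'less': taken when the sign and overflow flags disagree. */
    return !(fEFlags & X86_EFL_SF) != !(fEFlags & X86_EFL_OF) ? 1 : 0;
}
#endif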
5659
5660/** Opcode 0x0f 0x9d. */
5661FNIEMOP_DEF(iemOp_setnl_Eb)
5662{
5663 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5664 IEMOP_HLP_MIN_386();
5665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5666
5667 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5668 * any way. AMD says it's "unused", whatever that means. We're
5669 * ignoring it for now. */
5670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5671 {
5672 /* register target */
5673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5674 IEM_MC_BEGIN(0, 0);
5675 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5676 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5677 } IEM_MC_ELSE() {
5678 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5679 } IEM_MC_ENDIF();
5680 IEM_MC_ADVANCE_RIP();
5681 IEM_MC_END();
5682 }
5683 else
5684 {
5685 /* memory target */
5686 IEM_MC_BEGIN(0, 1);
5687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5690 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5691 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5692 } IEM_MC_ELSE() {
5693 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5694 } IEM_MC_ENDIF();
5695 IEM_MC_ADVANCE_RIP();
5696 IEM_MC_END();
5697 }
5698 return VINF_SUCCESS;
5699}
5700
5701
5702/** Opcode 0x0f 0x9e. */
5703FNIEMOP_DEF(iemOp_setle_Eb)
5704{
5705 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5706 IEMOP_HLP_MIN_386();
5707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5708
5709 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5710 * any way. AMD says it's "unused", whatever that means. We're
5711 * ignoring it for now. */
5712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5713 {
5714 /* register target */
5715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5716 IEM_MC_BEGIN(0, 0);
5717 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5718 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5719 } IEM_MC_ELSE() {
5720 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5721 } IEM_MC_ENDIF();
5722 IEM_MC_ADVANCE_RIP();
5723 IEM_MC_END();
5724 }
5725 else
5726 {
5727 /* memory target */
5728 IEM_MC_BEGIN(0, 1);
5729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5732 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5733 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5734 } IEM_MC_ELSE() {
5735 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5736 } IEM_MC_ENDIF();
5737 IEM_MC_ADVANCE_RIP();
5738 IEM_MC_END();
5739 }
5740 return VINF_SUCCESS;
5741}
5742
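/*
 * Illustrative sketch of the SETLE condition above: 'equal' (ZF set) or
 * 'less' (SF != OF). Hypothetical helper, not referenced by the emulation.
 */
#if 0 /* example only, not compiled */
static uint8_t iemExampleSetle(uint32_t fEFlags)
{
    return    (fEFlags & X86_EFL_ZF)
           || !(fEFlags & X86_EFL_SF) != !(fEFlags & X86_EFL_OF) ? 1 : 0;
}
#endif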
5743
5744/** Opcode 0x0f 0x9f. */
5745FNIEMOP_DEF(iemOp_setnle_Eb)
5746{
5747 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5748 IEMOP_HLP_MIN_386();
5749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5750
5751 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5752 * any way. AMD says it's "unused", whatever that means. We're
5753 * ignoring it for now. */
5754 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5755 {
5756 /* register target */
5757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5758 IEM_MC_BEGIN(0, 0);
5759 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5760 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5761 } IEM_MC_ELSE() {
5762 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5763 } IEM_MC_ENDIF();
5764 IEM_MC_ADVANCE_RIP();
5765 IEM_MC_END();
5766 }
5767 else
5768 {
5769 /* memory target */
5770 IEM_MC_BEGIN(0, 1);
5771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5774 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5775 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5776 } IEM_MC_ELSE() {
5777 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5778 } IEM_MC_ENDIF();
5779 IEM_MC_ADVANCE_RIP();
5780 IEM_MC_END();
5781 }
5782 return VINF_SUCCESS;
5783}
5784
5785
5786/**
5787 * Common 'push segment-register' helper.
5788 */
5789FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5790{
5791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5792 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* In 64-bit mode only FS and GS pushes reach this helper. */
5793 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5794
5795 switch (pVCpu->iem.s.enmEffOpSize)
5796 {
5797 case IEMMODE_16BIT:
5798 IEM_MC_BEGIN(0, 1);
5799 IEM_MC_LOCAL(uint16_t, u16Value);
5800 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5801 IEM_MC_PUSH_U16(u16Value);
5802 IEM_MC_ADVANCE_RIP();
5803 IEM_MC_END();
5804 break;
5805
5806 case IEMMODE_32BIT:
5807 IEM_MC_BEGIN(0, 1);
5808 IEM_MC_LOCAL(uint32_t, u32Value);
5809 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5810 IEM_MC_PUSH_U32_SREG(u32Value);
5811 IEM_MC_ADVANCE_RIP();
5812 IEM_MC_END();
5813 break;
5814
5815 case IEMMODE_64BIT:
5816 IEM_MC_BEGIN(0, 1);
5817 IEM_MC_LOCAL(uint64_t, u64Value);
5818 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5819 IEM_MC_PUSH_U64(u64Value);
5820 IEM_MC_ADVANCE_RIP();
5821 IEM_MC_END();
5822 break;
5823 }
5824
5825 return VINF_SUCCESS;
5826}
5827
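/*
 * Note on the 32-bit case above: IEM_MC_PUSH_U32_SREG is used rather than a
 * plain IEM_MC_PUSH_U32 because Intel documents that newer CPUs may perform
 * only a 16-bit write when pushing a segment register with a 32-bit operand
 * size, leaving the upper half of the stack slot untouched. Hypothetical
 * sketch of the zero-extended value the documented behaviour produces:
 */
#if 0 /* example only, not compiled */
static uint32_t iemExamplePushedSRegValue(uint16_t uSel)
{
    return (uint32_t)uSel; /* selector in the low word, zero above it */
}
#endif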
5828
5829/** Opcode 0x0f 0xa0. */
5830FNIEMOP_DEF(iemOp_push_fs)
5831{
5832 IEMOP_MNEMONIC(push_fs, "push fs");
5833 IEMOP_HLP_MIN_386();
5834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5835 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5836}
5837
5838
5839/** Opcode 0x0f 0xa1. */
5840FNIEMOP_DEF(iemOp_pop_fs)
5841{
5842 IEMOP_MNEMONIC(pop_fs, "pop fs");
5843 IEMOP_HLP_MIN_386();
5844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5845 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5846}
5847
5848
5849/** Opcode 0x0f 0xa2. */
5850FNIEMOP_DEF(iemOp_cpuid)
5851{
5852 IEMOP_MNEMONIC(cpuid, "cpuid");
5853 IEMOP_HLP_MIN_486(); /* CPUID is not present on all 486 models. */
5854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5855 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5856}
5857
5858
5859/**
5860 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5861 * iemOp_bts_Ev_Gv.
5862 */
5863FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5864{
5865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5866 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5867
5868 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5869 {
5870 /* register destination. */
5871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5872 switch (pVCpu->iem.s.enmEffOpSize)
5873 {
5874 case IEMMODE_16BIT:
5875 IEM_MC_BEGIN(3, 0);
5876 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5877 IEM_MC_ARG(uint16_t, u16Src, 1);
5878 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5879
5880 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5881 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5882 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5883 IEM_MC_REF_EFLAGS(pEFlags);
5884 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5885
5886 IEM_MC_ADVANCE_RIP();
5887 IEM_MC_END();
5888 return VINF_SUCCESS;
5889
5890 case IEMMODE_32BIT:
5891 IEM_MC_BEGIN(3, 0);
5892 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5893 IEM_MC_ARG(uint32_t, u32Src, 1);
5894 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5895
5896 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5897 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5898 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5899 IEM_MC_REF_EFLAGS(pEFlags);
5900 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5901
5902 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5903 IEM_MC_ADVANCE_RIP();
5904 IEM_MC_END();
5905 return VINF_SUCCESS;
5906
5907 case IEMMODE_64BIT:
5908 IEM_MC_BEGIN(3, 0);
5909 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5910 IEM_MC_ARG(uint64_t, u64Src, 1);
5911 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5912
5913 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5914 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5915 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5916 IEM_MC_REF_EFLAGS(pEFlags);
5917 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5918
5919 IEM_MC_ADVANCE_RIP();
5920 IEM_MC_END();
5921 return VINF_SUCCESS;
5922
5923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5924 }
5925 }
5926 else
5927 {
5928 /* memory destination. */
5929
5930 uint32_t fAccess;
5931 if (pImpl->pfnLockedU16)
5932 fAccess = IEM_ACCESS_DATA_RW;
5933 else /* BT */
5934 fAccess = IEM_ACCESS_DATA_R;
5935
5936 /** @todo test negative bit offsets! */
5937 switch (pVCpu->iem.s.enmEffOpSize)
5938 {
5939 case IEMMODE_16BIT:
5940 IEM_MC_BEGIN(3, 2);
5941 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5942 IEM_MC_ARG(uint16_t, u16Src, 1);
5943 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5945 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5946
5947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5948 if (pImpl->pfnLockedU16)
5949 IEMOP_HLP_DONE_DECODING();
5950 else
5951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5952 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5953 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5954 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5955 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5956 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5957 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5958 IEM_MC_FETCH_EFLAGS(EFlags);
5959
5960 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5961 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5962 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5963 else
5964 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5965 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5966
5967 IEM_MC_COMMIT_EFLAGS(EFlags);
5968 IEM_MC_ADVANCE_RIP();
5969 IEM_MC_END();
5970 return VINF_SUCCESS;
5971
5972 case IEMMODE_32BIT:
5973 IEM_MC_BEGIN(3, 2);
5974 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5975 IEM_MC_ARG(uint32_t, u32Src, 1);
5976 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5978 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5979
5980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5981 if (pImpl->pfnLockedU16)
5982 IEMOP_HLP_DONE_DECODING();
5983 else
5984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5985 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5986 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5987 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5988 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5989 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5990 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5991 IEM_MC_FETCH_EFLAGS(EFlags);
5992
5993 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5994 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5995 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5996 else
5997 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5998 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5999
6000 IEM_MC_COMMIT_EFLAGS(EFlags);
6001 IEM_MC_ADVANCE_RIP();
6002 IEM_MC_END();
6003 return VINF_SUCCESS;
6004
6005 case IEMMODE_64BIT:
6006 IEM_MC_BEGIN(3, 2);
6007 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6008 IEM_MC_ARG(uint64_t, u64Src, 1);
6009 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6011 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6012
6013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6014 if (pImpl->pfnLockedU16)
6015 IEMOP_HLP_DONE_DECODING();
6016 else
6017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6018 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6019 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6020 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6021 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6022 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6023 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6024 IEM_MC_FETCH_EFLAGS(EFlags);
6025
6026 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6027 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6028 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6029 else
6030 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6031 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6032
6033 IEM_MC_COMMIT_EFLAGS(EFlags);
6034 IEM_MC_ADVANCE_RIP();
6035 IEM_MC_END();
6036 return VINF_SUCCESS;
6037
6038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6039 }
6040 }
6041}
6042
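/*
 * Illustrative sketch (hypothetical helper) of the 16-bit memory-path
 * address math above: the signed bit offset is arithmetically divided by 16
 * to select a word, scaled to a 2-byte stride, and the low 4 bits pick the
 * bit within that word. Negative offsets step below the base address.
 */
#if 0 /* example only, not compiled */
static void iemExampleBtMemAddr16(RTGCPTR GCPtrBase, int16_t iBitNo,
                                  RTGCPTR *pGCPtrWord, uint16_t *pfBitMask)
{
    *pGCPtrWord = GCPtrBase + (int16_t)(iBitNo >> 4) * 2; /* SAR 4, SHL 1 */
    *pfBitMask  = (uint16_t)(1 << (iBitNo & 0xf));        /* AND 0x0f     */
}
#endif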
6043
6044/** Opcode 0x0f 0xa3. */
6045FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6046{
6047 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6048 IEMOP_HLP_MIN_386();
6049 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6050}
6051
6052
6053/**
6054 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6055 */
6056FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6057{
6058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6059 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6060
6061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6062 {
6063 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6065
6066 switch (pVCpu->iem.s.enmEffOpSize)
6067 {
6068 case IEMMODE_16BIT:
6069 IEM_MC_BEGIN(4, 0);
6070 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6071 IEM_MC_ARG(uint16_t, u16Src, 1);
6072 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6073 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6074
6075 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6076 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6077 IEM_MC_REF_EFLAGS(pEFlags);
6078 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6079
6080 IEM_MC_ADVANCE_RIP();
6081 IEM_MC_END();
6082 return VINF_SUCCESS;
6083
6084 case IEMMODE_32BIT:
6085 IEM_MC_BEGIN(4, 0);
6086 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6087 IEM_MC_ARG(uint32_t, u32Src, 1);
6088 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6089 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6090
6091 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6092 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6093 IEM_MC_REF_EFLAGS(pEFlags);
6094 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6095
6096 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6097 IEM_MC_ADVANCE_RIP();
6098 IEM_MC_END();
6099 return VINF_SUCCESS;
6100
6101 case IEMMODE_64BIT:
6102 IEM_MC_BEGIN(4, 0);
6103 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6104 IEM_MC_ARG(uint64_t, u64Src, 1);
6105 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6106 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6107
6108 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6109 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6110 IEM_MC_REF_EFLAGS(pEFlags);
6111 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6112
6113 IEM_MC_ADVANCE_RIP();
6114 IEM_MC_END();
6115 return VINF_SUCCESS;
6116
6117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6118 }
6119 }
6120 else
6121 {
6122 switch (pVCpu->iem.s.enmEffOpSize)
6123 {
6124 case IEMMODE_16BIT:
6125 IEM_MC_BEGIN(4, 2);
6126 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6127 IEM_MC_ARG(uint16_t, u16Src, 1);
6128 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6129 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6131
6132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6133 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6134 IEM_MC_ASSIGN(cShiftArg, cShift);
6135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6136 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6137 IEM_MC_FETCH_EFLAGS(EFlags);
6138 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6139 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6140
6141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6142 IEM_MC_COMMIT_EFLAGS(EFlags);
6143 IEM_MC_ADVANCE_RIP();
6144 IEM_MC_END();
6145 return VINF_SUCCESS;
6146
6147 case IEMMODE_32BIT:
6148 IEM_MC_BEGIN(4, 2);
6149 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6150 IEM_MC_ARG(uint32_t, u32Src, 1);
6151 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6152 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6154
6155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6156 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6157 IEM_MC_ASSIGN(cShiftArg, cShift);
6158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6159 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6160 IEM_MC_FETCH_EFLAGS(EFlags);
6161 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6162 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6163
6164 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6165 IEM_MC_COMMIT_EFLAGS(EFlags);
6166 IEM_MC_ADVANCE_RIP();
6167 IEM_MC_END();
6168 return VINF_SUCCESS;
6169
6170 case IEMMODE_64BIT:
6171 IEM_MC_BEGIN(4, 2);
6172 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6173 IEM_MC_ARG(uint64_t, u64Src, 1);
6174 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6175 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6177
6178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6179 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6180 IEM_MC_ASSIGN(cShiftArg, cShift);
6181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6182 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6183 IEM_MC_FETCH_EFLAGS(EFlags);
6184 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6185 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6186
6187 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6188 IEM_MC_COMMIT_EFLAGS(EFlags);
6189 IEM_MC_ADVANCE_RIP();
6190 IEM_MC_END();
6191 return VINF_SUCCESS;
6192
6193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6194 }
6195 }
6196}
6197
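/*
 * Illustrative scalar model (hypothetical, not used by the workers) of the
 * 32-bit SHLD operation the helpers implement: the count is masked to five
 * bits, a zero count is a no-op, and bits shifted out of the destination
 * are refilled from the top of the source operand.
 */
#if 0 /* example only, not compiled */
static uint32_t iemExampleShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;
    if (!cShift)
        return uDst;
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif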
6198
6199/**
6200 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6201 */
6202FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6203{
6204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6205 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6206
6207 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6208 {
6209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6210
6211 switch (pVCpu->iem.s.enmEffOpSize)
6212 {
6213 case IEMMODE_16BIT:
6214 IEM_MC_BEGIN(4, 0);
6215 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6216 IEM_MC_ARG(uint16_t, u16Src, 1);
6217 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6218 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6219
6220 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6221 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6222 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6223 IEM_MC_REF_EFLAGS(pEFlags);
6224 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6225
6226 IEM_MC_ADVANCE_RIP();
6227 IEM_MC_END();
6228 return VINF_SUCCESS;
6229
6230 case IEMMODE_32BIT:
6231 IEM_MC_BEGIN(4, 0);
6232 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6233 IEM_MC_ARG(uint32_t, u32Src, 1);
6234 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6235 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6236
6237 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6238 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6239 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6240 IEM_MC_REF_EFLAGS(pEFlags);
6241 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6242
6243 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6244 IEM_MC_ADVANCE_RIP();
6245 IEM_MC_END();
6246 return VINF_SUCCESS;
6247
6248 case IEMMODE_64BIT:
6249 IEM_MC_BEGIN(4, 0);
6250 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6251 IEM_MC_ARG(uint64_t, u64Src, 1);
6252 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6253 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6254
6255 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6256 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6257 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6258 IEM_MC_REF_EFLAGS(pEFlags);
6259 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6260
6261 IEM_MC_ADVANCE_RIP();
6262 IEM_MC_END();
6263 return VINF_SUCCESS;
6264
6265 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6266 }
6267 }
6268 else
6269 {
6270 switch (pVCpu->iem.s.enmEffOpSize)
6271 {
6272 case IEMMODE_16BIT:
6273 IEM_MC_BEGIN(4, 2);
6274 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6275 IEM_MC_ARG(uint16_t, u16Src, 1);
6276 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6279
6280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6282 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6283 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6284 IEM_MC_FETCH_EFLAGS(EFlags);
6285 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6286 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6287
6288 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6289 IEM_MC_COMMIT_EFLAGS(EFlags);
6290 IEM_MC_ADVANCE_RIP();
6291 IEM_MC_END();
6292 return VINF_SUCCESS;
6293
6294 case IEMMODE_32BIT:
6295 IEM_MC_BEGIN(4, 2);
6296 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6297 IEM_MC_ARG(uint32_t, u32Src, 1);
6298 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6299 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6301
6302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6304 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6305 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6306 IEM_MC_FETCH_EFLAGS(EFlags);
6307 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6308 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6309
6310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6311 IEM_MC_COMMIT_EFLAGS(EFlags);
6312 IEM_MC_ADVANCE_RIP();
6313 IEM_MC_END();
6314 return VINF_SUCCESS;
6315
6316 case IEMMODE_64BIT:
6317 IEM_MC_BEGIN(4, 2);
6318 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6319 IEM_MC_ARG(uint64_t, u64Src, 1);
6320 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6321 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6323
6324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6326 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6327 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6328 IEM_MC_FETCH_EFLAGS(EFlags);
6329 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6330 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6331
6332 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6333 IEM_MC_COMMIT_EFLAGS(EFlags);
6334 IEM_MC_ADVANCE_RIP();
6335 IEM_MC_END();
6336 return VINF_SUCCESS;
6337
6338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6339 }
6340 }
6341}
6342
6343
6344
6345/** Opcode 0x0f 0xa4. */
6346FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6347{
6348 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6349 IEMOP_HLP_MIN_386();
6350 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6351}
6352
6353
6354/** Opcode 0x0f 0xa5. */
6355FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6356{
6357 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6358 IEMOP_HLP_MIN_386();
6359 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6360}
6361
6362
6363/** Opcode 0x0f 0xa8. */
6364FNIEMOP_DEF(iemOp_push_gs)
6365{
6366 IEMOP_MNEMONIC(push_gs, "push gs");
6367 IEMOP_HLP_MIN_386();
6368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6369 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6370}
6371
6372
6373/** Opcode 0x0f 0xa9. */
6374FNIEMOP_DEF(iemOp_pop_gs)
6375{
6376 IEMOP_MNEMONIC(pop_gs, "pop gs");
6377 IEMOP_HLP_MIN_386();
6378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6379 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6380}
6381
6382
6383/** Opcode 0x0f 0xaa. */
6384FNIEMOP_DEF(iemOp_rsm)
6385{
6386 IEMOP_MNEMONIC(rsm, "rsm");
6387 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6388 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6389 * intercept). */
6390 IEMOP_BITCH_ABOUT_STUB();
6391 return IEMOP_RAISE_INVALID_OPCODE();
6392}
6393
6396
6397/** Opcode 0x0f 0xab. */
6398FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6399{
6400 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6401 IEMOP_HLP_MIN_386();
6402 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6403}
6404
6405
6406/** Opcode 0x0f 0xac. */
6407FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6408{
6409 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6410 IEMOP_HLP_MIN_386();
6411 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6412}
6413
6414
6415/** Opcode 0x0f 0xad. */
6416FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6417{
6418 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6419 IEMOP_HLP_MIN_386();
6420 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6421}
6422
6423
6424/** Opcode 0x0f 0xae mem/0. */
6425FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6426{
6427 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6428 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6429 return IEMOP_RAISE_INVALID_OPCODE();
6430
6431 IEM_MC_BEGIN(3, 1);
6432 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6433 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6434 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6437 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6438 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6439 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6440 IEM_MC_END();
6441 return VINF_SUCCESS;
6442}
6443
6444
6445/** Opcode 0x0f 0xae mem/1. */
6446FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6447{
6448 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6449 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6450 return IEMOP_RAISE_INVALID_OPCODE();
6451
6452 IEM_MC_BEGIN(3, 1);
6453 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6454 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6455 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6458 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6459 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6460 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6461 IEM_MC_END();
6462 return VINF_SUCCESS;
6463}
6464
6465
6466/**
6467 * @opmaps grp15
6468 * @opcode !11/2
6469 * @oppfx none
6470 * @opcpuid sse
6471 * @opgroup og_sse_mxcsrsm
6472 * @opxcpttype 5
6473 * @optest op1=0 -> mxcsr=0
6474 * @optest op1=0x2083 -> mxcsr=0x2083
6475 * @optest op1=0xfffffffe -> value.xcpt=0xd
6476 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6477 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6478 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6479 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6480 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6481 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6482 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6483 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6484 */
6485FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6486{
6487 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6488 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6489 return IEMOP_RAISE_INVALID_OPCODE();
6490
6491 IEM_MC_BEGIN(2, 0);
6492 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6493 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6496 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6497 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6498 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6499 IEM_MC_END();
6500 return VINF_SUCCESS;
6501}
6502
6503
6504/**
6505 * @opmaps grp15
6506 * @opcode !11/3
6507 * @oppfx none
6508 * @opcpuid sse
6509 * @opgroup og_sse_mxcsrsm
6510 * @opxcpttype 5
6511 * @optest mxcsr=0 -> op1=0
6512 * @optest mxcsr=0x2083 -> op1=0x2083
6513 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6514 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6515 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6516 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6517 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6518 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6519 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6520 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6521 */
6522FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6523{
6524 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6525 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6526 return IEMOP_RAISE_INVALID_OPCODE();
6527
6528 IEM_MC_BEGIN(2, 0);
6529 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6530 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6534 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6535 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6536 IEM_MC_END();
6537 return VINF_SUCCESS;
6538}
6539
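/*
 * Guest-side usage the LDMXCSR/STMXCSR handlers above implement
 * (illustrative assembly, not emitted or assembled here):
 *
 *      stmxcsr [ebp - 4]       ; store the current 32-bit MXCSR
 *      ldmxcsr [ebp - 4]       ; load it back unchanged
 */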
6540
6541/**
6542 * @opmaps grp15
6543 * @opcode !11/4
6544 * @oppfx none
6545 * @opcpuid xsave
6546 * @opgroup og_system
6547 * @opxcpttype none
6548 */
6549FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6550{
6551 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6552 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6553 return IEMOP_RAISE_INVALID_OPCODE();
6554
6555 IEM_MC_BEGIN(3, 0);
6556 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6557 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6558 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6561 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6562 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6563 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6564 IEM_MC_END();
6565 return VINF_SUCCESS;
6566}
6567
6568
6569/**
6570 * @opmaps grp15
6571 * @opcode !11/5
6572 * @oppfx none
6573 * @opcpuid xsave
6574 * @opgroup og_system
6575 * @opxcpttype none
6576 */
6577FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6578{
6579 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6580 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6581 return IEMOP_RAISE_INVALID_OPCODE();
6582
6583 IEM_MC_BEGIN(3, 0);
6584 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6585 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6586 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6589 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6590 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6591 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6592 IEM_MC_END();
6593 return VINF_SUCCESS;
6594}
6595
6596/** Opcode 0x0f 0xae mem/6. */
6597FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6598
6599/**
6600 * @opmaps grp15
6601 * @opcode !11/7
6602 * @oppfx none
6603 * @opcpuid clfsh
6604 * @opgroup og_cachectl
6605 * @optest op1=1 ->
6606 */
6607FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6608{
6609 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6610 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6611 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6612
6613 IEM_MC_BEGIN(2, 0);
6614 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6615 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6618 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6619 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6620 IEM_MC_END();
6621 return VINF_SUCCESS;
6622}
6623
6624/**
6625 * @opmaps grp15
6626 * @opcode !11/7
6627 * @oppfx 0x66
6628 * @opcpuid clflushopt
6629 * @opgroup og_cachectl
6630 * @optest op1=1 ->
6631 */
6632FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6633{
6634 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6635 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6636 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6637
6638 IEM_MC_BEGIN(2, 0);
6639 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6640 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6643 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6644 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6645 IEM_MC_END();
6646 return VINF_SUCCESS;
6647}
6648
6649
6650/** Opcode 0x0f 0xae 11b/5. */
6651FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6652{
6653 RT_NOREF_PV(bRm);
6654 IEMOP_MNEMONIC(lfence, "lfence");
6655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6656 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6657 return IEMOP_RAISE_INVALID_OPCODE();
6658
6659 IEM_MC_BEGIN(0, 0);
6660 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6661 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6662 else
6663 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6664 IEM_MC_ADVANCE_RIP();
6665 IEM_MC_END();
6666 return VINF_SUCCESS;
6667}
6668
6669
6670/** Opcode 0x0f 0xae 11b/6. */
6671FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6672{
6673 RT_NOREF_PV(bRm);
6674 IEMOP_MNEMONIC(mfence, "mfence");
6675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6676 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6677 return IEMOP_RAISE_INVALID_OPCODE();
6678
6679 IEM_MC_BEGIN(0, 0);
6680 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6681 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6682 else
6683 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6684 IEM_MC_ADVANCE_RIP();
6685 IEM_MC_END();
6686 return VINF_SUCCESS;
6687}
6688
6689
6690/** Opcode 0x0f 0xae 11b/7. */
6691FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6692{
6693 RT_NOREF_PV(bRm);
6694 IEMOP_MNEMONIC(sfence, "sfence");
6695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6696 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6697 return IEMOP_RAISE_INVALID_OPCODE();
6698
6699 IEM_MC_BEGIN(0, 0);
6700 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6701 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6702 else
6703 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6704 IEM_MC_ADVANCE_RIP();
6705 IEM_MC_END();
6706 return VINF_SUCCESS;
6707}
6708
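/*
 * The three fence handlers above execute the real LFENCE/MFENCE/SFENCE when
 * the host has SSE2 and otherwise fall back to iemAImpl_alt_mem_fence, which
 * is assumed to provide equivalent ordering by other means. A rough C11
 * analogue of the strongest case, MFENCE (illustrative only):
 */
#if 0 /* example only, not compiled */
#include <stdatomic.h>
static void iemExampleFullFence(void)
{
    atomic_thread_fence(memory_order_seq_cst); /* orders loads and stores */
}
#endif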
6709
6710/** Opcode 0xf3 0x0f 0xae 11b/0. */
6711FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6712
6713/** Opcode 0xf3 0x0f 0xae 11b/1. */
6714FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6715
6716/** Opcode 0xf3 0x0f 0xae 11b/2. */
6717FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6718
6719/** Opcode 0xf3 0x0f 0xae 11b/3. */
6720FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6721
6722
6723/**
6724 * Group 15 jump table for register variant.
6725 */
6726IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6727{ /* pfx: none, 066h, 0f3h, 0f2h */
6728 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6729 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6730 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6731 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6732 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6733 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6734 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6735 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6736};
6737AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6738
6739
6740/**
6741 * Group 15 jump table for memory variant.
6742 */
6743IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6744{ /* pfx: none, 066h, 0f3h, 0f2h */
6745 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6746 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6747 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6748 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6749 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6750 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6751 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6752 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6753};
6754AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6755
6756
6757/** Opcode 0x0f 0xae. */
6758FNIEMOP_DEF(iemOp_Grp15)
6759{
6760 IEMOP_HLP_MIN_586(); /* Not entirely accurate, nor strictly needed, but useful for debugging 286 code. */
6761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6763 /* register, register */
6764 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6765 + pVCpu->iem.s.idxPrefix], bRm);
6766 /* memory, register */
6767 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6768 + pVCpu->iem.s.idxPrefix], bRm);
6769}
6770
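/*
 * Worked decode example for the dispatch above (values read off the table
 * layout, shown for illustration only): a 66 0F AE /7 memory form has
 * ModRM.reg = 7 and idxPrefix = 1 (the 0x66 column), so it lands on
 * g_apfnGroup15MemReg[7 * 4 + 1], i.e. iemOp_Grp15_clflushopt.
 */
#if 0 /* example only, not compiled */
    uint8_t const    iReg      = 7;                               /* ModRM.reg   */
    uint8_t const    idxPrefix = 1;                               /* 0x66 prefix */
    PFNIEMOPRM const pfnOp     = g_apfnGroup15MemReg[iReg * 4 + idxPrefix];
#endif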
6771
6772/** Opcode 0x0f 0xaf. */
6773FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6774{
6775 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6776 IEMOP_HLP_MIN_386();
6777 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6778 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6779}
6780
6781
6782/** Opcode 0x0f 0xb0. */
6783FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6784{
6785 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6786 IEMOP_HLP_MIN_486();
6787 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6788
6789 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6790 {
6791 IEMOP_HLP_DONE_DECODING();
6792 IEM_MC_BEGIN(4, 0);
6793 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6794 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6795 IEM_MC_ARG(uint8_t, u8Src, 2);
6796 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6797
6798 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6799 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6800 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6801 IEM_MC_REF_EFLAGS(pEFlags);
6802 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6803 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6804 else
6805 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6806
6807 IEM_MC_ADVANCE_RIP();
6808 IEM_MC_END();
6809 }
6810 else
6811 {
6812 IEM_MC_BEGIN(4, 3);
6813 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6814 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6815 IEM_MC_ARG(uint8_t, u8Src, 2);
6816 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6818 IEM_MC_LOCAL(uint8_t, u8Al);
6819
6820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6821 IEMOP_HLP_DONE_DECODING();
6822 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6823 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6824 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6825 IEM_MC_FETCH_EFLAGS(EFlags);
6826 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6827 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6828 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6829 else
6830 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6831
6832 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6833 IEM_MC_COMMIT_EFLAGS(EFlags);
6834 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6835 IEM_MC_ADVANCE_RIP();
6836 IEM_MC_END();
6837 }
6838 return VINF_SUCCESS;
6839}
6840
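/*
 * Simplified scalar model of the byte CMPXCHG above (hypothetical helper):
 * only ZF and the data movement are modelled here - the real
 * iemAImpl_cmpxchg_u8 also updates CF/OF/SF/AF/PF as a CMP would.
 */
#if 0 /* example only, not compiled */
static void iemExampleCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
{
    if (*puDst == *puAl)
    {
        *puDst     = uSrc;                      /* equal: store new value   */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *puAl      = *puDst;                    /* unequal: AL gets current */
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
    }
}
#endif
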
6841/** Opcode 0x0f 0xb1. */
6842FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6843{
6844 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6845 IEMOP_HLP_MIN_486();
6846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6847
6848 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6849 {
6850 IEMOP_HLP_DONE_DECODING();
6851 switch (pVCpu->iem.s.enmEffOpSize)
6852 {
6853 case IEMMODE_16BIT:
6854 IEM_MC_BEGIN(4, 0);
6855 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6856 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6857 IEM_MC_ARG(uint16_t, u16Src, 2);
6858 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6859
6860 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6861 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6862 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6863 IEM_MC_REF_EFLAGS(pEFlags);
6864 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6865 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6866 else
6867 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6868
6869 IEM_MC_ADVANCE_RIP();
6870 IEM_MC_END();
6871 return VINF_SUCCESS;
6872
6873 case IEMMODE_32BIT:
6874 IEM_MC_BEGIN(4, 0);
6875 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6876 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6877 IEM_MC_ARG(uint32_t, u32Src, 2);
6878 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6879
6880 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6881 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6882 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6883 IEM_MC_REF_EFLAGS(pEFlags);
6884 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6885 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6886 else
6887 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6888
6889 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6890 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6891 IEM_MC_ADVANCE_RIP();
6892 IEM_MC_END();
6893 return VINF_SUCCESS;
6894
6895 case IEMMODE_64BIT:
6896 IEM_MC_BEGIN(4, 0);
6897 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6898 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6899#ifdef RT_ARCH_X86
6900 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6901#else
6902 IEM_MC_ARG(uint64_t, u64Src, 2);
6903#endif
6904 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6905
6906 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6907 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6908 IEM_MC_REF_EFLAGS(pEFlags);
6909#ifdef RT_ARCH_X86
6910 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6911 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6912 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6913 else
6914 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6915#else
6916 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6917 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6918 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6919 else
6920 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6921#endif
6922
6923 IEM_MC_ADVANCE_RIP();
6924 IEM_MC_END();
6925 return VINF_SUCCESS;
6926
6927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6928 }
6929 }
6930 else
6931 {
6932 switch (pVCpu->iem.s.enmEffOpSize)
6933 {
6934 case IEMMODE_16BIT:
6935 IEM_MC_BEGIN(4, 3);
6936 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6937 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6938 IEM_MC_ARG(uint16_t, u16Src, 2);
6939 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6941 IEM_MC_LOCAL(uint16_t, u16Ax);
6942
6943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6944 IEMOP_HLP_DONE_DECODING();
6945 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6946 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6947 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6948 IEM_MC_FETCH_EFLAGS(EFlags);
6949 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6950 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6951 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6952 else
6953 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6954
6955 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6956 IEM_MC_COMMIT_EFLAGS(EFlags);
6957 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6958 IEM_MC_ADVANCE_RIP();
6959 IEM_MC_END();
6960 return VINF_SUCCESS;
6961
6962 case IEMMODE_32BIT:
6963 IEM_MC_BEGIN(4, 3);
6964 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6965 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6966 IEM_MC_ARG(uint32_t, u32Src, 2);
6967 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6969 IEM_MC_LOCAL(uint32_t, u32Eax);
6970
6971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6972 IEMOP_HLP_DONE_DECODING();
6973 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6974 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6975 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6976 IEM_MC_FETCH_EFLAGS(EFlags);
6977 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6978 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6979 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6980 else
6981 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6982
6983 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6984 IEM_MC_COMMIT_EFLAGS(EFlags);
6985 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6986 IEM_MC_ADVANCE_RIP();
6987 IEM_MC_END();
6988 return VINF_SUCCESS;
6989
6990 case IEMMODE_64BIT:
6991 IEM_MC_BEGIN(4, 3);
6992 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6993 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6994#ifdef RT_ARCH_X86
6995 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6996#else
6997 IEM_MC_ARG(uint64_t, u64Src, 2);
6998#endif
6999 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7001 IEM_MC_LOCAL(uint64_t, u64Rax);
7002
7003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7004 IEMOP_HLP_DONE_DECODING();
7005 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7006 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7007 IEM_MC_FETCH_EFLAGS(EFlags);
7008 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7009#ifdef RT_ARCH_X86
7010 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7011 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7012 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7013 else
7014 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7015#else
7016 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7017 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7018 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7019 else
7020 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7021#endif
7022
7023 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7024 IEM_MC_COMMIT_EFLAGS(EFlags);
7025 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7026 IEM_MC_ADVANCE_RIP();
7027 IEM_MC_END();
7028 return VINF_SUCCESS;
7029
7030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7031 }
7032 }
7033}
7034
7035
7036FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7037{
7038 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7039 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7040
7041 switch (pVCpu->iem.s.enmEffOpSize)
7042 {
7043 case IEMMODE_16BIT:
7044 IEM_MC_BEGIN(5, 1);
7045 IEM_MC_ARG(uint16_t, uSel, 0);
7046 IEM_MC_ARG(uint16_t, offSeg, 1);
7047 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7048 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7049 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7050 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7054 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7055 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7056 IEM_MC_END();
7057 return VINF_SUCCESS;
7058
7059 case IEMMODE_32BIT:
7060 IEM_MC_BEGIN(5, 1);
7061 IEM_MC_ARG(uint16_t, uSel, 0);
7062 IEM_MC_ARG(uint32_t, offSeg, 1);
7063 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7064 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7065 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7066 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7069 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7070 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7071 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7072 IEM_MC_END();
7073 return VINF_SUCCESS;
7074
7075 case IEMMODE_64BIT:
7076 IEM_MC_BEGIN(5, 1);
7077 IEM_MC_ARG(uint16_t, uSel, 0);
7078 IEM_MC_ARG(uint64_t, offSeg, 1);
7079 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7080 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7081 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7082 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7085 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7086 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7087 else
7088 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7089 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7090 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7091 IEM_MC_END();
7092 return VINF_SUCCESS;
7093
7094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7095 }
7096}
7097
7098
7099/** Opcode 0x0f 0xb2. */
7100FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7101{
7102 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7103 IEMOP_HLP_MIN_386();
7104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7105 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7106 return IEMOP_RAISE_INVALID_OPCODE();
7107 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7108}
7109
7110
7111/** Opcode 0x0f 0xb3. */
7112FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7113{
7114 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7115 IEMOP_HLP_MIN_386();
7116 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7117}
7118
7119
7120/** Opcode 0x0f 0xb4. */
7121FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7122{
7123 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7124 IEMOP_HLP_MIN_386();
7125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7127 return IEMOP_RAISE_INVALID_OPCODE();
7128 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7129}
7130
7131
7132/** Opcode 0x0f 0xb5. */
7133FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7134{
7135 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7136 IEMOP_HLP_MIN_386();
7137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7138 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7139 return IEMOP_RAISE_INVALID_OPCODE();
7140 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7141}
7142
7143
7144/** Opcode 0x0f 0xb6. */
7145FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7146{
7147 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7148 IEMOP_HLP_MIN_386();
7149
7150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7151
7152 /*
7153 * If rm is denoting a register, no more instruction bytes.
7154 */
7155 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7156 {
7157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7158 switch (pVCpu->iem.s.enmEffOpSize)
7159 {
7160 case IEMMODE_16BIT:
7161 IEM_MC_BEGIN(0, 1);
7162 IEM_MC_LOCAL(uint16_t, u16Value);
7163 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7164 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7165 IEM_MC_ADVANCE_RIP();
7166 IEM_MC_END();
7167 return VINF_SUCCESS;
7168
7169 case IEMMODE_32BIT:
7170 IEM_MC_BEGIN(0, 1);
7171 IEM_MC_LOCAL(uint32_t, u32Value);
7172 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7173 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7174 IEM_MC_ADVANCE_RIP();
7175 IEM_MC_END();
7176 return VINF_SUCCESS;
7177
7178 case IEMMODE_64BIT:
7179 IEM_MC_BEGIN(0, 1);
7180 IEM_MC_LOCAL(uint64_t, u64Value);
7181 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7182 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7183 IEM_MC_ADVANCE_RIP();
7184 IEM_MC_END();
7185 return VINF_SUCCESS;
7186
7187 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7188 }
7189 }
7190 else
7191 {
7192 /*
7193 * We're loading a register from memory.
7194 */
7195 switch (pVCpu->iem.s.enmEffOpSize)
7196 {
7197 case IEMMODE_16BIT:
7198 IEM_MC_BEGIN(0, 2);
7199 IEM_MC_LOCAL(uint16_t, u16Value);
7200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7203 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7204 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7205 IEM_MC_ADVANCE_RIP();
7206 IEM_MC_END();
7207 return VINF_SUCCESS;
7208
7209 case IEMMODE_32BIT:
7210 IEM_MC_BEGIN(0, 2);
7211 IEM_MC_LOCAL(uint32_t, u32Value);
7212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7215 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7216 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7217 IEM_MC_ADVANCE_RIP();
7218 IEM_MC_END();
7219 return VINF_SUCCESS;
7220
7221 case IEMMODE_64BIT:
7222 IEM_MC_BEGIN(0, 2);
7223 IEM_MC_LOCAL(uint64_t, u64Value);
7224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7227 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7228 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7229 IEM_MC_ADVANCE_RIP();
7230 IEM_MC_END();
7231 return VINF_SUCCESS;
7232
7233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7234 }
7235 }
7236}
7237
7238
7239/** Opcode 0x0f 0xb7. */
7240FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7241{
7242 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7243 IEMOP_HLP_MIN_386();
7244
7245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7246
7247 /** @todo Not entirely sure how the operand size prefix is handled here,
7248 * assuming that it will be ignored. Would be nice to have a few
7249 * tests for this. */
7250 /*
7251 * If rm is denoting a register, no more instruction bytes.
7252 */
7253 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7254 {
7255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7256 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7257 {
7258 IEM_MC_BEGIN(0, 1);
7259 IEM_MC_LOCAL(uint32_t, u32Value);
7260 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7261 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7262 IEM_MC_ADVANCE_RIP();
7263 IEM_MC_END();
7264 }
7265 else
7266 {
7267 IEM_MC_BEGIN(0, 1);
7268 IEM_MC_LOCAL(uint64_t, u64Value);
7269 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7270 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7271 IEM_MC_ADVANCE_RIP();
7272 IEM_MC_END();
7273 }
7274 }
7275 else
7276 {
7277 /*
7278 * We're loading a register from memory.
7279 */
7280 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7281 {
7282 IEM_MC_BEGIN(0, 2);
7283 IEM_MC_LOCAL(uint32_t, u32Value);
7284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7287 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7288 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7289 IEM_MC_ADVANCE_RIP();
7290 IEM_MC_END();
7291 }
7292 else
7293 {
7294 IEM_MC_BEGIN(0, 2);
7295 IEM_MC_LOCAL(uint64_t, u64Value);
7296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7299 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7300 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7301 IEM_MC_ADVANCE_RIP();
7302 IEM_MC_END();
7303 }
7304 }
7305 return VINF_SUCCESS;
7306}
7307
7308
7309/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7310FNIEMOP_UD_STUB(iemOp_jmpe);
7311/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7312FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7313
7314
7315/**
7316 * @opcode 0xb9
7317 * @opinvalid intel-modrm
7318 * @optest ->
7319 */
7320FNIEMOP_DEF(iemOp_Grp10)
7321{
7322 /*
7323 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
7324 * the ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7325 */
7326 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7327 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
7328 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7329}
7330
7331
7332/** Opcode 0x0f 0xba. */
7333FNIEMOP_DEF(iemOp_Grp8)
7334{
7335 IEMOP_HLP_MIN_386();
7336 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7337 PCIEMOPBINSIZES pImpl;
7338 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7339 {
7340 case 0: case 1: case 2: case 3:
7341 /* Both AMD and Intel want full modr/m decoding and imm8. */
7342 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7343 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7344 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7345 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7346 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7348 }
7349 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7350
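 /* Note: with the imm8 forms the bit offset is masked to the operand width
    (imm8 & 15/31/63 below), so the memory variants need no extra effective
    address adjustment. */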
7351 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7352 {
7353 /* register destination. */
7354 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7356
7357 switch (pVCpu->iem.s.enmEffOpSize)
7358 {
7359 case IEMMODE_16BIT:
7360 IEM_MC_BEGIN(3, 0);
7361 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7362 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7363 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7364
7365 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7366 IEM_MC_REF_EFLAGS(pEFlags);
7367 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7368
7369 IEM_MC_ADVANCE_RIP();
7370 IEM_MC_END();
7371 return VINF_SUCCESS;
7372
7373 case IEMMODE_32BIT:
7374 IEM_MC_BEGIN(3, 0);
7375 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7376 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7377 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7378
7379 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7380 IEM_MC_REF_EFLAGS(pEFlags);
7381 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7382
7383 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7384 IEM_MC_ADVANCE_RIP();
7385 IEM_MC_END();
7386 return VINF_SUCCESS;
7387
7388 case IEMMODE_64BIT:
7389 IEM_MC_BEGIN(3, 0);
7390 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7391 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7392 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7393
7394 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7395 IEM_MC_REF_EFLAGS(pEFlags);
7396 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7397
7398 IEM_MC_ADVANCE_RIP();
7399 IEM_MC_END();
7400 return VINF_SUCCESS;
7401
7402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7403 }
7404 }
7405 else
7406 {
7407 /* memory destination. */
7408
7409 uint32_t fAccess;
7410 if (pImpl->pfnLockedU16)
7411 fAccess = IEM_ACCESS_DATA_RW;
7412 else /* BT */
7413 fAccess = IEM_ACCESS_DATA_R;
7414
7415 /** @todo test negative bit offsets! */
7416 switch (pVCpu->iem.s.enmEffOpSize)
7417 {
7418 case IEMMODE_16BIT:
7419 IEM_MC_BEGIN(3, 1);
7420 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7421 IEM_MC_ARG(uint16_t, u16Src, 1);
7422 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7424
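 /* cbImm=1: the trailing imm8 must be accounted for when computing
    RIP-relative effective addresses. */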
7425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7426 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7427 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7428 if (pImpl->pfnLockedU16)
7429 IEMOP_HLP_DONE_DECODING();
7430 else
7431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7432 IEM_MC_FETCH_EFLAGS(EFlags);
7433 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7434 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7435 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7436 else
7437 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7438 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7439
7440 IEM_MC_COMMIT_EFLAGS(EFlags);
7441 IEM_MC_ADVANCE_RIP();
7442 IEM_MC_END();
7443 return VINF_SUCCESS;
7444
7445 case IEMMODE_32BIT:
7446 IEM_MC_BEGIN(3, 1);
7447 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7448 IEM_MC_ARG(uint32_t, u32Src, 1);
7449 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7451
7452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7453 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7454 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7455 if (pImpl->pfnLockedU16)
7456 IEMOP_HLP_DONE_DECODING();
7457 else
7458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7459 IEM_MC_FETCH_EFLAGS(EFlags);
7460 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7461 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7462 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7463 else
7464 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7465 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7466
7467 IEM_MC_COMMIT_EFLAGS(EFlags);
7468 IEM_MC_ADVANCE_RIP();
7469 IEM_MC_END();
7470 return VINF_SUCCESS;
7471
7472 case IEMMODE_64BIT:
7473 IEM_MC_BEGIN(3, 1);
7474 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7475 IEM_MC_ARG(uint64_t, u64Src, 1);
7476 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7478
7479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7480 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7481 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7482 if (pImpl->pfnLockedU16)
7483 IEMOP_HLP_DONE_DECODING();
7484 else
7485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7486 IEM_MC_FETCH_EFLAGS(EFlags);
7487 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7488 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7489 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7490 else
7491 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7492 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7493
7494 IEM_MC_COMMIT_EFLAGS(EFlags);
7495 IEM_MC_ADVANCE_RIP();
7496 IEM_MC_END();
7497 return VINF_SUCCESS;
7498
7499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7500 }
7501 }
7502}
7503
7504
7505/** Opcode 0x0f 0xbb. */
7506FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7507{
7508 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7509 IEMOP_HLP_MIN_386();
7510 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7511}
7512
7513
7514/** Opcode 0x0f 0xbc. */
7515FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7516{
7517 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7518 IEMOP_HLP_MIN_386();
7519 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7520 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7521}
7522
7523
7524/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7525FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7526
7527
7528/** Opcode 0x0f 0xbd. */
7529FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7530{
7531 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7532 IEMOP_HLP_MIN_386();
7533 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7534 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7535}
7536
7537
7538/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7539FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7540
7541
7542/** Opcode 0x0f 0xbe. */
7543FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7544{
7545 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7546 IEMOP_HLP_MIN_386();
7547
7548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7549
7550 /*
7551 * If rm is denoting a register, no more instruction bytes.
7552 */
7553 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7554 {
7555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7556 switch (pVCpu->iem.s.enmEffOpSize)
7557 {
7558 case IEMMODE_16BIT:
7559 IEM_MC_BEGIN(0, 1);
7560 IEM_MC_LOCAL(uint16_t, u16Value);
7561 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7562 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7563 IEM_MC_ADVANCE_RIP();
7564 IEM_MC_END();
7565 return VINF_SUCCESS;
7566
7567 case IEMMODE_32BIT:
7568 IEM_MC_BEGIN(0, 1);
7569 IEM_MC_LOCAL(uint32_t, u32Value);
7570 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7571 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7572 IEM_MC_ADVANCE_RIP();
7573 IEM_MC_END();
7574 return VINF_SUCCESS;
7575
7576 case IEMMODE_64BIT:
7577 IEM_MC_BEGIN(0, 1);
7578 IEM_MC_LOCAL(uint64_t, u64Value);
7579 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7580 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7581 IEM_MC_ADVANCE_RIP();
7582 IEM_MC_END();
7583 return VINF_SUCCESS;
7584
7585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7586 }
7587 }
7588 else
7589 {
7590 /*
7591 * We're loading a register from memory.
7592 */
7593 switch (pVCpu->iem.s.enmEffOpSize)
7594 {
7595 case IEMMODE_16BIT:
7596 IEM_MC_BEGIN(0, 2);
7597 IEM_MC_LOCAL(uint16_t, u16Value);
7598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7601 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7602 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7603 IEM_MC_ADVANCE_RIP();
7604 IEM_MC_END();
7605 return VINF_SUCCESS;
7606
7607 case IEMMODE_32BIT:
7608 IEM_MC_BEGIN(0, 2);
7609 IEM_MC_LOCAL(uint32_t, u32Value);
7610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7613 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7614 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7615 IEM_MC_ADVANCE_RIP();
7616 IEM_MC_END();
7617 return VINF_SUCCESS;
7618
7619 case IEMMODE_64BIT:
7620 IEM_MC_BEGIN(0, 2);
7621 IEM_MC_LOCAL(uint64_t, u64Value);
7622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7625 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7626 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7627 IEM_MC_ADVANCE_RIP();
7628 IEM_MC_END();
7629 return VINF_SUCCESS;
7630
7631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7632 }
7633 }
7634}
7635
7636
7637/** Opcode 0x0f 0xbf. */
7638FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7639{
7640 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7641 IEMOP_HLP_MIN_386();
7642
7643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7644
7645 /** @todo Not entirely sure how the operand size prefix is handled here,
7646 * assuming that it will be ignored. Would be nice to have a few
7647 * tests for this. */
7648 /*
7649 * If rm is denoting a register, no more instruction bytes.
7650 */
7651 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7652 {
7653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7654 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7655 {
7656 IEM_MC_BEGIN(0, 1);
7657 IEM_MC_LOCAL(uint32_t, u32Value);
7658 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7659 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7660 IEM_MC_ADVANCE_RIP();
7661 IEM_MC_END();
7662 }
7663 else
7664 {
7665 IEM_MC_BEGIN(0, 1);
7666 IEM_MC_LOCAL(uint64_t, u64Value);
7667 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7668 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7669 IEM_MC_ADVANCE_RIP();
7670 IEM_MC_END();
7671 }
7672 }
7673 else
7674 {
7675 /*
7676 * We're loading a register from memory.
7677 */
7678 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7679 {
7680 IEM_MC_BEGIN(0, 2);
7681 IEM_MC_LOCAL(uint32_t, u32Value);
7682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7685 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7686 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7687 IEM_MC_ADVANCE_RIP();
7688 IEM_MC_END();
7689 }
7690 else
7691 {
7692 IEM_MC_BEGIN(0, 2);
7693 IEM_MC_LOCAL(uint64_t, u64Value);
7694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7697 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7698 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7699 IEM_MC_ADVANCE_RIP();
7700 IEM_MC_END();
7701 }
7702 }
7703 return VINF_SUCCESS;
7704}
7705
7706
7707/** Opcode 0x0f 0xc0. */
7708FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7709{
7710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7711 IEMOP_HLP_MIN_486();
7712 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7713
7714 /*
7715 * If rm is denoting a register, no more instruction bytes.
7716 */
7717 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7718 {
7719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7720
7721 IEM_MC_BEGIN(3, 0);
7722 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7723 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7724 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7725
7726 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7727 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7728 IEM_MC_REF_EFLAGS(pEFlags);
7729 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7730
7731 IEM_MC_ADVANCE_RIP();
7732 IEM_MC_END();
7733 }
7734 else
7735 {
7736 /*
7737 * We're accessing memory.
7738 */
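 /* XADD: the worker leaves the sum in *pu8Dst and the old destination
    value in the register copy, which is written back to the source
    register below. */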
7739 IEM_MC_BEGIN(3, 3);
7740 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7741 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7742 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7743 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7745
7746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7747 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7748 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7749 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7750 IEM_MC_FETCH_EFLAGS(EFlags);
7751 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7752 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7753 else
7754 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7755
7756 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7757 IEM_MC_COMMIT_EFLAGS(EFlags);
7758 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7759 IEM_MC_ADVANCE_RIP();
7760 IEM_MC_END();
7761 return VINF_SUCCESS;
7762 }
7763 return VINF_SUCCESS;
7764}
7765
7766
7767/** Opcode 0x0f 0xc1. */
7768FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7769{
7770 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7771 IEMOP_HLP_MIN_486();
7772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7773
7774 /*
7775 * If rm is denoting a register, no more instruction bytes.
7776 */
7777 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7778 {
7779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7780
7781 switch (pVCpu->iem.s.enmEffOpSize)
7782 {
7783 case IEMMODE_16BIT:
7784 IEM_MC_BEGIN(3, 0);
7785 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7786 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7787 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7788
7789 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7790 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7791 IEM_MC_REF_EFLAGS(pEFlags);
7792 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7793
7794 IEM_MC_ADVANCE_RIP();
7795 IEM_MC_END();
7796 return VINF_SUCCESS;
7797
7798 case IEMMODE_32BIT:
7799 IEM_MC_BEGIN(3, 0);
7800 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7801 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7802 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7803
7804 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7805 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7806 IEM_MC_REF_EFLAGS(pEFlags);
7807 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7808
7809 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7810 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7811 IEM_MC_ADVANCE_RIP();
7812 IEM_MC_END();
7813 return VINF_SUCCESS;
7814
7815 case IEMMODE_64BIT:
7816 IEM_MC_BEGIN(3, 0);
7817 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7818 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7819 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7820
7821 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7822 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7823 IEM_MC_REF_EFLAGS(pEFlags);
7824 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7825
7826 IEM_MC_ADVANCE_RIP();
7827 IEM_MC_END();
7828 return VINF_SUCCESS;
7829
7830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7831 }
7832 }
7833 else
7834 {
7835 /*
7836 * We're accessing memory.
7837 */
7838 switch (pVCpu->iem.s.enmEffOpSize)
7839 {
7840 case IEMMODE_16BIT:
7841 IEM_MC_BEGIN(3, 3);
7842 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7843 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7844 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7845 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7847
7848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7849 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7850 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7851 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7852 IEM_MC_FETCH_EFLAGS(EFlags);
7853 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7854 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7855 else
7856 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7857
7858 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7859 IEM_MC_COMMIT_EFLAGS(EFlags);
7860 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7861 IEM_MC_ADVANCE_RIP();
7862 IEM_MC_END();
7863 return VINF_SUCCESS;
7864
7865 case IEMMODE_32BIT:
7866 IEM_MC_BEGIN(3, 3);
7867 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7868 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7869 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7870 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7872
7873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7874 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7875 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7876 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7877 IEM_MC_FETCH_EFLAGS(EFlags);
7878 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7879 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7880 else
7881 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7882
7883 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7884 IEM_MC_COMMIT_EFLAGS(EFlags);
7885 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7886 IEM_MC_ADVANCE_RIP();
7887 IEM_MC_END();
7888 return VINF_SUCCESS;
7889
7890 case IEMMODE_64BIT:
7891 IEM_MC_BEGIN(3, 3);
7892 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7893 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7894 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7895 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7897
7898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7899 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7900 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7901 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7902 IEM_MC_FETCH_EFLAGS(EFlags);
7903 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7904 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7905 else
7906 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7907
7908 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7909 IEM_MC_COMMIT_EFLAGS(EFlags);
7910 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7911 IEM_MC_ADVANCE_RIP();
7912 IEM_MC_END();
7913 return VINF_SUCCESS;
7914
7915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7916 }
7917 }
7918}
7919
7920
7921/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7922FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7923/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7924FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7925/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7926FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7927/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7928FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7929
7930
7931/** Opcode 0x0f 0xc3. */
7932FNIEMOP_DEF(iemOp_movnti_My_Gy)
7933{
7934 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
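 /* Note: the non-temporal store hint is not modelled; the value is written
    back as an ordinary store. */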
7935
7936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7937
7938 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7939 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7940 {
7941 switch (pVCpu->iem.s.enmEffOpSize)
7942 {
7943 case IEMMODE_32BIT:
7944 IEM_MC_BEGIN(0, 2);
7945 IEM_MC_LOCAL(uint32_t, u32Value);
7946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7947
7948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7950 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7951 return IEMOP_RAISE_INVALID_OPCODE();
7952
7953 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7954 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7955 IEM_MC_ADVANCE_RIP();
7956 IEM_MC_END();
7957 break;
7958
7959 case IEMMODE_64BIT:
7960 IEM_MC_BEGIN(0, 2);
7961 IEM_MC_LOCAL(uint64_t, u64Value);
7962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7963
7964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7966 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7967 return IEMOP_RAISE_INVALID_OPCODE();
7968
7969 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7970 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7971 IEM_MC_ADVANCE_RIP();
7972 IEM_MC_END();
7973 break;
7974
7975 case IEMMODE_16BIT:
7976 /** @todo check this form. */
7977 return IEMOP_RAISE_INVALID_OPCODE();
7978 }
7979 }
7980 else
7981 return IEMOP_RAISE_INVALID_OPCODE();
7982 return VINF_SUCCESS;
7983}
7984/* Opcode 0x66 0x0f 0xc3 - invalid */
7985/* Opcode 0xf3 0x0f 0xc3 - invalid */
7986/* Opcode 0xf2 0x0f 0xc3 - invalid */
7987
7988/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7989FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7990/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7991FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7992/* Opcode 0xf3 0x0f 0xc4 - invalid */
7993/* Opcode 0xf2 0x0f 0xc4 - invalid */
7994
7995/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7996FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7997/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7998FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7999/* Opcode 0xf3 0x0f 0xc5 - invalid */
8000/* Opcode 0xf2 0x0f 0xc5 - invalid */
8001
8002/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8003FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8004/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8005FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8006/* Opcode 0xf3 0x0f 0xc6 - invalid */
8007/* Opcode 0xf2 0x0f 0xc6 - invalid */
8008
8009
8010/** Opcode 0x0f 0xc7 !11/1. */
8011FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8012{
8013 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8014
8015 IEM_MC_BEGIN(4, 3);
8016 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8017 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8018 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8019 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8020 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8021 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8023
8024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8025 IEMOP_HLP_DONE_DECODING();
8026 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8027
8028 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8029 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8030 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8031
8032 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8033 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8034 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8035
8036 IEM_MC_FETCH_EFLAGS(EFlags);
8037 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8038 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8039 else
8040 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8041
8042 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8043 IEM_MC_COMMIT_EFLAGS(EFlags);
8044 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8045 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8046 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8047 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8048 IEM_MC_ENDIF();
8049 IEM_MC_ADVANCE_RIP();
8050
8051 IEM_MC_END();
8052 return VINF_SUCCESS;
8053}
8054
8055
8056/** Opcode REX.W 0x0f 0xc7 !11/1. */
8057FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8058{
8059 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8060 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8061 {
8062#if 0
8063 RT_NOREF(bRm);
8064 IEMOP_BITCH_ABOUT_STUB();
8065 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8066#else
8067 IEM_MC_BEGIN(4, 3);
8068 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8069 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8070 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8071 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8072 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8073 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8075
8076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8077 IEMOP_HLP_DONE_DECODING();
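 /* CMPXCHG16B requires its memory operand to be 16-byte aligned and
    raises #GP(0) otherwise. */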
8078 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8079 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8080
8081 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8082 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8083 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8084
8085 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8086 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8087 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8088
8089 IEM_MC_FETCH_EFLAGS(EFlags);
8090# ifdef RT_ARCH_AMD64
8091 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8092 {
8093 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8094 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8095 else
8096 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8097 }
8098 else
8099# endif
8100 {
8101 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8102 accesses that are not at all atomic, which works fine in a uni-CPU guest
8103 configuration (ignoring DMA). If guest SMP is active we have no choice
8104 but to use a rendezvous callback here. Sigh. */
8105 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8106 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8107 else
8108 {
8109 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8110 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8111 }
8112 }
8113
8114 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8115 IEM_MC_COMMIT_EFLAGS(EFlags);
8116 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8117 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8118 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8119 IEM_MC_ENDIF();
8120 IEM_MC_ADVANCE_RIP();
8121
8122 IEM_MC_END();
8123 return VINF_SUCCESS;
8124#endif
8125 }
8126 Log(("cmpxchg16b -> #UD\n"));
8127 return IEMOP_RAISE_INVALID_OPCODE();
8128}
8129
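/** Opcode 0x0f 0xc7 !11/1: CMPXCHG16B when REX.W is present, CMPXCHG8B otherwise. */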
8130FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8131{
8132 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8133 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8134 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8135}
8136
8137/** Opcode 0x0f 0xc7 11/6. */
8138FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8139
8140/** Opcode 0x0f 0xc7 !11/6. */
8141FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8142
8143/** Opcode 0x66 0x0f 0xc7 !11/6. */
8144FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8145
8146/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8147FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8148
8149/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8150FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8151
8152/** Opcode 0x0f 0xc7 11/7. */
8153FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8154
8155
8156/**
8157 * Group 9 jump table for register variant.
8158 */
8159IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8160{ /* pfx: none, 066h, 0f3h, 0f2h */
8161 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8162 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8163 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8164 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8165 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8166 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8167 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8168 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8169};
8170AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8171
8172
8173/**
8174 * Group 9 jump table for memory variant.
8175 */
8176IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8177{ /* pfx: none, 066h, 0f3h, 0f2h */
8178 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8179 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8180 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8181 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8182 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8183 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8184 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8185 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8186};
8187AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8188
8189
8190/** Opcode 0x0f 0xc7. */
8191FNIEMOP_DEF(iemOp_Grp9)
8192{
8193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
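 /* The group 9 tables are indexed by ModR/M.reg * 4 + prefix index, i.e.
    four entries (none, 66h, F3h, F2h) per /r value. */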
8194 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8195 /* register, register */
8196 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8197 + pVCpu->iem.s.idxPrefix], bRm);
8198 /* memory, register */
8199 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8200 + pVCpu->iem.s.idxPrefix], bRm);
8201}
8202
8203
8204/**
8205 * Common 'bswap register' helper.
8206 */
8207FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8208{
8209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8210 switch (pVCpu->iem.s.enmEffOpSize)
8211 {
8212 case IEMMODE_16BIT:
8213 IEM_MC_BEGIN(1, 0);
8214 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8215 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
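 /* BSWAP with a 16-bit operand is undefined; a dedicated u16 worker
    supplies the behaviour we emulate. */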
8216 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8217 IEM_MC_ADVANCE_RIP();
8218 IEM_MC_END();
8219 return VINF_SUCCESS;
8220
8221 case IEMMODE_32BIT:
8222 IEM_MC_BEGIN(1, 0);
8223 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8224 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8225 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8226 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8227 IEM_MC_ADVANCE_RIP();
8228 IEM_MC_END();
8229 return VINF_SUCCESS;
8230
8231 case IEMMODE_64BIT:
8232 IEM_MC_BEGIN(1, 0);
8233 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8234 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8235 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8236 IEM_MC_ADVANCE_RIP();
8237 IEM_MC_END();
8238 return VINF_SUCCESS;
8239
8240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8241 }
8242}
8243
8244
8245/** Opcode 0x0f 0xc8. */
8246FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8247{
8248 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8249 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8250 prefix; REX.B appears to be the correct prefix, however. For a parallel
8251 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8252 IEMOP_HLP_MIN_486();
8253 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8254}
8255
8256
8257/** Opcode 0x0f 0xc9. */
8258FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8259{
8260 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8261 IEMOP_HLP_MIN_486();
8262 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8263}
8264
8265
8266/** Opcode 0x0f 0xca. */
8267FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8268{
8269 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8270 IEMOP_HLP_MIN_486();
8271 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8272}
8273
8274
8275/** Opcode 0x0f 0xcb. */
8276FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8277{
8278 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8279 IEMOP_HLP_MIN_486();
8280 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8281}
8282
8283
8284/** Opcode 0x0f 0xcc. */
8285FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8286{
8287 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8288 IEMOP_HLP_MIN_486();
8289 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8290}
8291
8292
8293/** Opcode 0x0f 0xcd. */
8294FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8295{
8296 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8297 IEMOP_HLP_MIN_486();
8298 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8299}
8300
8301
8302/** Opcode 0x0f 0xce. */
8303FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8304{
8305 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8306 IEMOP_HLP_MIN_486();
8307 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8308}
8309
8310
8311/** Opcode 0x0f 0xcf. */
8312FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8313{
8314 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8315 IEMOP_HLP_MIN_486();
8316 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8317}
8318
8319
8320/* Opcode 0x0f 0xd0 - invalid */
8321/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8322FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8323/* Opcode 0xf3 0x0f 0xd0 - invalid */
8324/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8325FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8326
8327/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8328FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8329/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8330FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8331/* Opcode 0xf3 0x0f 0xd1 - invalid */
8332/* Opcode 0xf2 0x0f 0xd1 - invalid */
8333
8334/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8335FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8336/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8337FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8338/* Opcode 0xf3 0x0f 0xd2 - invalid */
8339/* Opcode 0xf2 0x0f 0xd2 - invalid */
8340
8341/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8342FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8343/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8344FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8345/* Opcode 0xf3 0x0f 0xd3 - invalid */
8346/* Opcode 0xf2 0x0f 0xd3 - invalid */
8347
8348/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8349FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8350/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8351FNIEMOP_STUB(iemOp_paddq_Vx_W);
8352/* Opcode 0xf3 0x0f 0xd4 - invalid */
8353/* Opcode 0xf2 0x0f 0xd4 - invalid */
8354
8355/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8356FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8357/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8358FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8359/* Opcode 0xf3 0x0f 0xd5 - invalid */
8360/* Opcode 0xf2 0x0f 0xd5 - invalid */
8361
8362/* Opcode 0x0f 0xd6 - invalid */
8363
8364/**
8365 * @opcode 0xd6
8366 * @oppfx 0x66
8367 * @opcpuid sse2
8368 * @opgroup og_sse2_pcksclr_datamove
8369 * @opxcpttype none
8370 * @optest op1=-1 op2=2 -> op1=2
8371 * @optest op1=0 op2=-42 -> op1=-42
8372 */
8373FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8374{
8375 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8377 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8378 {
8379 /*
8380 * Register, register.
8381 */
8382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8383 IEM_MC_BEGIN(0, 2);
8384 IEM_MC_LOCAL(uint64_t, uSrc);
8385
8386 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8387 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8388
8389 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8390 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8391
8392 IEM_MC_ADVANCE_RIP();
8393 IEM_MC_END();
8394 }
8395 else
8396 {
8397 /*
8398 * Memory, register.
8399 */
8400 IEM_MC_BEGIN(0, 2);
8401 IEM_MC_LOCAL(uint64_t, uSrc);
8402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8403
8404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8406 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8407 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8408
8409 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8410 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8411
8412 IEM_MC_ADVANCE_RIP();
8413 IEM_MC_END();
8414 }
8415 return VINF_SUCCESS;
8416}
8417
8418
8419/**
8420 * @opcode 0xd6
8421 * @opcodesub 11 mr/reg
8422 * @oppfx f3
8423 * @opcpuid sse2
8424 * @opgroup og_sse2_simdint_datamove
8425 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8426 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8427 */
8428FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8429{
8430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8431 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8432 {
8433 /*
8434 * Register, register.
8435 */
8436 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8438 IEM_MC_BEGIN(0, 1);
8439 IEM_MC_LOCAL(uint64_t, uSrc);
8440
8441 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8442 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8443
8444 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8445 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
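 /* Touching MMX state switches the FPU to MMX mode; cf. the ftw=0xff
    values in the tests above. */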
8446 IEM_MC_FPU_TO_MMX_MODE();
8447
8448 IEM_MC_ADVANCE_RIP();
8449 IEM_MC_END();
8450 return VINF_SUCCESS;
8451 }
8452
8453 /**
8454 * @opdone
8455 * @opmnemonic udf30fd6mem
8456 * @opcode 0xd6
8457 * @opcodesub !11 mr/reg
8458 * @oppfx f3
8459 * @opunused intel-modrm
8460 * @opcpuid sse
8461 * @optest ->
8462 */
8463 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8464}
8465
8466
8467/**
8468 * @opcode 0xd6
8469 * @opcodesub 11 mr/reg
8470 * @oppfx f2
8471 * @opcpuid sse2
8472 * @opgroup og_sse2_simdint_datamove
8473 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8474 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8475 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8476 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8477 * @optest op1=-42 op2=0xfedcba9876543210
8478 * -> op1=0xfedcba9876543210 ftw=0xff
8479 */
8480FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8481{
8482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8483 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8484 {
8485 /*
8486 * Register, register.
8487 */
8488 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8490 IEM_MC_BEGIN(0, 1);
8491 IEM_MC_LOCAL(uint64_t, uSrc);
8492
8493 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8494 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8495
8496 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8497 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8498 IEM_MC_FPU_TO_MMX_MODE();
8499
8500 IEM_MC_ADVANCE_RIP();
8501 IEM_MC_END();
8502 return VINF_SUCCESS;
8503 }
8504
8505 /**
8506 * @opdone
8507 * @opmnemonic udf20fd6mem
8508 * @opcode 0xd6
8509 * @opcodesub !11 mr/reg
8510 * @oppfx f2
8511 * @opunused intel-modrm
8512 * @opcpuid sse
8513 * @optest ->
8514 */
8515 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8516}
8517
8518/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8519FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8520{
8521 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8522 /** @todo testcase: Check that the instruction implicitly clears the high
8523 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8524 * and opcode modifications are made to work with the whole width (not
8525 * just 128). */
8526 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8527 /* Docs say register only. */
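 /* PMOVMSKB gathers the most significant bit of each source byte into the
    low bits of the destination general register. */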
8528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8529 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8530 {
8531 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8532 IEM_MC_BEGIN(2, 0);
8533 IEM_MC_ARG(uint64_t *, pDst, 0);
8534 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8535 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8536 IEM_MC_PREPARE_FPU_USAGE();
8537 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8538 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8539 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8540 IEM_MC_ADVANCE_RIP();
8541 IEM_MC_END();
8542 return VINF_SUCCESS;
8543 }
8544 return IEMOP_RAISE_INVALID_OPCODE();
8545}
8546
8547/** Opcode 0x66 0x0f 0xd7 - */
8548FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8549{
8550 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8551 /** @todo testcase: Check that the instruction implicitly clears the high
8552 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8553 * and opcode modifications are made to work with the whole width (not
8554 * just 128). */
8555 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8556 /* Docs say register only. */
8557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8558 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8559 {
8560 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8561 IEM_MC_BEGIN(2, 0);
8562 IEM_MC_ARG(uint64_t *, pDst, 0);
8563 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8564 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8565 IEM_MC_PREPARE_SSE_USAGE();
8566 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8567 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8568 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8569 IEM_MC_ADVANCE_RIP();
8570 IEM_MC_END();
8571 return VINF_SUCCESS;
8572 }
8573 return IEMOP_RAISE_INVALID_OPCODE();
8574}
8575
8576/* Opcode 0xf3 0x0f 0xd7 - invalid */
8577/* Opcode 0xf2 0x0f 0xd7 - invalid */
8578
8579
8580/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8581FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8582/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8583FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8584/* Opcode 0xf3 0x0f 0xd8 - invalid */
8585/* Opcode 0xf2 0x0f 0xd8 - invalid */
8586
8587/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8588FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8589/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8590FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8591/* Opcode 0xf3 0x0f 0xd9 - invalid */
8592/* Opcode 0xf2 0x0f 0xd9 - invalid */
8593
8594/** Opcode 0x0f 0xda - pminub Pq, Qq */
8595FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8596/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8597FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8598/* Opcode 0xf3 0x0f 0xda - invalid */
8599/* Opcode 0xf2 0x0f 0xda - invalid */
8600
8601/** Opcode 0x0f 0xdb - pand Pq, Qq */
8602FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8603/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8604FNIEMOP_STUB(iemOp_pand_Vx_W);
8605/* Opcode 0xf3 0x0f 0xdb - invalid */
8606/* Opcode 0xf2 0x0f 0xdb - invalid */
8607
8608/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8609FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8610/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8611FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8612/* Opcode 0xf3 0x0f 0xdc - invalid */
8613/* Opcode 0xf2 0x0f 0xdc - invalid */
8614
8615/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8616FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8617/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8618FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8619/* Opcode 0xf3 0x0f 0xdd - invalid */
8620/* Opcode 0xf2 0x0f 0xdd - invalid */
8621
8622/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8623FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8624/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8625FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8626/* Opcode 0xf3 0x0f 0xde - invalid */
8627/* Opcode 0xf2 0x0f 0xde - invalid */
8628
8629/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8630FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8631/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8632FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8633/* Opcode 0xf3 0x0f 0xdf - invalid */
8634/* Opcode 0xf2 0x0f 0xdf - invalid */
8635
8636/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8637FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8638/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8639FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8640/* Opcode 0xf3 0x0f 0xe0 - invalid */
8641/* Opcode 0xf2 0x0f 0xe0 - invalid */
8642
8643/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8644FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8645/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8646FNIEMOP_STUB(iemOp_psraw_Vx_W);
8647/* Opcode 0xf3 0x0f 0xe1 - invalid */
8648/* Opcode 0xf2 0x0f 0xe1 - invalid */
8649
8650/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8651FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8652/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8653FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8654/* Opcode 0xf3 0x0f 0xe2 - invalid */
8655/* Opcode 0xf2 0x0f 0xe2 - invalid */
8656
8657/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8658FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8659/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8660FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8661/* Opcode 0xf3 0x0f 0xe3 - invalid */
8662/* Opcode 0xf2 0x0f 0xe3 - invalid */
8663
8664/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8665FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8666/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8667FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8668/* Opcode 0xf3 0x0f 0xe4 - invalid */
8669/* Opcode 0xf2 0x0f 0xe4 - invalid */
8670
8671/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8672FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8673/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8674FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8675/* Opcode 0xf3 0x0f 0xe5 - invalid */
8676/* Opcode 0xf2 0x0f 0xe5 - invalid */
8677
8678/* Opcode 0x0f 0xe6 - invalid */
8679/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8680FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8681/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8682FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8683/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8684FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8685
8686
8687/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8688FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8689{
8690 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8692 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8693 {
8694 /* Register, memory. */
8695 IEM_MC_BEGIN(0, 2);
8696 IEM_MC_LOCAL(uint64_t, uSrc);
8697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8698
8699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8701 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8702 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8703
8704 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8705 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8706
8707 IEM_MC_ADVANCE_RIP();
8708 IEM_MC_END();
8709 return VINF_SUCCESS;
8710 }
8711 /* The register, register encoding is invalid. */
8712 return IEMOP_RAISE_INVALID_OPCODE();
8713}

/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
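
/*
 * Note that movntdq requires a 16-byte aligned memory operand, which is why
 * the emulation above stores through IEM_MC_STORE_MEM_U128_ALIGN_SSE rather
 * than a plain store.  The SSE2 intrinsic equivalent, again as a sketch only:
 *
 * @code
 *     #include <emmintrin.h>
 *     void StoreNonTemporalU128(__m128i *pDst, __m128i uSrc)
 *     {
 *         _mm_stream_si128(pDst, uSrc); // movntdq [pDst], uSrc; pDst must be 16-byte aligned
 *     }
 * @endcode
 */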

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
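
/*
 * Both pxor encodings above delegate the ModRM decoding to a common
 * "full, full -> full" worker defined earlier in this file, supplying only
 * the assembly helper to invoke.  A rough sketch of the register,register
 * path of the MMX flavour of such a worker (see
 * iemOpCommonMmx_FullFull_To_Full for the real thing):
 *
 * @code
 *     IEM_MC_BEGIN(2, 0);
 *     IEM_MC_ARG(uint64_t *,       pDst, 0);
 *     IEM_MC_ARG(uint64_t const *, pSrc, 1);
 *     IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
 *     IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
 *     IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
 *     IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
 *     IEM_MC_ADVANCE_RIP();
 *     IEM_MC_END();
 * @endcode
 */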

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/* Opcode 0xf3 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf3 0x0f 0xf1 - invalid */
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf2 - invalid */
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf3 - invalid */
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf3 0x0f 0xf4 - invalid */
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf5 - invalid */
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf6 - invalid */
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf3 0x0f 0xf7 - invalid */
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf3 0x0f 0xf8 - invalid */
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf9 - invalid */
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfa - invalid */
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf3 0x0f 0xfb - invalid */
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfc - invalid */
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfd - invalid */
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf3 0x0f 0xfe - invalid */
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
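
/*
 * Encoding note on the vendor check above: Intel documents UD0 as 0F FF /r,
 * so the ModRM byte (plus any SIB byte and displacement) counts towards the
 * instruction length, whereas AMD treats it as plain 0F FF with no ModRM.
 * That is why the Intel path decodes and discards a ModRM byte and its
 * effective address before raising \#UD.
 */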



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*         no prefix,  066h prefix,  f3h prefix,  f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
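
/*
 * The map holds four entries per opcode byte, one for each mandatory prefix
 * state (none, 066h, 0f3h, 0f2h), which is what the AssertCompile above
 * checks (256 * 4 == 1024).  A minimal sketch of how a decoder indexes it,
 * assuming idxPrefix already holds the 0..3 prefix index:
 *
 * @code
 *     uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *     return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 * @endcode
 */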

/** @} */