VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 66745

Last change on this file was 66745, checked in by vboxsync, 8 years ago:

IEM: Implemented movupd Vpd,Wpd (0x66 0x0f 0x10).
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66745 2017-05-02 11:36:57Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for Grp6 entries /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
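
/* Editor's note (not in the original source): IEM_MC_ARG_CONST above bakes
   the compile-time fWrite flag in as call argument 1, so verr and verw can
   share this worker while iemCImpl_VerX still knows which variant it is
   emulating. */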


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
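
/* Editor's note (illustrative, not in the original source): the ModR/M reg
   field indexes g_apfnGroup6.  E.g. bRm = 0xd8 (mod=11b, reg=011b, rm=000b)
   gives (0xd8 >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK = 3, so the call
   lands in iemOp_Grp6_ltr, which then sees mod=11b and takes the register
   form. */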


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc1. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc2. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc3. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc4. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc8. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 0xc9. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
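
/* Editor's note: both XSAVE handlers above gate on the guest's CPUID feature
   flag (fXSaveRstor) before completing the decode, so on a guest CPU profile
   without XSAVE the 0x0f 0x01 0xd0/0xd1 encodings raise #UD just as they
   would on real hardware. */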


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
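                /* Editor's note (inference from the masks below, not in the
                   original source): pre-386 CPUs return the undefined MSW
                   bits 4..15 as ones (hence OR 0xfff0), the 386 also reports
                   ET so only bits 5..15 are forced (OR 0xffe0), and later
                   CPUs return the low CR0 bits unchanged. */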
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored: everything is 16-bit and only
       the low CR0 bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
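
/* Editor's note (illustrative, not in the original source): with mod=11b the
   whole ModR/M byte acts as an opcode extension.  E.g. bRm = 0xd9 decodes as
   reg=011b, rm=001b, i.e. case 3/1 above, giving vmmcall (0x0f 0x01 0xd9). */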

/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                /** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
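
/* Editor's note (assumption): the 32-bit and 64-bit operand sizes share the
   uint64_t path above, so iemCImpl_LarLsl_u64 presumably handles the 32-bit
   truncation/zero-extension itself; only two CIMPL workers are needed. */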



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}

// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
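
/* Editor's note: the handler above is the template for all four 0x0f 0x10
   movers that follow: mod=11b copies XMM to XMM (IEM_MC_COPY_XREG_U128 or a
   fetch/store pair), otherwise the effective address is calculated before
   the decode is finalized and the data travels through a local uSrc. */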


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 * @oponlytest
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vx_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZxReg, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
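
/* Editor's note: unlike movsldup, which fetches a full 16-byte operand with
   IEM_MC_FETCH_MEM_U128_ALIGN_SSE, movddup above reads only 64 bits from
   memory (IEM_MC_FETCH_MEM_U64), which is why its memory form needs no
   16-byte alignment check (@opxcpttype 5 rather than 4). */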


/** Opcode 0x0f 0x13 - movlps Mq, Vq */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - movlpd Mq, Vq */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq / movlhps Vdq, Uq */
FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* Mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
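
/* Editor's note (assumption): the lock-prefix special case above models the
   AMD alternative CR8 encoding, e.g. f0 0f 20 c0 (lock mov eax,cr0) reads
   CR8 instead of CR0 when the guest CPU reports fMovCr8In32Bit. */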


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* Mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1865
1866
1867/** Opcode 0x0f 0x23. */
1868FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1869{
1870 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1871 IEMOP_HLP_MIN_386();
1872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1874 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1875 return IEMOP_RAISE_INVALID_OPCODE();
1876 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1877 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1878 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1879}
1880
1881
1882/** Opcode 0x0f 0x24. */
1883FNIEMOP_DEF(iemOp_mov_Rd_Td)
1884{
1885 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1886 /** @todo works on 386 and 486. */
1887 /* The RM byte is not considered, see testcase. */
1888 return IEMOP_RAISE_INVALID_OPCODE();
1889}
1890
1891
1892/** Opcode 0x0f 0x26. */
1893FNIEMOP_DEF(iemOp_mov_Td_Rd)
1894{
1895 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1896 /** @todo works on 386 and 486. */
1897 /* The RM byte is not considered, see testcase. */
1898 return IEMOP_RAISE_INVALID_OPCODE();
1899}
1900
1901
/** Opcode 0x0f 0x28 - movaps Vps, Wps */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - movaps Wps, Vps */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT


/** Opcode 0x0f 0x2b - movntps Mps, Vps */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}


/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);


/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
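
/*
 * Illustrative sketch, not built: layout assumed by the table lookups above.
 * Each three-byte opcode has four consecutive entries, one per mandatory
 * prefix; idxPrefix is assumed to encode none=0, 0x66=1, 0xF3=2, 0xF2=3,
 * matching the column order of the group tables later in this file.
 */
#if 0
static uintptr_t iemSketchThreeByteTableIndex(uint8_t bOpcode, uint8_t idxPrefix)
{
    return (uintptr_t)bOpcode * 4 + idxPrefix; /* opcode row, prefix column */
}
#endif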


/**
 * Implements a conditional move.
 *
 * Wish there were an obvious way to do this that allowed sharing and reducing
 * code bloat.
 *
 * @param   a_Cnd   The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
\
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
\
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
\
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
\
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
\
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
\
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
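
/*
 * Illustrative sketch, not built: plain C equivalent of what CMOV_X expands
 * to for a 32-bit register destination. In 64-bit mode a 32-bit cmov clears
 * the upper half of the destination even when the condition is false, which
 * is what the IEM_MC_CLEAR_HIGH_GREG_U64 else-arm above implements.
 */
#if 0
static uint64_t iemSketchCmov32(uint64_t uDst, uint32_t uSrc, bool fCond)
{
    uint32_t const uLow = fCond ? uSrc : (uint32_t)uDst;
    return uLow; /* implicit zero-extension: upper 32 bits end up cleared */
}
#endif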


/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X

/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);

/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_STUB(iemOp_andps_Vps_Wps);
/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_STUB(iemOp_orps_Vps_Wps);
/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_STUB(iemOp_addps_Vps_Wps);
/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_STUB(iemOp_addss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);

/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);

/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_STUB(iemOp_subps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_STUB(iemOp_subss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);

/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_STUB(iemOp_minps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_STUB(iemOp_minss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);

/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_STUB(iemOp_divps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_STUB(iemOp_divss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);

/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);

/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 64-bit memory access that must be 128-bit aligned; only the low
 * 64 bits are used.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
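
/*
 * Illustrative sketch, not built: plain C reference for the 64-bit punpcklbw
 * operation dispatched through the workers above; the low four bytes of
 * destination and source are interleaved into the full result.
 */
#if 0
static uint64_t iemSketchPunpcklbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 4; iByte++)
    {
        uint64_t const bDst = (uDst >> (iByte * 8)) & 0xff; /* low byte N of dst */
        uint64_t const bSrc = (uSrc >> (iByte * 8)) & 0xff; /* low byte N of src */
        uResult |= bDst << (iByte * 16);                    /* even result bytes */
        uResult |= bSrc << (iByte * 16 + 8);                /* odd result bytes  */
    }
    return uResult;
}
#endif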


/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/* Opcode 0xf3 0x0f 0x62 - invalid */



/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
FNIEMOP_STUB(iemOp_packuswb_Vx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
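
/*
 * Illustrative sketch, not built: the high-half counterpart used by the
 * HighHigh workers above. Only the upper four bytes of each 64-bit lane
 * contribute, which is why the memory fetch may read the full 128 bits yet
 * use only the upper quadword.
 */
#if 0
static uint64_t iemSketchPunpckhbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 4; iByte++)
    {
        uint64_t const bDst = (uDst >> ((iByte + 4) * 8)) & 0xff; /* high byte N of dst */
        uint64_t const bSrc = (uSrc >> ((iByte + 4) * 8)) & 0xff; /* high byte N of src */
        uResult |= bDst << (iByte * 16);
        uResult |= bSrc << (iByte * 16 + 8);
    }
    return uResult;
}
#endif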


/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}

/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
/* Opcode 0xf3 0x0f 0x68 - invalid */


/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}

/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
/* Opcode 0xf3 0x0f 0x69 - invalid */


/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
/* Opcode 0xf3 0x0f 0x6a - invalid */


/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x6b - invalid */


/* Opcode 0x0f 0x6c - invalid */

/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}

/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */

/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}

/* Opcode 0xf3 0x0f 0x6d - invalid */


/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
    else
        IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* MMX, greg */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* MMX, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate follows the ModRM encoding */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
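
/*
 * Illustrative sketch, not built: the REX.W distinction handled above.
 * Without REX.W the instruction is movd, loading 32 bits and zero-extending
 * them into the 64-bit MMX register; with REX.W it is a full 64-bit movq.
 */
#if 0
static uint64_t iemSketchMovdMovqLoad(uint64_t uSrc, bool fRexW)
{
    return fRexW ? uSrc : (uint64_t)(uint32_t)uSrc; /* zero-extend the dword case */
}
#endif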

/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
    else
        IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* XMM, greg */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* XMM, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate follows the ModRM encoding */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x6e - invalid */


/** Opcode 0x0f 0x6f - movq Pq, Qq */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* cbImm=1: the Ib still follows the ModRM encoding */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
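
/*
 * Illustrative sketch, not built: plain C reference for the pshufw operation
 * invoked above (the actual iemAImpl_pshufw implementation lives elsewhere).
 * Each result word is picked from the source by a 2-bit selector in the
 * immediate; pshufd, pshufhw and pshuflw below apply the same selector scheme
 * to dwords resp. the high/low word groups of an XMM register.
 */
#if 0
static uint64_t iemSketchPshufW(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel  = (bImm >> (iWord * 2)) & 3;      /* 2-bit word selector */
        uint64_t const uWord = (uSrc >> (iSel * 16)) & 0xffff; /* selected source word */
        uResult |= uWord << (iWord * 16);
    }
    return uResult;
}
#endif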

/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);


/**
 * Group 12 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);


/** Opcode 0x0f 0x71. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
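
/*
 * Illustrative sketch, not built: the group 12/13/14 register tables are
 * indexed like the three-byte escape tables, four entries per ModRM.reg
 * value with the prefix (assumed order: none, 0x66, 0xF3, 0xF2) selecting
 * the column.
 */
#if 0
static unsigned iemSketchGroupTableIndex(uint8_t bRm, uint8_t idxPrefix)
{
    return ((bRm >> 3) & 7) * 4 + idxPrefix; /* reg row times four plus prefix column */
}
#endif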
3511
3512
3513/** Opcode 0x0f 0x72 11/2. */
3514FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3515
3516/** Opcode 0x66 0x0f 0x72 11/2. */
3517FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3518
3519/** Opcode 0x0f 0x72 11/4. */
3520FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3521
3522/** Opcode 0x66 0x0f 0x72 11/4. */
3523FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3524
3525/** Opcode 0x0f 0x72 11/6. */
3526FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3527
3528/** Opcode 0x66 0x0f 0x72 11/6. */
3529FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3530
3531
3532/**
3533 * Group 13 jump table for register variant.
3534 */
3535IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3536{
3537 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3538 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3539 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3540 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3541 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3542 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3543 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3544 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3545};
3546AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3547
3548/** Opcode 0x0f 0x72. */
3549FNIEMOP_DEF(iemOp_Grp13)
3550{
3551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3552 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3553 /* register, register */
3554 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3555 + pVCpu->iem.s.idxPrefix], bRm);
3556 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3557}
3558
3559
3560/** Opcode 0x0f 0x73 11/2. */
3561FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3562
3563/** Opcode 0x66 0x0f 0x73 11/2. */
3564FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3565
3566/** Opcode 0x66 0x0f 0x73 11/3. */
3567FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3568
3569/** Opcode 0x0f 0x73 11/6. */
3570FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3571
3572/** Opcode 0x66 0x0f 0x73 11/6. */
3573FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3574
3575/** Opcode 0x66 0x0f 0x73 11/7. */
3576FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3577
3578/**
3579 * Group 14 jump table for register variant.
3580 */
3581IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3582{
3583 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3584 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3585 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3586 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3587 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3588 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3589 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3590 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3591};
3592AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
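
/* Unlike groups 12 and 13, the /3 (psrldq) and /7 (pslldq) rows only have a
   0x66 (SSE2/XMM) form; these instructions have no MMX counterpart, hence
   the lone iemOp_Grp14_*_Ux_Ib entries in those rows. */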
3593
3594
3595/** Opcode 0x0f 0x73. */
3596FNIEMOP_DEF(iemOp_Grp14)
3597{
3598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3599 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3600 /* register, register */
3601 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3602 + pVCpu->iem.s.idxPrefix], bRm);
3603 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3604}
3605
3606
3607/**
3608 * Common worker for MMX instructions on the form:
3609 * pxxx mm1, mm2/mem64
3610 */
3611FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3612{
3613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3614 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3615 {
3616 /*
3617 * Register, register.
3618 */
3619 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3620 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3622 IEM_MC_BEGIN(2, 0);
3623 IEM_MC_ARG(uint64_t *, pDst, 0);
3624 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3625 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3626 IEM_MC_PREPARE_FPU_USAGE();
3627 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3628 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3629 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3630 IEM_MC_ADVANCE_RIP();
3631 IEM_MC_END();
3632 }
3633 else
3634 {
3635 /*
3636 * Register, memory.
3637 */
3638 IEM_MC_BEGIN(2, 2);
3639 IEM_MC_ARG(uint64_t *, pDst, 0);
3640 IEM_MC_LOCAL(uint64_t, uSrc);
3641 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3643
3644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3646 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3647 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3648
3649 IEM_MC_PREPARE_FPU_USAGE();
3650 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3651 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3652
3653 IEM_MC_ADVANCE_RIP();
3654 IEM_MC_END();
3655 }
3656 return VINF_SUCCESS;
3657}
3658
3659
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 */
3667FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3668{
3669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3671 {
3672 /*
3673 * Register, register.
3674 */
3675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3676 IEM_MC_BEGIN(2, 0);
3677 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3678 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3679 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3680 IEM_MC_PREPARE_SSE_USAGE();
3681 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3682 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3683 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3684 IEM_MC_ADVANCE_RIP();
3685 IEM_MC_END();
3686 }
3687 else
3688 {
3689 /*
3690 * Register, memory.
3691 */
3692 IEM_MC_BEGIN(2, 2);
3693 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3694 IEM_MC_LOCAL(RTUINT128U, uSrc);
3695 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3697
3698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3700 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3701 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3702
3703 IEM_MC_PREPARE_SSE_USAGE();
3704 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3705 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3706
3707 IEM_MC_ADVANCE_RIP();
3708 IEM_MC_END();
3709 }
3710 return VINF_SUCCESS;
3711}
3712
3713
3714/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3715FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3716{
3717 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3718 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3719}
3720
3721/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3722FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3723{
    IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3725 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3726}
3727
3728/* Opcode 0xf3 0x0f 0x74 - invalid */
3729/* Opcode 0xf2 0x0f 0x74 - invalid */
3730
3731
3732/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3733FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3734{
3735 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3736 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3737}
3738
3739/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3740FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3741{
3742 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3743 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3744}
3745
3746/* Opcode 0xf3 0x0f 0x75 - invalid */
3747/* Opcode 0xf2 0x0f 0x75 - invalid */
3748
3749
3750/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3751FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3752{
3753 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3754 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3755}
3756
3757/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3758FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3759{
    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
3761 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3762}
3763
3764/* Opcode 0xf3 0x0f 0x76 - invalid */
3765/* Opcode 0xf2 0x0f 0x76 - invalid */
3766
3767
3768/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3769FNIEMOP_STUB(iemOp_emms);
3770/* Opcode 0x66 0x0f 0x77 - invalid */
3771/* Opcode 0xf3 0x0f 0x77 - invalid */
3772/* Opcode 0xf2 0x0f 0x77 - invalid */
3773
3774/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3775FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3776/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3777FNIEMOP_STUB(iemOp_AmdGrp17);
3778/* Opcode 0xf3 0x0f 0x78 - invalid */
3779/* Opcode 0xf2 0x0f 0x78 - invalid */
3780
3781/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3782FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3783/* Opcode 0x66 0x0f 0x79 - invalid */
3784/* Opcode 0xf3 0x0f 0x79 - invalid */
3785/* Opcode 0xf2 0x0f 0x79 - invalid */
3786
3787/* Opcode 0x0f 0x7a - invalid */
3788/* Opcode 0x66 0x0f 0x7a - invalid */
3789/* Opcode 0xf3 0x0f 0x7a - invalid */
3790/* Opcode 0xf2 0x0f 0x7a - invalid */
3791
3792/* Opcode 0x0f 0x7b - invalid */
3793/* Opcode 0x66 0x0f 0x7b - invalid */
3794/* Opcode 0xf3 0x0f 0x7b - invalid */
3795/* Opcode 0xf2 0x0f 0x7b - invalid */
3796
3797/* Opcode 0x0f 0x7c - invalid */
3798/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3799FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3800/* Opcode 0xf3 0x0f 0x7c - invalid */
3801/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3802FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3803
3804/* Opcode 0x0f 0x7d - invalid */
3805/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
3806FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
3807/* Opcode 0xf3 0x0f 0x7d - invalid */
3808/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
3809FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
3810
3811
3812/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3813FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3814{
3815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3816 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3817 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3818 else
3819 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
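    /* Illustrative encodings: 0F 7E C8 decodes as 'movd eax, mm1', while
       48 0F 7E C8 (REX.W set) decodes as 'movq rax, mm1' - the MMX register
       comes from the reg field, the general register from r/m. */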
3820 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3821 {
3822 /* greg, MMX */
3823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3824 IEM_MC_BEGIN(0, 1);
3825 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3826 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3827 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3828 {
3829 IEM_MC_LOCAL(uint64_t, u64Tmp);
3830 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3831 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3832 }
3833 else
3834 {
3835 IEM_MC_LOCAL(uint32_t, u32Tmp);
3836 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3837 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3838 }
3839 IEM_MC_ADVANCE_RIP();
3840 IEM_MC_END();
3841 }
3842 else
3843 {
3844 /* [mem], MMX */
3845 IEM_MC_BEGIN(0, 2);
3846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate follows */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3850 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3851 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3852 {
3853 IEM_MC_LOCAL(uint64_t, u64Tmp);
3854 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3855 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3856 }
3857 else
3858 {
3859 IEM_MC_LOCAL(uint32_t, u32Tmp);
3860 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3861 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3862 }
3863 IEM_MC_ADVANCE_RIP();
3864 IEM_MC_END();
3865 }
3866 return VINF_SUCCESS;
3867}
3868
3869/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
3870FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
3871{
3872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3873 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3874 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3875 else
3876 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
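    /* Illustrative encodings: 66 0F 7E C8 decodes as 'movd eax, xmm1' and
       66 48 0F 7E C8 (REX.W set) as 'movq rax, xmm1'. */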
3877 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3878 {
3879 /* greg, XMM */
3880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3881 IEM_MC_BEGIN(0, 1);
3882 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3883 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3884 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3885 {
3886 IEM_MC_LOCAL(uint64_t, u64Tmp);
3887 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3888 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3889 }
3890 else
3891 {
3892 IEM_MC_LOCAL(uint32_t, u32Tmp);
3893 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3894 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3895 }
3896 IEM_MC_ADVANCE_RIP();
3897 IEM_MC_END();
3898 }
3899 else
3900 {
3901 /* [mem], XMM */
3902 IEM_MC_BEGIN(0, 2);
3903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate follows */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3907 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3908 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3909 {
3910 IEM_MC_LOCAL(uint64_t, u64Tmp);
3911 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3912 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3913 }
3914 else
3915 {
3916 IEM_MC_LOCAL(uint32_t, u32Tmp);
3917 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3918 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3919 }
3920 IEM_MC_ADVANCE_RIP();
3921 IEM_MC_END();
3922 }
3923 return VINF_SUCCESS;
3924}
3925
3926/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
3927FNIEMOP_STUB(iemOp_movq_Vq_Wq);
3928/* Opcode 0xf2 0x0f 0x7e - invalid */
3929
3930
3931/** Opcode 0x0f 0x7f - movq Qq, Pq */
3932FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3933{
3934 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3936 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3937 {
3938 /*
3939 * Register, register.
3940 */
3941 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3942 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3944 IEM_MC_BEGIN(0, 1);
3945 IEM_MC_LOCAL(uint64_t, u64Tmp);
3946 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3947 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3948 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3949 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3950 IEM_MC_ADVANCE_RIP();
3951 IEM_MC_END();
3952 }
3953 else
3954 {
3955 /*
3956 * Register, memory.
3957 */
3958 IEM_MC_BEGIN(0, 2);
3959 IEM_MC_LOCAL(uint64_t, u64Tmp);
3960 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3961
3962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3964 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3965 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3966
3967 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3968 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3969
3970 IEM_MC_ADVANCE_RIP();
3971 IEM_MC_END();
3972 }
3973 return VINF_SUCCESS;
3974}
3975
3976/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
3977FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
3978{
3979 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
3980 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3981 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3982 {
3983 /*
3984 * Register, register.
3985 */
3986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3987 IEM_MC_BEGIN(0, 0);
3988 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3989 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3990 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3991 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3992 IEM_MC_ADVANCE_RIP();
3993 IEM_MC_END();
3994 }
3995 else
3996 {
3997 /*
3998 * Register, memory.
3999 */
4000 IEM_MC_BEGIN(0, 2);
4001 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4003
4004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4006 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4007 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4008
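        /* Unlike movdqu (the 0xf3 variant below), the aligned store raises
           #GP(0) if the effective address isn't 16-byte aligned. */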
4009 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4010 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4011
4012 IEM_MC_ADVANCE_RIP();
4013 IEM_MC_END();
4014 }
4015 return VINF_SUCCESS;
4016}
4017
4018/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4023 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4024 {
4025 /*
4026 * Register, register.
4027 */
4028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4029 IEM_MC_BEGIN(0, 0);
4030 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4032 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4033 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4034 IEM_MC_ADVANCE_RIP();
4035 IEM_MC_END();
4036 }
4037 else
4038 {
4039 /*
4040 * Register, memory.
4041 */
4042 IEM_MC_BEGIN(0, 2);
4043 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4045
4046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4048 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4049 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4050
4051 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4052 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4053
4054 IEM_MC_ADVANCE_RIP();
4055 IEM_MC_END();
4056 }
4057 return VINF_SUCCESS;
4058}
4059
4060/* Opcode 0xf2 0x0f 0x7f - invalid */
4061
4062
4063
4064/** Opcode 0x0f 0x80. */
4065FNIEMOP_DEF(iemOp_jo_Jv)
4066{
4067 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4068 IEMOP_HLP_MIN_386();
4069 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
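    /* In 64-bit mode the operand size defaults to 64-bit here, so the else
       branch below covers it too with a sign-extended 32-bit displacement;
       the same two-way split repeats in all the Jcc handlers that follow. */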
4070 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4071 {
4072 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4074
4075 IEM_MC_BEGIN(0, 0);
4076 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4077 IEM_MC_REL_JMP_S16(i16Imm);
4078 } IEM_MC_ELSE() {
4079 IEM_MC_ADVANCE_RIP();
4080 } IEM_MC_ENDIF();
4081 IEM_MC_END();
4082 }
4083 else
4084 {
4085 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4087
4088 IEM_MC_BEGIN(0, 0);
4089 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4090 IEM_MC_REL_JMP_S32(i32Imm);
4091 } IEM_MC_ELSE() {
4092 IEM_MC_ADVANCE_RIP();
4093 } IEM_MC_ENDIF();
4094 IEM_MC_END();
4095 }
4096 return VINF_SUCCESS;
4097}
4098
4099
4100/** Opcode 0x0f 0x81. */
4101FNIEMOP_DEF(iemOp_jno_Jv)
4102{
4103 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4104 IEMOP_HLP_MIN_386();
4105 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4106 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4107 {
4108 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4110
4111 IEM_MC_BEGIN(0, 0);
4112 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4113 IEM_MC_ADVANCE_RIP();
4114 } IEM_MC_ELSE() {
4115 IEM_MC_REL_JMP_S16(i16Imm);
4116 } IEM_MC_ENDIF();
4117 IEM_MC_END();
4118 }
4119 else
4120 {
4121 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4123
4124 IEM_MC_BEGIN(0, 0);
4125 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4126 IEM_MC_ADVANCE_RIP();
4127 } IEM_MC_ELSE() {
4128 IEM_MC_REL_JMP_S32(i32Imm);
4129 } IEM_MC_ENDIF();
4130 IEM_MC_END();
4131 }
4132 return VINF_SUCCESS;
4133}
4134
4135
4136/** Opcode 0x0f 0x82. */
4137FNIEMOP_DEF(iemOp_jc_Jv)
4138{
4139 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4140 IEMOP_HLP_MIN_386();
4141 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4142 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4143 {
4144 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4146
4147 IEM_MC_BEGIN(0, 0);
4148 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4149 IEM_MC_REL_JMP_S16(i16Imm);
4150 } IEM_MC_ELSE() {
4151 IEM_MC_ADVANCE_RIP();
4152 } IEM_MC_ENDIF();
4153 IEM_MC_END();
4154 }
4155 else
4156 {
4157 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4159
4160 IEM_MC_BEGIN(0, 0);
4161 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4162 IEM_MC_REL_JMP_S32(i32Imm);
4163 } IEM_MC_ELSE() {
4164 IEM_MC_ADVANCE_RIP();
4165 } IEM_MC_ENDIF();
4166 IEM_MC_END();
4167 }
4168 return VINF_SUCCESS;
4169}
4170
4171
4172/** Opcode 0x0f 0x83. */
4173FNIEMOP_DEF(iemOp_jnc_Jv)
4174{
4175 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4176 IEMOP_HLP_MIN_386();
4177 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4178 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4179 {
4180 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4182
4183 IEM_MC_BEGIN(0, 0);
4184 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4185 IEM_MC_ADVANCE_RIP();
4186 } IEM_MC_ELSE() {
4187 IEM_MC_REL_JMP_S16(i16Imm);
4188 } IEM_MC_ENDIF();
4189 IEM_MC_END();
4190 }
4191 else
4192 {
4193 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4195
4196 IEM_MC_BEGIN(0, 0);
4197 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4198 IEM_MC_ADVANCE_RIP();
4199 } IEM_MC_ELSE() {
4200 IEM_MC_REL_JMP_S32(i32Imm);
4201 } IEM_MC_ENDIF();
4202 IEM_MC_END();
4203 }
4204 return VINF_SUCCESS;
4205}
4206
4207
4208/** Opcode 0x0f 0x84. */
4209FNIEMOP_DEF(iemOp_je_Jv)
4210{
4211 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4212 IEMOP_HLP_MIN_386();
4213 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4214 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4215 {
4216 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4218
4219 IEM_MC_BEGIN(0, 0);
4220 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4221 IEM_MC_REL_JMP_S16(i16Imm);
4222 } IEM_MC_ELSE() {
4223 IEM_MC_ADVANCE_RIP();
4224 } IEM_MC_ENDIF();
4225 IEM_MC_END();
4226 }
4227 else
4228 {
4229 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4231
4232 IEM_MC_BEGIN(0, 0);
4233 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4234 IEM_MC_REL_JMP_S32(i32Imm);
4235 } IEM_MC_ELSE() {
4236 IEM_MC_ADVANCE_RIP();
4237 } IEM_MC_ENDIF();
4238 IEM_MC_END();
4239 }
4240 return VINF_SUCCESS;
4241}
4242
4243
4244/** Opcode 0x0f 0x85. */
4245FNIEMOP_DEF(iemOp_jne_Jv)
4246{
4247 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4248 IEMOP_HLP_MIN_386();
4249 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4250 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4251 {
4252 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4254
4255 IEM_MC_BEGIN(0, 0);
4256 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4257 IEM_MC_ADVANCE_RIP();
4258 } IEM_MC_ELSE() {
4259 IEM_MC_REL_JMP_S16(i16Imm);
4260 } IEM_MC_ENDIF();
4261 IEM_MC_END();
4262 }
4263 else
4264 {
4265 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4267
4268 IEM_MC_BEGIN(0, 0);
4269 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4270 IEM_MC_ADVANCE_RIP();
4271 } IEM_MC_ELSE() {
4272 IEM_MC_REL_JMP_S32(i32Imm);
4273 } IEM_MC_ENDIF();
4274 IEM_MC_END();
4275 }
4276 return VINF_SUCCESS;
4277}
4278
4279
4280/** Opcode 0x0f 0x86. */
4281FNIEMOP_DEF(iemOp_jbe_Jv)
4282{
4283 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4284 IEMOP_HLP_MIN_386();
4285 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4286 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4287 {
4288 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4290
4291 IEM_MC_BEGIN(0, 0);
4292 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4293 IEM_MC_REL_JMP_S16(i16Imm);
4294 } IEM_MC_ELSE() {
4295 IEM_MC_ADVANCE_RIP();
4296 } IEM_MC_ENDIF();
4297 IEM_MC_END();
4298 }
4299 else
4300 {
4301 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4303
4304 IEM_MC_BEGIN(0, 0);
4305 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4306 IEM_MC_REL_JMP_S32(i32Imm);
4307 } IEM_MC_ELSE() {
4308 IEM_MC_ADVANCE_RIP();
4309 } IEM_MC_ENDIF();
4310 IEM_MC_END();
4311 }
4312 return VINF_SUCCESS;
4313}
4314
4315
4316/** Opcode 0x0f 0x87. */
4317FNIEMOP_DEF(iemOp_jnbe_Jv)
4318{
4319 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4320 IEMOP_HLP_MIN_386();
4321 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4322 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4323 {
4324 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4326
4327 IEM_MC_BEGIN(0, 0);
4328 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4329 IEM_MC_ADVANCE_RIP();
4330 } IEM_MC_ELSE() {
4331 IEM_MC_REL_JMP_S16(i16Imm);
4332 } IEM_MC_ENDIF();
4333 IEM_MC_END();
4334 }
4335 else
4336 {
4337 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4339
4340 IEM_MC_BEGIN(0, 0);
4341 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4342 IEM_MC_ADVANCE_RIP();
4343 } IEM_MC_ELSE() {
4344 IEM_MC_REL_JMP_S32(i32Imm);
4345 } IEM_MC_ENDIF();
4346 IEM_MC_END();
4347 }
4348 return VINF_SUCCESS;
4349}
4350
4351
4352/** Opcode 0x0f 0x88. */
4353FNIEMOP_DEF(iemOp_js_Jv)
4354{
4355 IEMOP_MNEMONIC(js_Jv, "js Jv");
4356 IEMOP_HLP_MIN_386();
4357 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4358 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4359 {
4360 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4362
4363 IEM_MC_BEGIN(0, 0);
4364 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4365 IEM_MC_REL_JMP_S16(i16Imm);
4366 } IEM_MC_ELSE() {
4367 IEM_MC_ADVANCE_RIP();
4368 } IEM_MC_ENDIF();
4369 IEM_MC_END();
4370 }
4371 else
4372 {
4373 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4375
4376 IEM_MC_BEGIN(0, 0);
4377 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4378 IEM_MC_REL_JMP_S32(i32Imm);
4379 } IEM_MC_ELSE() {
4380 IEM_MC_ADVANCE_RIP();
4381 } IEM_MC_ENDIF();
4382 IEM_MC_END();
4383 }
4384 return VINF_SUCCESS;
4385}
4386
4387
4388/** Opcode 0x0f 0x89. */
4389FNIEMOP_DEF(iemOp_jns_Jv)
4390{
4391 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4392 IEMOP_HLP_MIN_386();
4393 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4394 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4395 {
4396 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398
4399 IEM_MC_BEGIN(0, 0);
4400 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4401 IEM_MC_ADVANCE_RIP();
4402 } IEM_MC_ELSE() {
4403 IEM_MC_REL_JMP_S16(i16Imm);
4404 } IEM_MC_ENDIF();
4405 IEM_MC_END();
4406 }
4407 else
4408 {
4409 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4411
4412 IEM_MC_BEGIN(0, 0);
4413 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4414 IEM_MC_ADVANCE_RIP();
4415 } IEM_MC_ELSE() {
4416 IEM_MC_REL_JMP_S32(i32Imm);
4417 } IEM_MC_ENDIF();
4418 IEM_MC_END();
4419 }
4420 return VINF_SUCCESS;
4421}
4422
4423
4424/** Opcode 0x0f 0x8a. */
4425FNIEMOP_DEF(iemOp_jp_Jv)
4426{
4427 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4428 IEMOP_HLP_MIN_386();
4429 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4430 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4431 {
4432 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4434
4435 IEM_MC_BEGIN(0, 0);
4436 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4437 IEM_MC_REL_JMP_S16(i16Imm);
4438 } IEM_MC_ELSE() {
4439 IEM_MC_ADVANCE_RIP();
4440 } IEM_MC_ENDIF();
4441 IEM_MC_END();
4442 }
4443 else
4444 {
4445 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4447
4448 IEM_MC_BEGIN(0, 0);
4449 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4450 IEM_MC_REL_JMP_S32(i32Imm);
4451 } IEM_MC_ELSE() {
4452 IEM_MC_ADVANCE_RIP();
4453 } IEM_MC_ENDIF();
4454 IEM_MC_END();
4455 }
4456 return VINF_SUCCESS;
4457}
4458
4459
4460/** Opcode 0x0f 0x8b. */
4461FNIEMOP_DEF(iemOp_jnp_Jv)
4462{
4463 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4464 IEMOP_HLP_MIN_386();
4465 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4466 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4467 {
4468 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4470
4471 IEM_MC_BEGIN(0, 0);
4472 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4473 IEM_MC_ADVANCE_RIP();
4474 } IEM_MC_ELSE() {
4475 IEM_MC_REL_JMP_S16(i16Imm);
4476 } IEM_MC_ENDIF();
4477 IEM_MC_END();
4478 }
4479 else
4480 {
4481 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4483
4484 IEM_MC_BEGIN(0, 0);
4485 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4486 IEM_MC_ADVANCE_RIP();
4487 } IEM_MC_ELSE() {
4488 IEM_MC_REL_JMP_S32(i32Imm);
4489 } IEM_MC_ENDIF();
4490 IEM_MC_END();
4491 }
4492 return VINF_SUCCESS;
4493}
4494
4495
4496/** Opcode 0x0f 0x8c. */
4497FNIEMOP_DEF(iemOp_jl_Jv)
4498{
4499 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4500 IEMOP_HLP_MIN_386();
4501 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
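    /* Signed 'less than': taken when SF != OF, e.g. after 'cmp eax, ebx'
       where eax < ebx as signed integers. */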
4502 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4503 {
4504 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4506
4507 IEM_MC_BEGIN(0, 0);
4508 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4509 IEM_MC_REL_JMP_S16(i16Imm);
4510 } IEM_MC_ELSE() {
4511 IEM_MC_ADVANCE_RIP();
4512 } IEM_MC_ENDIF();
4513 IEM_MC_END();
4514 }
4515 else
4516 {
4517 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4519
4520 IEM_MC_BEGIN(0, 0);
4521 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4522 IEM_MC_REL_JMP_S32(i32Imm);
4523 } IEM_MC_ELSE() {
4524 IEM_MC_ADVANCE_RIP();
4525 } IEM_MC_ENDIF();
4526 IEM_MC_END();
4527 }
4528 return VINF_SUCCESS;
4529}
4530
4531
4532/** Opcode 0x0f 0x8d. */
4533FNIEMOP_DEF(iemOp_jnl_Jv)
4534{
4535 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4536 IEMOP_HLP_MIN_386();
4537 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4538 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4539 {
4540 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4542
4543 IEM_MC_BEGIN(0, 0);
4544 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4545 IEM_MC_ADVANCE_RIP();
4546 } IEM_MC_ELSE() {
4547 IEM_MC_REL_JMP_S16(i16Imm);
4548 } IEM_MC_ENDIF();
4549 IEM_MC_END();
4550 }
4551 else
4552 {
4553 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4555
4556 IEM_MC_BEGIN(0, 0);
4557 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4558 IEM_MC_ADVANCE_RIP();
4559 } IEM_MC_ELSE() {
4560 IEM_MC_REL_JMP_S32(i32Imm);
4561 } IEM_MC_ENDIF();
4562 IEM_MC_END();
4563 }
4564 return VINF_SUCCESS;
4565}
4566
4567
4568/** Opcode 0x0f 0x8e. */
4569FNIEMOP_DEF(iemOp_jle_Jv)
4570{
4571 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4572 IEMOP_HLP_MIN_386();
4573 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4574 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4575 {
4576 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4578
4579 IEM_MC_BEGIN(0, 0);
4580 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4581 IEM_MC_REL_JMP_S16(i16Imm);
4582 } IEM_MC_ELSE() {
4583 IEM_MC_ADVANCE_RIP();
4584 } IEM_MC_ENDIF();
4585 IEM_MC_END();
4586 }
4587 else
4588 {
4589 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4591
4592 IEM_MC_BEGIN(0, 0);
4593 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4594 IEM_MC_REL_JMP_S32(i32Imm);
4595 } IEM_MC_ELSE() {
4596 IEM_MC_ADVANCE_RIP();
4597 } IEM_MC_ENDIF();
4598 IEM_MC_END();
4599 }
4600 return VINF_SUCCESS;
4601}
4602
4603
4604/** Opcode 0x0f 0x8f. */
4605FNIEMOP_DEF(iemOp_jnle_Jv)
4606{
4607 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4608 IEMOP_HLP_MIN_386();
4609 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4610 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4611 {
4612 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4614
4615 IEM_MC_BEGIN(0, 0);
4616 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4617 IEM_MC_ADVANCE_RIP();
4618 } IEM_MC_ELSE() {
4619 IEM_MC_REL_JMP_S16(i16Imm);
4620 } IEM_MC_ENDIF();
4621 IEM_MC_END();
4622 }
4623 else
4624 {
4625 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4627
4628 IEM_MC_BEGIN(0, 0);
4629 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4630 IEM_MC_ADVANCE_RIP();
4631 } IEM_MC_ELSE() {
4632 IEM_MC_REL_JMP_S32(i32Imm);
4633 } IEM_MC_ENDIF();
4634 IEM_MC_END();
4635 }
4636 return VINF_SUCCESS;
4637}
4638
4639
4640/** Opcode 0x0f 0x90. */
4641FNIEMOP_DEF(iemOp_seto_Eb)
4642{
4643 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4644 IEMOP_HLP_MIN_386();
4645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4646
4647 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4648 * any way. AMD says it's "unused", whatever that means. We're
4649 * ignoring for now. */
4650 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4651 {
4652 /* register target */
4653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4654 IEM_MC_BEGIN(0, 0);
4655 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4656 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4657 } IEM_MC_ELSE() {
4658 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4659 } IEM_MC_ENDIF();
4660 IEM_MC_ADVANCE_RIP();
4661 IEM_MC_END();
4662 }
4663 else
4664 {
4665 /* memory target */
4666 IEM_MC_BEGIN(0, 1);
4667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4670 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4671 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4672 } IEM_MC_ELSE() {
4673 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4674 } IEM_MC_ENDIF();
4675 IEM_MC_ADVANCE_RIP();
4676 IEM_MC_END();
4677 }
4678 return VINF_SUCCESS;
4679}
4680
4681
4682/** Opcode 0x0f 0x91. */
4683FNIEMOP_DEF(iemOp_setno_Eb)
4684{
4685 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4686 IEMOP_HLP_MIN_386();
4687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4688
4689 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4690 * any way. AMD says it's "unused", whatever that means. We're
4691 * ignoring for now. */
4692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4693 {
4694 /* register target */
4695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4696 IEM_MC_BEGIN(0, 0);
4697 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4698 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4699 } IEM_MC_ELSE() {
4700 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4701 } IEM_MC_ENDIF();
4702 IEM_MC_ADVANCE_RIP();
4703 IEM_MC_END();
4704 }
4705 else
4706 {
4707 /* memory target */
4708 IEM_MC_BEGIN(0, 1);
4709 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4712 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4713 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4714 } IEM_MC_ELSE() {
4715 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4716 } IEM_MC_ENDIF();
4717 IEM_MC_ADVANCE_RIP();
4718 IEM_MC_END();
4719 }
4720 return VINF_SUCCESS;
4721}
4722
4723
4724/** Opcode 0x0f 0x92. */
4725FNIEMOP_DEF(iemOp_setc_Eb)
4726{
4727 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4728 IEMOP_HLP_MIN_386();
4729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4730
4731 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4732 * any way. AMD says it's "unused", whatever that means. We're
4733 * ignoring for now. */
4734 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4735 {
4736 /* register target */
4737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4738 IEM_MC_BEGIN(0, 0);
4739 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4740 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4741 } IEM_MC_ELSE() {
4742 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4743 } IEM_MC_ENDIF();
4744 IEM_MC_ADVANCE_RIP();
4745 IEM_MC_END();
4746 }
4747 else
4748 {
4749 /* memory target */
4750 IEM_MC_BEGIN(0, 1);
4751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4754 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4755 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4756 } IEM_MC_ELSE() {
4757 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4758 } IEM_MC_ENDIF();
4759 IEM_MC_ADVANCE_RIP();
4760 IEM_MC_END();
4761 }
4762 return VINF_SUCCESS;
4763}
4764
4765
4766/** Opcode 0x0f 0x93. */
4767FNIEMOP_DEF(iemOp_setnc_Eb)
4768{
4769 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4770 IEMOP_HLP_MIN_386();
4771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4772
4773 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4774 * any way. AMD says it's "unused", whatever that means. We're
4775 * ignoring for now. */
4776 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4777 {
4778 /* register target */
4779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4780 IEM_MC_BEGIN(0, 0);
4781 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4782 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4783 } IEM_MC_ELSE() {
4784 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4785 } IEM_MC_ENDIF();
4786 IEM_MC_ADVANCE_RIP();
4787 IEM_MC_END();
4788 }
4789 else
4790 {
4791 /* memory target */
4792 IEM_MC_BEGIN(0, 1);
4793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4796 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4797 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4798 } IEM_MC_ELSE() {
4799 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4800 } IEM_MC_ENDIF();
4801 IEM_MC_ADVANCE_RIP();
4802 IEM_MC_END();
4803 }
4804 return VINF_SUCCESS;
4805}
4806
4807
4808/** Opcode 0x0f 0x94. */
4809FNIEMOP_DEF(iemOp_sete_Eb)
4810{
4811 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4812 IEMOP_HLP_MIN_386();
4813 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4814
4815 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4816 * any way. AMD says it's "unused", whatever that means. We're
4817 * ignoring for now. */
4818 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4819 {
4820 /* register target */
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4822 IEM_MC_BEGIN(0, 0);
4823 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4824 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4825 } IEM_MC_ELSE() {
4826 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4827 } IEM_MC_ENDIF();
4828 IEM_MC_ADVANCE_RIP();
4829 IEM_MC_END();
4830 }
4831 else
4832 {
4833 /* memory target */
4834 IEM_MC_BEGIN(0, 1);
4835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4838 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4839 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4840 } IEM_MC_ELSE() {
4841 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4842 } IEM_MC_ENDIF();
4843 IEM_MC_ADVANCE_RIP();
4844 IEM_MC_END();
4845 }
4846 return VINF_SUCCESS;
4847}
4848
4849
4850/** Opcode 0x0f 0x95. */
4851FNIEMOP_DEF(iemOp_setne_Eb)
4852{
4853 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4854 IEMOP_HLP_MIN_386();
4855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4856
4857 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4858 * any way. AMD says it's "unused", whatever that means. We're
4859 * ignoring for now. */
4860 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4861 {
4862 /* register target */
4863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4864 IEM_MC_BEGIN(0, 0);
4865 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4866 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4867 } IEM_MC_ELSE() {
4868 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4869 } IEM_MC_ENDIF();
4870 IEM_MC_ADVANCE_RIP();
4871 IEM_MC_END();
4872 }
4873 else
4874 {
4875 /* memory target */
4876 IEM_MC_BEGIN(0, 1);
4877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4880 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4881 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4882 } IEM_MC_ELSE() {
4883 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4884 } IEM_MC_ENDIF();
4885 IEM_MC_ADVANCE_RIP();
4886 IEM_MC_END();
4887 }
4888 return VINF_SUCCESS;
4889}
4890
4891
4892/** Opcode 0x0f 0x96. */
4893FNIEMOP_DEF(iemOp_setbe_Eb)
4894{
4895 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4896 IEMOP_HLP_MIN_386();
4897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
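    /* Unsigned 'below or equal': stores 1 when CF or ZF is set, e.g. after
       'cmp al, bl' where al <= bl as unsigned values. */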
4898
4899 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4900 * any way. AMD says it's "unused", whatever that means. We're
4901 * ignoring for now. */
4902 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4903 {
4904 /* register target */
4905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4906 IEM_MC_BEGIN(0, 0);
4907 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4908 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4909 } IEM_MC_ELSE() {
4910 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4911 } IEM_MC_ENDIF();
4912 IEM_MC_ADVANCE_RIP();
4913 IEM_MC_END();
4914 }
4915 else
4916 {
4917 /* memory target */
4918 IEM_MC_BEGIN(0, 1);
4919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4922 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4923 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4924 } IEM_MC_ELSE() {
4925 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4926 } IEM_MC_ENDIF();
4927 IEM_MC_ADVANCE_RIP();
4928 IEM_MC_END();
4929 }
4930 return VINF_SUCCESS;
4931}
4932
4933
4934/** Opcode 0x0f 0x97. */
4935FNIEMOP_DEF(iemOp_setnbe_Eb)
4936{
4937 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4938 IEMOP_HLP_MIN_386();
4939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4940
4941 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4942 * any way. AMD says it's "unused", whatever that means. We're
4943 * ignoring for now. */
4944 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4945 {
4946 /* register target */
4947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4948 IEM_MC_BEGIN(0, 0);
4949 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4950 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4951 } IEM_MC_ELSE() {
4952 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4953 } IEM_MC_ENDIF();
4954 IEM_MC_ADVANCE_RIP();
4955 IEM_MC_END();
4956 }
4957 else
4958 {
4959 /* memory target */
4960 IEM_MC_BEGIN(0, 1);
4961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4964 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4965 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4966 } IEM_MC_ELSE() {
4967 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4968 } IEM_MC_ENDIF();
4969 IEM_MC_ADVANCE_RIP();
4970 IEM_MC_END();
4971 }
4972 return VINF_SUCCESS;
4973}
4974
4975
4976/** Opcode 0x0f 0x98. */
4977FNIEMOP_DEF(iemOp_sets_Eb)
4978{
4979 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4980 IEMOP_HLP_MIN_386();
4981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4982
4983 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4984 * any way. AMD says it's "unused", whatever that means. We're
4985 * ignoring for now. */
4986 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4987 {
4988 /* register target */
4989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4990 IEM_MC_BEGIN(0, 0);
4991 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4992 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4993 } IEM_MC_ELSE() {
4994 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4995 } IEM_MC_ENDIF();
4996 IEM_MC_ADVANCE_RIP();
4997 IEM_MC_END();
4998 }
4999 else
5000 {
5001 /* memory target */
5002 IEM_MC_BEGIN(0, 1);
5003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5006 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5007 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5008 } IEM_MC_ELSE() {
5009 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5010 } IEM_MC_ENDIF();
5011 IEM_MC_ADVANCE_RIP();
5012 IEM_MC_END();
5013 }
5014 return VINF_SUCCESS;
5015}
5016
5017
5018/** Opcode 0x0f 0x99. */
5019FNIEMOP_DEF(iemOp_setns_Eb)
5020{
5021 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5022 IEMOP_HLP_MIN_386();
5023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5024
5025 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5026 * any way. AMD says it's "unused", whatever that means. We're
5027 * ignoring for now. */
5028 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5029 {
5030 /* register target */
5031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5032 IEM_MC_BEGIN(0, 0);
5033 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5034 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5035 } IEM_MC_ELSE() {
5036 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5037 } IEM_MC_ENDIF();
5038 IEM_MC_ADVANCE_RIP();
5039 IEM_MC_END();
5040 }
5041 else
5042 {
5043 /* memory target */
5044 IEM_MC_BEGIN(0, 1);
5045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5048 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5049 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5050 } IEM_MC_ELSE() {
5051 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5052 } IEM_MC_ENDIF();
5053 IEM_MC_ADVANCE_RIP();
5054 IEM_MC_END();
5055 }
5056 return VINF_SUCCESS;
5057}
5058
5059
5060/** Opcode 0x0f 0x9a. */
5061FNIEMOP_DEF(iemOp_setp_Eb)
5062{
5063 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5064 IEMOP_HLP_MIN_386();
5065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5066
5067 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5068 * any way. AMD says it's "unused", whatever that means. We're
5069 * ignoring for now. */
5070 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5071 {
5072 /* register target */
5073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5074 IEM_MC_BEGIN(0, 0);
5075 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5076 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5077 } IEM_MC_ELSE() {
5078 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5079 } IEM_MC_ENDIF();
5080 IEM_MC_ADVANCE_RIP();
5081 IEM_MC_END();
5082 }
5083 else
5084 {
5085 /* memory target */
5086 IEM_MC_BEGIN(0, 1);
5087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5090 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5091 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5092 } IEM_MC_ELSE() {
5093 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5094 } IEM_MC_ENDIF();
5095 IEM_MC_ADVANCE_RIP();
5096 IEM_MC_END();
5097 }
5098 return VINF_SUCCESS;
5099}
5100
5101
5102/** Opcode 0x0f 0x9b. */
5103FNIEMOP_DEF(iemOp_setnp_Eb)
5104{
5105 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5106 IEMOP_HLP_MIN_386();
5107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5108
5109 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5110 * any way. AMD says it's "unused", whatever that means. We're
5111 * ignoring for now. */
5112 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5113 {
5114 /* register target */
5115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5116 IEM_MC_BEGIN(0, 0);
5117 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5118 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5119 } IEM_MC_ELSE() {
5120 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5121 } IEM_MC_ENDIF();
5122 IEM_MC_ADVANCE_RIP();
5123 IEM_MC_END();
5124 }
5125 else
5126 {
5127 /* memory target */
5128 IEM_MC_BEGIN(0, 1);
5129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5132 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5133 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5134 } IEM_MC_ELSE() {
5135 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5136 } IEM_MC_ENDIF();
5137 IEM_MC_ADVANCE_RIP();
5138 IEM_MC_END();
5139 }
5140 return VINF_SUCCESS;
5141}
5142
5143
5144/** Opcode 0x0f 0x9c. */
5145FNIEMOP_DEF(iemOp_setl_Eb)
5146{
5147 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5148 IEMOP_HLP_MIN_386();
5149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5150
5151 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5152 * any way. AMD says it's "unused", whatever that means. We're
5153 * ignoring for now. */
5154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5155 {
5156 /* register target */
5157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5158 IEM_MC_BEGIN(0, 0);
5159 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5160 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5161 } IEM_MC_ELSE() {
5162 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5163 } IEM_MC_ENDIF();
5164 IEM_MC_ADVANCE_RIP();
5165 IEM_MC_END();
5166 }
5167 else
5168 {
5169 /* memory target */
5170 IEM_MC_BEGIN(0, 1);
5171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5175 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5176 } IEM_MC_ELSE() {
5177 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5178 } IEM_MC_ENDIF();
5179 IEM_MC_ADVANCE_RIP();
5180 IEM_MC_END();
5181 }
5182 return VINF_SUCCESS;
5183}
5184
5185
5186/** Opcode 0x0f 0x9d. */
5187FNIEMOP_DEF(iemOp_setnl_Eb)
5188{
5189 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5190 IEMOP_HLP_MIN_386();
5191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5192
5193 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5194 * any way. AMD says it's "unused", whatever that means. We're
5195 * ignoring for now. */
5196 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5197 {
5198 /* register target */
5199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5200 IEM_MC_BEGIN(0, 0);
5201 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5202 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5203 } IEM_MC_ELSE() {
5204 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5205 } IEM_MC_ENDIF();
5206 IEM_MC_ADVANCE_RIP();
5207 IEM_MC_END();
5208 }
5209 else
5210 {
5211 /* memory target */
5212 IEM_MC_BEGIN(0, 1);
5213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5216 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5217 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5218 } IEM_MC_ELSE() {
5219 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5220 } IEM_MC_ENDIF();
5221 IEM_MC_ADVANCE_RIP();
5222 IEM_MC_END();
5223 }
5224 return VINF_SUCCESS;
5225}
5226
5227
5228/** Opcode 0x0f 0x9e. */
5229FNIEMOP_DEF(iemOp_setle_Eb)
5230{
5231 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5232 IEMOP_HLP_MIN_386();
5233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5234
5235 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5236 * any way. AMD says it's "unused", whatever that means. We're
5237 * ignoring for now. */
5238 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5239 {
5240 /* register target */
5241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5242 IEM_MC_BEGIN(0, 0);
5243 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5244 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5245 } IEM_MC_ELSE() {
5246 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5247 } IEM_MC_ENDIF();
5248 IEM_MC_ADVANCE_RIP();
5249 IEM_MC_END();
5250 }
5251 else
5252 {
5253 /* memory target */
5254 IEM_MC_BEGIN(0, 1);
5255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5258 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5259 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5260 } IEM_MC_ELSE() {
5261 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5262 } IEM_MC_ENDIF();
5263 IEM_MC_ADVANCE_RIP();
5264 IEM_MC_END();
5265 }
5266 return VINF_SUCCESS;
5267}
5268
5269
5270/** Opcode 0x0f 0x9f. */
5271FNIEMOP_DEF(iemOp_setnle_Eb)
5272{
5273 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5274 IEMOP_HLP_MIN_386();
5275 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5276
5277 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5278 * any way. AMD says it's "unused", whatever that means. We're
5279 * ignoring for now. */
5280 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5281 {
5282 /* register target */
5283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5284 IEM_MC_BEGIN(0, 0);
5285 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5286 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5287 } IEM_MC_ELSE() {
5288 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5289 } IEM_MC_ENDIF();
5290 IEM_MC_ADVANCE_RIP();
5291 IEM_MC_END();
5292 }
5293 else
5294 {
5295 /* memory target */
5296 IEM_MC_BEGIN(0, 1);
5297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5300 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5301 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5302 } IEM_MC_ELSE() {
5303 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5304 } IEM_MC_ENDIF();
5305 IEM_MC_ADVANCE_RIP();
5306 IEM_MC_END();
5307 }
5308 return VINF_SUCCESS;
5309}
5310
5311
5312/**
5313 * Common 'push segment-register' helper.
5314 */
5315FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5316{
5317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5318 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5319 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5320
5321 switch (pVCpu->iem.s.enmEffOpSize)
5322 {
5323 case IEMMODE_16BIT:
5324 IEM_MC_BEGIN(0, 1);
5325 IEM_MC_LOCAL(uint16_t, u16Value);
5326 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5327 IEM_MC_PUSH_U16(u16Value);
5328 IEM_MC_ADVANCE_RIP();
5329 IEM_MC_END();
5330 break;
5331
5332 case IEMMODE_32BIT:
5333 IEM_MC_BEGIN(0, 1);
5334 IEM_MC_LOCAL(uint32_t, u32Value);
5335 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
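            /* Note: presumably a special push is needed here because real
               CPUs may do only a 16-bit write for a 32-bit 'push sreg',
               leaving the high word of the stack slot untouched. */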
5336 IEM_MC_PUSH_U32_SREG(u32Value);
5337 IEM_MC_ADVANCE_RIP();
5338 IEM_MC_END();
5339 break;
5340
5341 case IEMMODE_64BIT:
5342 IEM_MC_BEGIN(0, 1);
5343 IEM_MC_LOCAL(uint64_t, u64Value);
5344 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5345 IEM_MC_PUSH_U64(u64Value);
5346 IEM_MC_ADVANCE_RIP();
5347 IEM_MC_END();
5348 break;
5349 }
5350
5351 return VINF_SUCCESS;
5352}
5353
5354
5355/** Opcode 0x0f 0xa0. */
5356FNIEMOP_DEF(iemOp_push_fs)
5357{
5358 IEMOP_MNEMONIC(push_fs, "push fs");
5359 IEMOP_HLP_MIN_386();
5360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5361 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5362}
5363
5364
5365/** Opcode 0x0f 0xa1. */
5366FNIEMOP_DEF(iemOp_pop_fs)
5367{
5368 IEMOP_MNEMONIC(pop_fs, "pop fs");
5369 IEMOP_HLP_MIN_386();
5370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5371 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5372}
5373
5374
5375/** Opcode 0x0f 0xa2. */
5376FNIEMOP_DEF(iemOp_cpuid)
5377{
5378 IEMOP_MNEMONIC(cpuid, "cpuid");
5379 IEMOP_HLP_MIN_486(); /* not all 486es. */
5380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5381 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5382}
5383
5384
5385/**
5386 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5387 * iemOp_bts_Ev_Gv.
5388 */
5389FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5390{
5391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5392 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5393
5394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5395 {
5396 /* register destination. */
5397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5398 switch (pVCpu->iem.s.enmEffOpSize)
5399 {
5400 case IEMMODE_16BIT:
5401 IEM_MC_BEGIN(3, 0);
5402 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5403 IEM_MC_ARG(uint16_t, u16Src, 1);
5404 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5405
5406 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5407 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5408 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5409 IEM_MC_REF_EFLAGS(pEFlags);
5410 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5411
5412 IEM_MC_ADVANCE_RIP();
5413 IEM_MC_END();
5414 return VINF_SUCCESS;
5415
5416 case IEMMODE_32BIT:
5417 IEM_MC_BEGIN(3, 0);
5418 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5419 IEM_MC_ARG(uint32_t, u32Src, 1);
5420 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5421
5422 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5423 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5424 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5425 IEM_MC_REF_EFLAGS(pEFlags);
5426 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5427
5428 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5429 IEM_MC_ADVANCE_RIP();
5430 IEM_MC_END();
5431 return VINF_SUCCESS;
5432
5433 case IEMMODE_64BIT:
5434 IEM_MC_BEGIN(3, 0);
5435 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5436 IEM_MC_ARG(uint64_t, u64Src, 1);
5437 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5438
5439 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5440 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5441 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5442 IEM_MC_REF_EFLAGS(pEFlags);
5443 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5444
5445 IEM_MC_ADVANCE_RIP();
5446 IEM_MC_END();
5447 return VINF_SUCCESS;
5448
5449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5450 }
5451 }
5452 else
5453 {
5454 /* memory destination. */
5455
5456 uint32_t fAccess;
5457 if (pImpl->pfnLockedU16)
5458 fAccess = IEM_ACCESS_DATA_RW;
5459 else /* BT */
5460 fAccess = IEM_ACCESS_DATA_R;
5461
5462 /** @todo test negative bit offsets! */
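 /* For the memory forms the bit offset in Gv is signed: the SAR+SHL pairs
    below advance the effective address by (offset >> log2(width)) operand
    sized units, and the bit index within that unit is offset & (width - 1). */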
5463 switch (pVCpu->iem.s.enmEffOpSize)
5464 {
5465 case IEMMODE_16BIT:
5466 IEM_MC_BEGIN(3, 2);
5467 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5468 IEM_MC_ARG(uint16_t, u16Src, 1);
5469 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5471 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5472
5473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5474 if (pImpl->pfnLockedU16)
5475 IEMOP_HLP_DONE_DECODING();
5476 else
5477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5478 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5479 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5480 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5481 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5482 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5483 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5484 IEM_MC_FETCH_EFLAGS(EFlags);
5485
5486 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5487 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5488 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5489 else
5490 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5491 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5492
5493 IEM_MC_COMMIT_EFLAGS(EFlags);
5494 IEM_MC_ADVANCE_RIP();
5495 IEM_MC_END();
5496 return VINF_SUCCESS;
5497
5498 case IEMMODE_32BIT:
5499 IEM_MC_BEGIN(3, 2);
5500 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5501 IEM_MC_ARG(uint32_t, u32Src, 1);
5502 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5504 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5505
5506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5507 if (pImpl->pfnLockedU16)
5508 IEMOP_HLP_DONE_DECODING();
5509 else
5510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5511 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5512 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5513 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5514 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5515 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5516 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5517 IEM_MC_FETCH_EFLAGS(EFlags);
5518
5519 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5520 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5521 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5522 else
5523 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5524 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5525
5526 IEM_MC_COMMIT_EFLAGS(EFlags);
5527 IEM_MC_ADVANCE_RIP();
5528 IEM_MC_END();
5529 return VINF_SUCCESS;
5530
5531 case IEMMODE_64BIT:
5532 IEM_MC_BEGIN(3, 2);
5533 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5534 IEM_MC_ARG(uint64_t, u64Src, 1);
5535 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5537 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5538
5539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5540 if (pImpl->pfnLockedU16)
5541 IEMOP_HLP_DONE_DECODING();
5542 else
5543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5544 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5545 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5546 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5547 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5548 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5549 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5550 IEM_MC_FETCH_EFLAGS(EFlags);
5551
5552 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5553 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5554 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5555 else
5556 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5557 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5558
5559 IEM_MC_COMMIT_EFLAGS(EFlags);
5560 IEM_MC_ADVANCE_RIP();
5561 IEM_MC_END();
5562 return VINF_SUCCESS;
5563
5564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5565 }
5566 }
5567}
5568
5569
5570/** Opcode 0x0f 0xa3. */
5571FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5572{
5573 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5574 IEMOP_HLP_MIN_386();
5575 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5576}
5577
5578
5579/**
5580 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5581 */
5582FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5583{
5584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5585 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5586
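 /* The shift count is truncated to the operand width (mod 32, or mod 64 for
    64-bit operands) as on real CPUs; AF and OF are left undefined above. */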
5587 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5588 {
5589 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5591
5592 switch (pVCpu->iem.s.enmEffOpSize)
5593 {
5594 case IEMMODE_16BIT:
5595 IEM_MC_BEGIN(4, 0);
5596 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5597 IEM_MC_ARG(uint16_t, u16Src, 1);
5598 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5599 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5600
5601 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5602 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5603 IEM_MC_REF_EFLAGS(pEFlags);
5604 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5605
5606 IEM_MC_ADVANCE_RIP();
5607 IEM_MC_END();
5608 return VINF_SUCCESS;
5609
5610 case IEMMODE_32BIT:
5611 IEM_MC_BEGIN(4, 0);
5612 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5613 IEM_MC_ARG(uint32_t, u32Src, 1);
5614 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5615 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5616
5617 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5618 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5619 IEM_MC_REF_EFLAGS(pEFlags);
5620 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5621
5622 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5623 IEM_MC_ADVANCE_RIP();
5624 IEM_MC_END();
5625 return VINF_SUCCESS;
5626
5627 case IEMMODE_64BIT:
5628 IEM_MC_BEGIN(4, 0);
5629 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5630 IEM_MC_ARG(uint64_t, u64Src, 1);
5631 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5632 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5633
5634 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5635 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5636 IEM_MC_REF_EFLAGS(pEFlags);
5637 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5638
5639 IEM_MC_ADVANCE_RIP();
5640 IEM_MC_END();
5641 return VINF_SUCCESS;
5642
5643 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5644 }
5645 }
5646 else
5647 {
5648 switch (pVCpu->iem.s.enmEffOpSize)
5649 {
5650 case IEMMODE_16BIT:
5651 IEM_MC_BEGIN(4, 2);
5652 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5653 IEM_MC_ARG(uint16_t, u16Src, 1);
5654 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5655 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5657
5658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5659 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5660 IEM_MC_ASSIGN(cShiftArg, cShift);
5661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5662 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5663 IEM_MC_FETCH_EFLAGS(EFlags);
5664 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5665 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5666
5667 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5668 IEM_MC_COMMIT_EFLAGS(EFlags);
5669 IEM_MC_ADVANCE_RIP();
5670 IEM_MC_END();
5671 return VINF_SUCCESS;
5672
5673 case IEMMODE_32BIT:
5674 IEM_MC_BEGIN(4, 2);
5675 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5676 IEM_MC_ARG(uint32_t, u32Src, 1);
5677 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5678 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5679 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5680
5681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5682 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5683 IEM_MC_ASSIGN(cShiftArg, cShift);
5684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5685 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5686 IEM_MC_FETCH_EFLAGS(EFlags);
5687 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5688 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5689
5690 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5691 IEM_MC_COMMIT_EFLAGS(EFlags);
5692 IEM_MC_ADVANCE_RIP();
5693 IEM_MC_END();
5694 return VINF_SUCCESS;
5695
5696 case IEMMODE_64BIT:
5697 IEM_MC_BEGIN(4, 2);
5698 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5699 IEM_MC_ARG(uint64_t, u64Src, 1);
5700 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5701 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5703
5704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5705 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5706 IEM_MC_ASSIGN(cShiftArg, cShift);
5707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5708 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5709 IEM_MC_FETCH_EFLAGS(EFlags);
5710 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5711 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5712
5713 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5714 IEM_MC_COMMIT_EFLAGS(EFlags);
5715 IEM_MC_ADVANCE_RIP();
5716 IEM_MC_END();
5717 return VINF_SUCCESS;
5718
5719 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5720 }
5721 }
5722}
5723
5724
5725/**
5726 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5727 */
5728FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5729{
5730 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5731 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5732
5733 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5734 {
5735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5736
5737 switch (pVCpu->iem.s.enmEffOpSize)
5738 {
5739 case IEMMODE_16BIT:
5740 IEM_MC_BEGIN(4, 0);
5741 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5742 IEM_MC_ARG(uint16_t, u16Src, 1);
5743 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5744 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5745
5746 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5747 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5748 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5749 IEM_MC_REF_EFLAGS(pEFlags);
5750 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5751
5752 IEM_MC_ADVANCE_RIP();
5753 IEM_MC_END();
5754 return VINF_SUCCESS;
5755
5756 case IEMMODE_32BIT:
5757 IEM_MC_BEGIN(4, 0);
5758 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5759 IEM_MC_ARG(uint32_t, u32Src, 1);
5760 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5761 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5762
5763 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5764 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5765 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5766 IEM_MC_REF_EFLAGS(pEFlags);
5767 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5768
5769 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5770 IEM_MC_ADVANCE_RIP();
5771 IEM_MC_END();
5772 return VINF_SUCCESS;
5773
5774 case IEMMODE_64BIT:
5775 IEM_MC_BEGIN(4, 0);
5776 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5777 IEM_MC_ARG(uint64_t, u64Src, 1);
5778 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5779 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5780
5781 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5782 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5783 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5784 IEM_MC_REF_EFLAGS(pEFlags);
5785 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5786
5787 IEM_MC_ADVANCE_RIP();
5788 IEM_MC_END();
5789 return VINF_SUCCESS;
5790
5791 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5792 }
5793 }
5794 else
5795 {
5796 switch (pVCpu->iem.s.enmEffOpSize)
5797 {
5798 case IEMMODE_16BIT:
5799 IEM_MC_BEGIN(4, 2);
5800 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5801 IEM_MC_ARG(uint16_t, u16Src, 1);
5802 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5803 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5805
5806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5808 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5809 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5810 IEM_MC_FETCH_EFLAGS(EFlags);
5811 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5812 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5813
5814 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5815 IEM_MC_COMMIT_EFLAGS(EFlags);
5816 IEM_MC_ADVANCE_RIP();
5817 IEM_MC_END();
5818 return VINF_SUCCESS;
5819
5820 case IEMMODE_32BIT:
5821 IEM_MC_BEGIN(4, 2);
5822 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5823 IEM_MC_ARG(uint32_t, u32Src, 1);
5824 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5825 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5827
5828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5830 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5831 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5832 IEM_MC_FETCH_EFLAGS(EFlags);
5833 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5834 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5835
5836 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5837 IEM_MC_COMMIT_EFLAGS(EFlags);
5838 IEM_MC_ADVANCE_RIP();
5839 IEM_MC_END();
5840 return VINF_SUCCESS;
5841
5842 case IEMMODE_64BIT:
5843 IEM_MC_BEGIN(4, 2);
5844 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5845 IEM_MC_ARG(uint64_t, u64Src, 1);
5846 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5847 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5849
5850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5852 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5853 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5854 IEM_MC_FETCH_EFLAGS(EFlags);
5855 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5856 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5857
5858 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5859 IEM_MC_COMMIT_EFLAGS(EFlags);
5860 IEM_MC_ADVANCE_RIP();
5861 IEM_MC_END();
5862 return VINF_SUCCESS;
5863
5864 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5865 }
5866 }
5867}
5868
5869
5870
5871/** Opcode 0x0f 0xa4. */
5872FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5873{
5874 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5875 IEMOP_HLP_MIN_386();
5876 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5877}
5878
5879
5880/** Opcode 0x0f 0xa5. */
5881FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5882{
5883 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5884 IEMOP_HLP_MIN_386();
5885 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5886}
5887
5888
5889/** Opcode 0x0f 0xa8. */
5890FNIEMOP_DEF(iemOp_push_gs)
5891{
5892 IEMOP_MNEMONIC(push_gs, "push gs");
5893 IEMOP_HLP_MIN_386();
5894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5895 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5896}
5897
5898
5899/** Opcode 0x0f 0xa9. */
5900FNIEMOP_DEF(iemOp_pop_gs)
5901{
5902 IEMOP_MNEMONIC(pop_gs, "pop gs");
5903 IEMOP_HLP_MIN_386();
5904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5905 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5906}
5907
5908
5909/** Opcode 0x0f 0xaa. */
5910FNIEMOP_DEF(iemOp_rsm)
5911{
5912 IEMOP_MNEMONIC(rsm, "rsm");
5913 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
5914 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
5915 * intercept). */
5916 IEMOP_BITCH_ABOUT_STUB();
5917 return IEMOP_RAISE_INVALID_OPCODE();
5918}
5919
5922
5923/** Opcode 0x0f 0xab. */
5924FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5925{
5926 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5927 IEMOP_HLP_MIN_386();
5928 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5929}
5930
5931
5932/** Opcode 0x0f 0xac. */
5933FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5934{
5935 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5936 IEMOP_HLP_MIN_386();
5937 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5938}
5939
5940
5941/** Opcode 0x0f 0xad. */
5942FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5943{
5944 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5945 IEMOP_HLP_MIN_386();
5946 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5947}
5948
5949
5950/** Opcode 0x0f 0xae mem/0. */
5951FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5952{
5953 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5954 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5955 return IEMOP_RAISE_INVALID_OPCODE();
5956
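 /* The 512-byte save area must be 16-byte aligned per the SDM; the alignment
    check is expected to live in iemCImpl_fxsave. */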
5957 IEM_MC_BEGIN(3, 1);
5958 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5959 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5960 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5963 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5964 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5965 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5966 IEM_MC_END();
5967 return VINF_SUCCESS;
5968}
5969
5970
5971/** Opcode 0x0f 0xae mem/1. */
5972FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5973{
5974 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5975 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5976 return IEMOP_RAISE_INVALID_OPCODE();
5977
5978 IEM_MC_BEGIN(3, 1);
5979 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5980 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5981 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5984 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5985 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5986 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5987 IEM_MC_END();
5988 return VINF_SUCCESS;
5989}
5990
5991
5992/**
5993 * @opmaps grp15
5994 * @opcode !11/2
5995 * @oppfx none
5996 * @opcpuid sse
5997 * @opgroup og_sse_mxcsrsm
5998 * @opxcpttype 5
5999 * @optest op1=0 -> mxcsr=0
6000 * @optest op1=0x2083 -> mxcsr=0x2083
6001 * @optest op1=0xfffffffe -> value.xcpt=0xd
6002 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6003 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6004 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6005 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6006 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6007 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6008 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6009 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6010 */
6011FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6012{
6013 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6014 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6015 return IEMOP_RAISE_INVALID_OPCODE();
6016
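 /* iemCImpl_ldmxcsr raises #GP(0) when reserved MXCSR bits are set, cf. the
    op1=0xfffffffe test above. */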
6017 IEM_MC_BEGIN(2, 0);
6018 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6019 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6022 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6023 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6024 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6025 IEM_MC_END();
6026 return VINF_SUCCESS;
6027}
6028
6029
6030/**
6031 * @opmaps grp15
6032 * @opcode !11/3
6033 * @oppfx none
6034 * @opcpuid sse
6035 * @opgroup og_sse_mxcsrsm
6036 * @opxcpttype 5
6037 * @optest mxcsr=0 -> op1=0
6038 * @optest mxcsr=0x2083 -> op1=0x2083
6039 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6040 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6041 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6042 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6043 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6044 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6045 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6046 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6047 */
6048FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6049{
6050 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6051 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6052 return IEMOP_RAISE_INVALID_OPCODE();
6053
6054 IEM_MC_BEGIN(2, 0);
6055 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6056 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6059 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6060 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6061 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6062 IEM_MC_END();
6063 return VINF_SUCCESS;
6064}
6065
6066
6067/**
6068 * @opmaps grp15
6069 * @opcode !11/4
6070 * @oppfx none
6071 * @opcpuid xsave
6072 * @opgroup og_system
6073 * @opxcpttype none
6074 */
6075FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6076{
6077 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
6078 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6079 return IEMOP_RAISE_INVALID_OPCODE();
6080
6081 IEM_MC_BEGIN(3, 0);
6082 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6083 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6084 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6087 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6088 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6089 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6090 IEM_MC_END();
6091 return VINF_SUCCESS;
6092}
6093
6094
6095/**
6096 * @opmaps grp15
6097 * @opcode !11/5
6098 * @oppfx none
6099 * @opcpuid xsave
6100 * @opgroup og_system
6101 * @opxcpttype none
6102 */
6103FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6104{
6105 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6106 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6107 return IEMOP_RAISE_INVALID_OPCODE();
6108
6109 IEM_MC_BEGIN(3, 0);
6110 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6111 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6112 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6115 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6116 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6117 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6118 IEM_MC_END();
6119 return VINF_SUCCESS;
6120}
6121
6122/** Opcode 0x0f 0xae mem/6. */
6123FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6124
6125/**
6126 * @opmaps grp15
6127 * @opcode !11/7
6128 * @oppfx none
6129 * @opcpuid clfsh
6130 * @opgroup og_cachectl
6131 * @optest op1=1 ->
6132 */
6133FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6134{
6135 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6136 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6137 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6138
6139 IEM_MC_BEGIN(2, 0);
6140 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6141 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6144 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6145 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6146 IEM_MC_END();
6147 return VINF_SUCCESS;
6148}
6149
6150/**
6151 * @opmaps grp15
6152 * @opcode !11/7
6153 * @oppfx 0x66
6154 * @opcpuid clflushopt
6155 * @opgroup og_cachectl
6156 * @optest op1=1 ->
6157 */
6158FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6159{
6160 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6161 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6162 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6163
6164 IEM_MC_BEGIN(2, 0);
6165 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6166 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6169 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6170 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173}
6174
6175
6176/** Opcode 0x0f 0xae 11b/5. */
6177FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6178{
6179 RT_NOREF_PV(bRm);
6180 IEMOP_MNEMONIC(lfence, "lfence");
6181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6182 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6183 return IEMOP_RAISE_INVALID_OPCODE();
6184
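 /* If the host CPU lacks SSE2 (and thus LFENCE), iemAImpl_alt_mem_fence is
    used instead, presumably a locked operation with equivalent ordering. */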
6185 IEM_MC_BEGIN(0, 0);
6186 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6187 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6188 else
6189 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6190 IEM_MC_ADVANCE_RIP();
6191 IEM_MC_END();
6192 return VINF_SUCCESS;
6193}
6194
6195
6196/** Opcode 0x0f 0xae 11b/6. */
6197FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6198{
6199 RT_NOREF_PV(bRm);
6200 IEMOP_MNEMONIC(mfence, "mfence");
6201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6202 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6203 return IEMOP_RAISE_INVALID_OPCODE();
6204
6205 IEM_MC_BEGIN(0, 0);
6206 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6207 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6208 else
6209 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6210 IEM_MC_ADVANCE_RIP();
6211 IEM_MC_END();
6212 return VINF_SUCCESS;
6213}
6214
6215
6216/** Opcode 0x0f 0xae 11b/7. */
6217FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6218{
6219 RT_NOREF_PV(bRm);
6220 IEMOP_MNEMONIC(sfence, "sfence");
6221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6222 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6223 return IEMOP_RAISE_INVALID_OPCODE();
6224
6225 IEM_MC_BEGIN(0, 0);
6226 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6227 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6228 else
6229 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6230 IEM_MC_ADVANCE_RIP();
6231 IEM_MC_END();
6232 return VINF_SUCCESS;
6233}
6234
6235
6236/** Opcode 0xf3 0x0f 0xae 11b/0. */
6237FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6238
6239/** Opcode 0xf3 0x0f 0xae 11b/1. */
6240FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6241
6242/** Opcode 0xf3 0x0f 0xae 11b/2. */
6243FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6244
6245/** Opcode 0xf3 0x0f 0xae 11b/3. */
6246FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6247
6248
6249/**
6250 * Group 15 jump table for register variant.
6251 */
6252IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6253{ /* pfx: none, 066h, 0f3h, 0f2h */
6254 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6255 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6256 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6257 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6258 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6259 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6260 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6261 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6262};
6263AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6264
6265
6266/**
6267 * Group 15 jump table for memory variant.
6268 */
6269IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6270{ /* pfx: none, 066h, 0f3h, 0f2h */
6271 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6272 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6273 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6274 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6275 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6276 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6277 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6278 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6279};
6280AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6281
6282
6283/** Opcode 0x0f 0xae. */
6284FNIEMOP_DEF(iemOp_Grp15)
6285{
6286 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
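 /* Dispatch on ModR/M.reg * 4 + prefix index (none/66h/F3h/F2h), using
    separate tables for the register and memory forms. */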
6288 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6289 /* register, register */
6290 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6291 + pVCpu->iem.s.idxPrefix], bRm);
6292 /* memory, register */
6293 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6294 + pVCpu->iem.s.idxPrefix], bRm);
6295}
6296
6297
6298/** Opcode 0x0f 0xaf. */
6299FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6300{
6301 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6302 IEMOP_HLP_MIN_386();
6303 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6304 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6305}
6306
6307
6308/** Opcode 0x0f 0xb0. */
6309FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6310{
6311 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6312 IEMOP_HLP_MIN_486();
6313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6314
6315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6316 {
6317 IEMOP_HLP_DONE_DECODING();
6318 IEM_MC_BEGIN(4, 0);
6319 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6320 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6321 IEM_MC_ARG(uint8_t, u8Src, 2);
6322 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6323
6324 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6325 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6326 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6327 IEM_MC_REF_EFLAGS(pEFlags);
6328 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6329 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6330 else
6331 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6332
6333 IEM_MC_ADVANCE_RIP();
6334 IEM_MC_END();
6335 }
6336 else
6337 {
6338 IEM_MC_BEGIN(4, 3);
6339 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6340 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6341 IEM_MC_ARG(uint8_t, u8Src, 2);
6342 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6344 IEM_MC_LOCAL(uint8_t, u8Al);
6345
6346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6347 IEMOP_HLP_DONE_DECODING();
6348 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6349 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6350 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6351 IEM_MC_FETCH_EFLAGS(EFlags);
6352 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6353 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6354 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6355 else
6356 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6357
6358 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6359 IEM_MC_COMMIT_EFLAGS(EFlags);
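 /* The worker updates the local AL copy on a failed compare; writing it back
    unconditionally is safe since it is left unchanged on success. */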
6360 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6361 IEM_MC_ADVANCE_RIP();
6362 IEM_MC_END();
6363 }
6364 return VINF_SUCCESS;
6365}
6366
6367/** Opcode 0x0f 0xb1. */
6368FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6369{
6370 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6371 IEMOP_HLP_MIN_486();
6372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6373
6374 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6375 {
6376 IEMOP_HLP_DONE_DECODING();
6377 switch (pVCpu->iem.s.enmEffOpSize)
6378 {
6379 case IEMMODE_16BIT:
6380 IEM_MC_BEGIN(4, 0);
6381 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6382 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6383 IEM_MC_ARG(uint16_t, u16Src, 2);
6384 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6385
6386 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6387 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6388 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6389 IEM_MC_REF_EFLAGS(pEFlags);
6390 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6391 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6392 else
6393 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6394
6395 IEM_MC_ADVANCE_RIP();
6396 IEM_MC_END();
6397 return VINF_SUCCESS;
6398
6399 case IEMMODE_32BIT:
6400 IEM_MC_BEGIN(4, 0);
6401 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6402 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6403 IEM_MC_ARG(uint32_t, u32Src, 2);
6404 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6405
6406 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6407 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6408 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6409 IEM_MC_REF_EFLAGS(pEFlags);
6410 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6411 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6412 else
6413 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6414
6415 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6416 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6417 IEM_MC_ADVANCE_RIP();
6418 IEM_MC_END();
6419 return VINF_SUCCESS;
6420
6421 case IEMMODE_64BIT:
6422 IEM_MC_BEGIN(4, 0);
6423 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6424 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
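 /* On 32-bit hosts the 64-bit source is passed by reference, presumably
    because the assembly worker cannot take a 64-bit value argument there. */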
6425#ifdef RT_ARCH_X86
6426 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6427#else
6428 IEM_MC_ARG(uint64_t, u64Src, 2);
6429#endif
6430 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6431
6432 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6433 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6434 IEM_MC_REF_EFLAGS(pEFlags);
6435#ifdef RT_ARCH_X86
6436 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6437 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6438 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6439 else
6440 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6441#else
6442 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6443 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6444 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6445 else
6446 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6447#endif
6448
6449 IEM_MC_ADVANCE_RIP();
6450 IEM_MC_END();
6451 return VINF_SUCCESS;
6452
6453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6454 }
6455 }
6456 else
6457 {
6458 switch (pVCpu->iem.s.enmEffOpSize)
6459 {
6460 case IEMMODE_16BIT:
6461 IEM_MC_BEGIN(4, 3);
6462 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6463 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6464 IEM_MC_ARG(uint16_t, u16Src, 2);
6465 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6467 IEM_MC_LOCAL(uint16_t, u16Ax);
6468
6469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6470 IEMOP_HLP_DONE_DECODING();
6471 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6472 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6473 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6474 IEM_MC_FETCH_EFLAGS(EFlags);
6475 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6476 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6477 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6478 else
6479 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6480
6481 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6482 IEM_MC_COMMIT_EFLAGS(EFlags);
6483 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6484 IEM_MC_ADVANCE_RIP();
6485 IEM_MC_END();
6486 return VINF_SUCCESS;
6487
6488 case IEMMODE_32BIT:
6489 IEM_MC_BEGIN(4, 3);
6490 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6491 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6492 IEM_MC_ARG(uint32_t, u32Src, 2);
6493 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6495 IEM_MC_LOCAL(uint32_t, u32Eax);
6496
6497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6498 IEMOP_HLP_DONE_DECODING();
6499 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6500 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6501 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6502 IEM_MC_FETCH_EFLAGS(EFlags);
6503 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6504 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6505 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6506 else
6507 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6508
6509 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6510 IEM_MC_COMMIT_EFLAGS(EFlags);
6511 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6512 IEM_MC_ADVANCE_RIP();
6513 IEM_MC_END();
6514 return VINF_SUCCESS;
6515
6516 case IEMMODE_64BIT:
6517 IEM_MC_BEGIN(4, 3);
6518 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6519 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6520#ifdef RT_ARCH_X86
6521 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6522#else
6523 IEM_MC_ARG(uint64_t, u64Src, 2);
6524#endif
6525 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6527 IEM_MC_LOCAL(uint64_t, u64Rax);
6528
6529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6530 IEMOP_HLP_DONE_DECODING();
6531 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6532 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6533 IEM_MC_FETCH_EFLAGS(EFlags);
6534 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6535#ifdef RT_ARCH_X86
6536 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6537 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6538 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6539 else
6540 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6541#else
6542 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6543 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6544 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6545 else
6546 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6547#endif
6548
6549 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6550 IEM_MC_COMMIT_EFLAGS(EFlags);
6551 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6552 IEM_MC_ADVANCE_RIP();
6553 IEM_MC_END();
6554 return VINF_SUCCESS;
6555
6556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6557 }
6558 }
6559}
6560
6561
6562FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6563{
6564 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6565 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6566
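 /* The Mp operand is a far pointer in memory: the offset (16/32/64 bits,
    by operand size) comes first, followed by the 16-bit selector. */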
6567 switch (pVCpu->iem.s.enmEffOpSize)
6568 {
6569 case IEMMODE_16BIT:
6570 IEM_MC_BEGIN(5, 1);
6571 IEM_MC_ARG(uint16_t, uSel, 0);
6572 IEM_MC_ARG(uint16_t, offSeg, 1);
6573 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6574 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6575 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6576 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6579 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6580 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6581 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6582 IEM_MC_END();
6583 return VINF_SUCCESS;
6584
6585 case IEMMODE_32BIT:
6586 IEM_MC_BEGIN(5, 1);
6587 IEM_MC_ARG(uint16_t, uSel, 0);
6588 IEM_MC_ARG(uint32_t, offSeg, 1);
6589 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6590 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6591 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6592 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6595 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6596 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6597 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6598 IEM_MC_END();
6599 return VINF_SUCCESS;
6600
6601 case IEMMODE_64BIT:
6602 IEM_MC_BEGIN(5, 1);
6603 IEM_MC_ARG(uint16_t, uSel, 0);
6604 IEM_MC_ARG(uint64_t, offSeg, 1);
6605 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6606 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6607 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6608 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6611 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
6612 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6613 else
6614 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6615 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6616 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6617 IEM_MC_END();
6618 return VINF_SUCCESS;
6619
6620 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6621 }
6622}
6623
6624
6625/** Opcode 0x0f 0xb2. */
6626FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6627{
6628 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6629 IEMOP_HLP_MIN_386();
6630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6631 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6632 return IEMOP_RAISE_INVALID_OPCODE();
6633 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6634}
6635
6636
6637/** Opcode 0x0f 0xb3. */
6638FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6639{
6640 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6641 IEMOP_HLP_MIN_386();
6642 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6643}
6644
6645
6646/** Opcode 0x0f 0xb4. */
6647FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6648{
6649 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6650 IEMOP_HLP_MIN_386();
6651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6653 return IEMOP_RAISE_INVALID_OPCODE();
6654 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6655}
6656
6657
6658/** Opcode 0x0f 0xb5. */
6659FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6660{
6661 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6662 IEMOP_HLP_MIN_386();
6663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6664 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6665 return IEMOP_RAISE_INVALID_OPCODE();
6666 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6667}
6668
6669
6670/** Opcode 0x0f 0xb6. */
6671FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6672{
6673 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6674 IEMOP_HLP_MIN_386();
6675
6676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6677
6678 /*
6679 * If rm is denoting a register, no more instruction bytes.
6680 */
6681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6682 {
6683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6684 switch (pVCpu->iem.s.enmEffOpSize)
6685 {
6686 case IEMMODE_16BIT:
6687 IEM_MC_BEGIN(0, 1);
6688 IEM_MC_LOCAL(uint16_t, u16Value);
6689 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6690 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6691 IEM_MC_ADVANCE_RIP();
6692 IEM_MC_END();
6693 return VINF_SUCCESS;
6694
6695 case IEMMODE_32BIT:
6696 IEM_MC_BEGIN(0, 1);
6697 IEM_MC_LOCAL(uint32_t, u32Value);
6698 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6699 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6700 IEM_MC_ADVANCE_RIP();
6701 IEM_MC_END();
6702 return VINF_SUCCESS;
6703
6704 case IEMMODE_64BIT:
6705 IEM_MC_BEGIN(0, 1);
6706 IEM_MC_LOCAL(uint64_t, u64Value);
6707 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6708 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6709 IEM_MC_ADVANCE_RIP();
6710 IEM_MC_END();
6711 return VINF_SUCCESS;
6712
6713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6714 }
6715 }
6716 else
6717 {
6718 /*
6719 * We're loading a register from memory.
6720 */
6721 switch (pVCpu->iem.s.enmEffOpSize)
6722 {
6723 case IEMMODE_16BIT:
6724 IEM_MC_BEGIN(0, 2);
6725 IEM_MC_LOCAL(uint16_t, u16Value);
6726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6729 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6730 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6731 IEM_MC_ADVANCE_RIP();
6732 IEM_MC_END();
6733 return VINF_SUCCESS;
6734
6735 case IEMMODE_32BIT:
6736 IEM_MC_BEGIN(0, 2);
6737 IEM_MC_LOCAL(uint32_t, u32Value);
6738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6741 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6742 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6743 IEM_MC_ADVANCE_RIP();
6744 IEM_MC_END();
6745 return VINF_SUCCESS;
6746
6747 case IEMMODE_64BIT:
6748 IEM_MC_BEGIN(0, 2);
6749 IEM_MC_LOCAL(uint64_t, u64Value);
6750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6753 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6754 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6755 IEM_MC_ADVANCE_RIP();
6756 IEM_MC_END();
6757 return VINF_SUCCESS;
6758
6759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6760 }
6761 }
6762}
6763
6764
6765/** Opcode 0x0f 0xb7. */
6766FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6767{
6768 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6769 IEMOP_HLP_MIN_386();
6770
6771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6772
6773 /** @todo Not entirely sure how the operand size prefix is handled here,
6774 * assuming that it will be ignored. Would be nice to have a few
6775 * tests for this. */
6776 /*
6777 * If rm is denoting a register, no more instruction bytes.
6778 */
6779 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6780 {
6781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6782 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6783 {
6784 IEM_MC_BEGIN(0, 1);
6785 IEM_MC_LOCAL(uint32_t, u32Value);
6786 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6787 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6788 IEM_MC_ADVANCE_RIP();
6789 IEM_MC_END();
6790 }
6791 else
6792 {
6793 IEM_MC_BEGIN(0, 1);
6794 IEM_MC_LOCAL(uint64_t, u64Value);
6795 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6796 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6797 IEM_MC_ADVANCE_RIP();
6798 IEM_MC_END();
6799 }
6800 }
6801 else
6802 {
6803 /*
6804 * We're loading a register from memory.
6805 */
6806 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6807 {
6808 IEM_MC_BEGIN(0, 2);
6809 IEM_MC_LOCAL(uint32_t, u32Value);
6810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6813 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6814 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6815 IEM_MC_ADVANCE_RIP();
6816 IEM_MC_END();
6817 }
6818 else
6819 {
6820 IEM_MC_BEGIN(0, 2);
6821 IEM_MC_LOCAL(uint64_t, u64Value);
6822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6825 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6826 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6827 IEM_MC_ADVANCE_RIP();
6828 IEM_MC_END();
6829 }
6830 }
6831 return VINF_SUCCESS;
6832}
6833
6834
6835/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6836FNIEMOP_UD_STUB(iemOp_jmpe);
6837/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6838FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6839
6840
6841/**
6842 * @opcode 0xb9
6843 * @opinvalid intel-modrm
6844 * @optest ->
6845 */
6846FNIEMOP_DEF(iemOp_Grp10)
6847{
6848 /*
6849 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
6850 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6851 */
6852 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6853 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6854 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6855}
6856
6857
6858/** Opcode 0x0f 0xba. */
6859FNIEMOP_DEF(iemOp_Grp8)
6860{
6861 IEMOP_HLP_MIN_386();
6862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6863 PCIEMOPBINSIZES pImpl;
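 /* /0../3 are invalid; /4../7 select bt/bts/btr/btc. For these Ib forms the
    bit offset is truncated to the operand width and no effective address
    adjustment takes place, unlike the Gv forms above. */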
6864 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6865 {
6866 case 0: case 1: case 2: case 3:
6867 /* Both AMD and Intel want full modr/m decoding and imm8. */
6868 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6869 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6870 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6871 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6872 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6874 }
6875 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6876
6877 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6878 {
6879 /* register destination. */
6880 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6882
6883 switch (pVCpu->iem.s.enmEffOpSize)
6884 {
6885 case IEMMODE_16BIT:
6886 IEM_MC_BEGIN(3, 0);
6887 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6888 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6889 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6890
6891 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6892 IEM_MC_REF_EFLAGS(pEFlags);
6893 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6894
6895 IEM_MC_ADVANCE_RIP();
6896 IEM_MC_END();
6897 return VINF_SUCCESS;
6898
6899 case IEMMODE_32BIT:
6900 IEM_MC_BEGIN(3, 0);
6901 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6902 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6903 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6904
6905 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6906 IEM_MC_REF_EFLAGS(pEFlags);
6907 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6908
6909 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6910 IEM_MC_ADVANCE_RIP();
6911 IEM_MC_END();
6912 return VINF_SUCCESS;
6913
6914 case IEMMODE_64BIT:
6915 IEM_MC_BEGIN(3, 0);
6916 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6917 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6918 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6919
6920 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6921 IEM_MC_REF_EFLAGS(pEFlags);
6922 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6923
6924 IEM_MC_ADVANCE_RIP();
6925 IEM_MC_END();
6926 return VINF_SUCCESS;
6927
6928 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6929 }
6930 }
6931 else
6932 {
6933 /* memory destination. */
6934
6935 uint32_t fAccess;
6936 if (pImpl->pfnLockedU16)
6937 fAccess = IEM_ACCESS_DATA_RW;
6938 else /* BT */
6939 fAccess = IEM_ACCESS_DATA_R;
6940
6941 /** @todo test negative bit offsets! */
6942 switch (pVCpu->iem.s.enmEffOpSize)
6943 {
6944 case IEMMODE_16BIT:
6945 IEM_MC_BEGIN(3, 1);
6946 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6947 IEM_MC_ARG(uint16_t, u16Src, 1);
6948 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6950
6951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6952 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6953 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6954 if (pImpl->pfnLockedU16)
6955 IEMOP_HLP_DONE_DECODING();
6956 else
6957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6958 IEM_MC_FETCH_EFLAGS(EFlags);
6959 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6960 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6961 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6962 else
6963 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6964 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6965
6966 IEM_MC_COMMIT_EFLAGS(EFlags);
6967 IEM_MC_ADVANCE_RIP();
6968 IEM_MC_END();
6969 return VINF_SUCCESS;
6970
6971 case IEMMODE_32BIT:
6972 IEM_MC_BEGIN(3, 1);
6973 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6974 IEM_MC_ARG(uint32_t, u32Src, 1);
6975 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6977
6978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6979 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6980 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6981 if (pImpl->pfnLockedU16)
6982 IEMOP_HLP_DONE_DECODING();
6983 else
6984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6985 IEM_MC_FETCH_EFLAGS(EFlags);
6986 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6987 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6988 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6989 else
6990 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6991 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6992
6993 IEM_MC_COMMIT_EFLAGS(EFlags);
6994 IEM_MC_ADVANCE_RIP();
6995 IEM_MC_END();
6996 return VINF_SUCCESS;
6997
6998 case IEMMODE_64BIT:
6999 IEM_MC_BEGIN(3, 1);
7000 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7001 IEM_MC_ARG(uint64_t, u64Src, 1);
7002 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7004
7005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7006 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7007 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7008 if (pImpl->pfnLockedU16)
7009 IEMOP_HLP_DONE_DECODING();
7010 else
7011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7012 IEM_MC_FETCH_EFLAGS(EFlags);
7013 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7014 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7015 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7016 else
7017 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7018 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7019
7020 IEM_MC_COMMIT_EFLAGS(EFlags);
7021 IEM_MC_ADVANCE_RIP();
7022 IEM_MC_END();
7023 return VINF_SUCCESS;
7024
7025 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7026 }
7027 }
7028}
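
/* A standalone sketch (illustration only, not used by the emulator) of the
   architectural rule implemented by the 'u8Bit & 0x0f/0x1f/0x3f' masking
   above: the immediate bit offset of BT/BTS/BTR/BTC wraps modulo the
   operand width, for both register and memory destinations. */
#if 0
# include <stdint.h>
# include <stdio.h>

/* What 'bt r/m32, imm8' would put into CF: the immediate wraps mod 32. */
static int BtImm32(uint32_t uDst, uint8_t u8Bit)
{
    return (int)((uDst >> (u8Bit & 0x1f)) & 1);
}

int main(void)
{
    printf("%d\n", BtImm32(UINT32_C(0x00000008), 3));  /* 1: bit 3 is set. */
    printf("%d\n", BtImm32(UINT32_C(0x00000008), 35)); /* 1: 35 mod 32 = 3. */
    return 0;
}
#endif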
7029
7030
7031/** Opcode 0x0f 0xbb. */
7032FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7033{
7034 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7035 IEMOP_HLP_MIN_386();
7036 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7037}
7038
7039
7040/** Opcode 0x0f 0xbc. */
7041FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7042{
7043 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7044 IEMOP_HLP_MIN_386();
7045 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7046 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7047}
7048
7049
7050/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7051FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7052
7053
7054/** Opcode 0x0f 0xbd. */
7055FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7056{
7057 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7058 IEMOP_HLP_MIN_386();
7059 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7060 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7061}
7062
7063
7064/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7065FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
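
/* A standalone sketch (illustration only) contrasting bsf with the tzcnt
   form stubbed above: bsf leaves the destination undefined and sets ZF on a
   zero input, whereas tzcnt is fully defined and returns the operand width
   (with CF=1) for zero. */
#if 0
# include <stdint.h>
# include <stdio.h>

/* bsf r32, r/m32: returns ZF; on success stores the lowest set bit index. */
static int Bsf32(uint32_t uSrc, uint32_t *puDst)
{
    if (!uSrc)
        return 1;                               /* ZF=1, *puDst undefined. */
    uint32_t iBit = 0;
    while (!(uSrc & (UINT32_C(1) << iBit)))
        iBit++;
    *puDst = iBit;
    return 0;                                   /* ZF=0. */
}

/* tzcnt r32, r/m32: defined for all inputs, 32 for a zero input. */
static uint32_t Tzcnt32(uint32_t uSrc)
{
    uint32_t iBit = 0;
    while (iBit < 32 && !(uSrc & (UINT32_C(1) << iBit)))
        iBit++;
    return iBit;
}

int main(void)
{
    uint32_t uDst = 0;
    printf("bsf(0x18): ZF=%d dst=%u\n", Bsf32(0x18, &uDst), uDst); /* ZF=0 dst=3 */
    printf("tzcnt(0):  %u\n", Tzcnt32(0));                         /* 32 */
    return 0;
}
#endif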
7066
7067
7068/** Opcode 0x0f 0xbe. */
7069FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7070{
7071 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7072 IEMOP_HLP_MIN_386();
7073
7074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7075
7076 /*
7077 * If rm denotes a register, there are no more instruction bytes.
7078 */
7079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7080 {
7081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7082 switch (pVCpu->iem.s.enmEffOpSize)
7083 {
7084 case IEMMODE_16BIT:
7085 IEM_MC_BEGIN(0, 1);
7086 IEM_MC_LOCAL(uint16_t, u16Value);
7087 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7088 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7089 IEM_MC_ADVANCE_RIP();
7090 IEM_MC_END();
7091 return VINF_SUCCESS;
7092
7093 case IEMMODE_32BIT:
7094 IEM_MC_BEGIN(0, 1);
7095 IEM_MC_LOCAL(uint32_t, u32Value);
7096 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7097 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7098 IEM_MC_ADVANCE_RIP();
7099 IEM_MC_END();
7100 return VINF_SUCCESS;
7101
7102 case IEMMODE_64BIT:
7103 IEM_MC_BEGIN(0, 1);
7104 IEM_MC_LOCAL(uint64_t, u64Value);
7105 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7106 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7107 IEM_MC_ADVANCE_RIP();
7108 IEM_MC_END();
7109 return VINF_SUCCESS;
7110
7111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7112 }
7113 }
7114 else
7115 {
7116 /*
7117 * We're loading a register from memory.
7118 */
7119 switch (pVCpu->iem.s.enmEffOpSize)
7120 {
7121 case IEMMODE_16BIT:
7122 IEM_MC_BEGIN(0, 2);
7123 IEM_MC_LOCAL(uint16_t, u16Value);
7124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7127 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7128 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7129 IEM_MC_ADVANCE_RIP();
7130 IEM_MC_END();
7131 return VINF_SUCCESS;
7132
7133 case IEMMODE_32BIT:
7134 IEM_MC_BEGIN(0, 2);
7135 IEM_MC_LOCAL(uint32_t, u32Value);
7136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7139 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7140 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7141 IEM_MC_ADVANCE_RIP();
7142 IEM_MC_END();
7143 return VINF_SUCCESS;
7144
7145 case IEMMODE_64BIT:
7146 IEM_MC_BEGIN(0, 2);
7147 IEM_MC_LOCAL(uint64_t, u64Value);
7148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7151 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7152 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7153 IEM_MC_ADVANCE_RIP();
7154 IEM_MC_END();
7155 return VINF_SUCCESS;
7156
7157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7158 }
7159 }
7160}
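
/* The IEM_MC_FETCH_*_SX_* operations above perform the architectural sign
   extension; in plain C terms (standalone sketch, illustration only): */
#if 0
# include <stdint.h>
# include <stdio.h>

int main(void)
{
    /* movsx r32, r/m8 replicates bit 7 of the source into bits 8 thru 31: */
    uint8_t  u8Src  = 0x80;
    uint32_t u32Dst = (uint32_t)(int32_t)(int8_t)u8Src;
    printf("%#010x\n", u32Dst);                 /* 0xffffff80 */

    /* A source with bit 7 clear is zero padded: */
    u8Src  = 0x7f;
    u32Dst = (uint32_t)(int32_t)(int8_t)u8Src;
    printf("%#010x\n", u32Dst);                 /* 0x0000007f */
    return 0;
}
#endif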
7161
7162
7163/** Opcode 0x0f 0xbf. */
7164FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7165{
7166 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7167 IEMOP_HLP_MIN_386();
7168
7169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7170
7171 /** @todo Not entirely sure how the operand size prefix is handled here,
7172 * assuming that it will be ignored. Would be nice to have a few
7173 * tests for this. */
7174 /*
7175 * If rm denotes a register, there are no more instruction bytes.
7176 */
7177 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7178 {
7179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7180 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7181 {
7182 IEM_MC_BEGIN(0, 1);
7183 IEM_MC_LOCAL(uint32_t, u32Value);
7184 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7185 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7186 IEM_MC_ADVANCE_RIP();
7187 IEM_MC_END();
7188 }
7189 else
7190 {
7191 IEM_MC_BEGIN(0, 1);
7192 IEM_MC_LOCAL(uint64_t, u64Value);
7193 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7194 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7195 IEM_MC_ADVANCE_RIP();
7196 IEM_MC_END();
7197 }
7198 }
7199 else
7200 {
7201 /*
7202 * We're loading a register from memory.
7203 */
7204 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7205 {
7206 IEM_MC_BEGIN(0, 2);
7207 IEM_MC_LOCAL(uint32_t, u32Value);
7208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7211 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7212 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7213 IEM_MC_ADVANCE_RIP();
7214 IEM_MC_END();
7215 }
7216 else
7217 {
7218 IEM_MC_BEGIN(0, 2);
7219 IEM_MC_LOCAL(uint64_t, u64Value);
7220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7223 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7224 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7225 IEM_MC_ADVANCE_RIP();
7226 IEM_MC_END();
7227 }
7228 }
7229 return VINF_SUCCESS;
7230}
7231
7232
7233/** Opcode 0x0f 0xc0. */
7234FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7235{
7236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7237 IEMOP_HLP_MIN_486();
7238 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7239
7240 /*
7241 * If rm denotes a register, there are no more instruction bytes.
7242 */
7243 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7244 {
7245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7246
7247 IEM_MC_BEGIN(3, 0);
7248 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7249 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7250 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7251
7252 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7253 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7254 IEM_MC_REF_EFLAGS(pEFlags);
7255 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7256
7257 IEM_MC_ADVANCE_RIP();
7258 IEM_MC_END();
7259 }
7260 else
7261 {
7262 /*
7263 * We're accessing memory.
7264 */
7265 IEM_MC_BEGIN(3, 3);
7266 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7267 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7268 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7269 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7271
7272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7273 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7274 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7275 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7276 IEM_MC_FETCH_EFLAGS(EFlags);
7277 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7278 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7279 else
7280 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7281
7282 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7283 IEM_MC_COMMIT_EFLAGS(EFlags);
7284 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7285 IEM_MC_ADVANCE_RIP();
7286 IEM_MC_END();
7287 return VINF_SUCCESS;
7288 }
7289 return VINF_SUCCESS;
7290}
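
/* A standalone sketch (illustration only) of the exchange-and-add semantics
   emulated above; the memory path keeps the old register value in u8RegCopy
   and commits it to the register only after the memory write succeeds. */
#if 0
# include <stdint.h>
# include <stdio.h>

/* xadd dst, src: TEMP = old dst; dst = dst + src; src = TEMP (flags as ADD). */
static void XAdd8(uint8_t *pu8Dst, uint8_t *pu8Src)
{
    uint8_t const u8OldDst = *pu8Dst;
    *pu8Dst = (uint8_t)(u8OldDst + *pu8Src);
    *pu8Src = u8OldDst;
}

int main(void)
{
    uint8_t u8Mem = 10, u8Reg = 5;
    XAdd8(&u8Mem, &u8Reg);
    printf("mem=%u reg=%u\n", u8Mem, u8Reg);    /* mem=15 reg=10 */
    return 0;
}
#endif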
7291
7292
7293/** Opcode 0x0f 0xc1. */
7294FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7295{
7296 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7297 IEMOP_HLP_MIN_486();
7298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7299
7300 /*
7301 * If rm denotes a register, there are no more instruction bytes.
7302 */
7303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7304 {
7305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7306
7307 switch (pVCpu->iem.s.enmEffOpSize)
7308 {
7309 case IEMMODE_16BIT:
7310 IEM_MC_BEGIN(3, 0);
7311 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7312 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7313 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7314
7315 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7316 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7317 IEM_MC_REF_EFLAGS(pEFlags);
7318 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7319
7320 IEM_MC_ADVANCE_RIP();
7321 IEM_MC_END();
7322 return VINF_SUCCESS;
7323
7324 case IEMMODE_32BIT:
7325 IEM_MC_BEGIN(3, 0);
7326 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7327 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7328 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7329
7330 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7331 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7332 IEM_MC_REF_EFLAGS(pEFlags);
7333 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7334
7335 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7336 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7337 IEM_MC_ADVANCE_RIP();
7338 IEM_MC_END();
7339 return VINF_SUCCESS;
7340
7341 case IEMMODE_64BIT:
7342 IEM_MC_BEGIN(3, 0);
7343 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7344 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7345 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7346
7347 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7348 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7349 IEM_MC_REF_EFLAGS(pEFlags);
7350 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7351
7352 IEM_MC_ADVANCE_RIP();
7353 IEM_MC_END();
7354 return VINF_SUCCESS;
7355
7356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7357 }
7358 }
7359 else
7360 {
7361 /*
7362 * We're accessing memory.
7363 */
7364 switch (pVCpu->iem.s.enmEffOpSize)
7365 {
7366 case IEMMODE_16BIT:
7367 IEM_MC_BEGIN(3, 3);
7368 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7369 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7370 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7371 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7373
7374 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7375 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7376 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7377 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7378 IEM_MC_FETCH_EFLAGS(EFlags);
7379 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7380 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7381 else
7382 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7383
7384 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7385 IEM_MC_COMMIT_EFLAGS(EFlags);
7386 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7387 IEM_MC_ADVANCE_RIP();
7388 IEM_MC_END();
7389 return VINF_SUCCESS;
7390
7391 case IEMMODE_32BIT:
7392 IEM_MC_BEGIN(3, 3);
7393 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7394 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7395 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7396 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7398
7399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7400 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7401 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7402 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7403 IEM_MC_FETCH_EFLAGS(EFlags);
7404 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7405 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7406 else
7407 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7408
7409 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7410 IEM_MC_COMMIT_EFLAGS(EFlags);
7411 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7412 IEM_MC_ADVANCE_RIP();
7413 IEM_MC_END();
7414 return VINF_SUCCESS;
7415
7416 case IEMMODE_64BIT:
7417 IEM_MC_BEGIN(3, 3);
7418 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7419 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7420 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7421 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7423
7424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7425 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7426 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7427 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7428 IEM_MC_FETCH_EFLAGS(EFlags);
7429 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7430 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7431 else
7432 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7433
7434 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7435 IEM_MC_COMMIT_EFLAGS(EFlags);
7436 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7437 IEM_MC_ADVANCE_RIP();
7438 IEM_MC_END();
7439 return VINF_SUCCESS;
7440
7441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7442 }
7443 }
7444}
7445
7446
7447/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7448FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7449/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7450FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7451/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7452FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7453/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7454FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7455
7456
7457/** Opcode 0x0f 0xc3. */
7458FNIEMOP_DEF(iemOp_movnti_My_Gy)
7459{
7460 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7461
7462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7463
7464 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7465 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7466 {
7467 switch (pVCpu->iem.s.enmEffOpSize)
7468 {
7469 case IEMMODE_32BIT:
7470 IEM_MC_BEGIN(0, 2);
7471 IEM_MC_LOCAL(uint32_t, u32Value);
7472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7473
7474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7476 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7477 return IEMOP_RAISE_INVALID_OPCODE();
7478
7479 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7480 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7481 IEM_MC_ADVANCE_RIP();
7482 IEM_MC_END();
7483 break;
7484
7485 case IEMMODE_64BIT:
7486 IEM_MC_BEGIN(0, 2);
7487 IEM_MC_LOCAL(uint64_t, u64Value);
7488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7489
7490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7492 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7493 return IEMOP_RAISE_INVALID_OPCODE();
7494
7495 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7496 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7497 IEM_MC_ADVANCE_RIP();
7498 IEM_MC_END();
7499 break;
7500
7501 case IEMMODE_16BIT:
7502 /** @todo check this form. */
7503 return IEMOP_RAISE_INVALID_OPCODE();
7504 }
7505 }
7506 else
7507 return IEMOP_RAISE_INVALID_OPCODE();
7508 return VINF_SUCCESS;
7509}
7510/* Opcode 0x66 0x0f 0xc3 - invalid */
7511/* Opcode 0xf3 0x0f 0xc3 - invalid */
7512/* Opcode 0xf2 0x0f 0xc3 - invalid */
7513
7514/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7515FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7516/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7517FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7518/* Opcode 0xf3 0x0f 0xc4 - invalid */
7519/* Opcode 0xf2 0x0f 0xc4 - invalid */
7520
7521/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7522FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7523/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7524FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7525/* Opcode 0xf3 0x0f 0xc5 - invalid */
7526/* Opcode 0xf2 0x0f 0xc5 - invalid */
7527
7528/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7529FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7530/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7531FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7532/* Opcode 0xf3 0x0f 0xc6 - invalid */
7533/* Opcode 0xf2 0x0f 0xc6 - invalid */
7534
7535
7536/** Opcode 0x0f 0xc7 !11/1. */
7537FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7538{
7539 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7540
7541 IEM_MC_BEGIN(4, 3);
7542 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7543 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7544 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7545 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7546 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7547 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7549
7550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7551 IEMOP_HLP_DONE_DECODING();
7552 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7553
7554 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7555 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7556 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7557
7558 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7559 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7560 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7561
7562 IEM_MC_FETCH_EFLAGS(EFlags);
7563 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7564 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7565 else
7566 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7567
7568 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7569 IEM_MC_COMMIT_EFLAGS(EFlags);
7570 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7571 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7572 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7573 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7574 IEM_MC_ENDIF();
7575 IEM_MC_ADVANCE_RIP();
7576
7577 IEM_MC_END();
7578 return VINF_SUCCESS;
7579}
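
/* A standalone sketch (illustration only) of the cmpxchg8b comparison logic
   emulated above. Note that the instruction always writes the memory
   operand (with the old value on mismatch), which is why the destination is
   mapped read-write in both the locked and unlocked paths. */
#if 0
# include <stdint.h>
# include <stdio.h>

/* Returns ZF. pu64EaxEdx is EDX:EAX, u64EcxEbx is ECX:EBX. */
static int CmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EcxEbx)
{
    uint64_t const u64Old = *pu64Mem;
    if (u64Old == *pu64EaxEdx)
    {
        *pu64Mem = u64EcxEbx;
        return 1;                               /* ZF=1: stored ECX:EBX. */
    }
    *pu64Mem    = u64Old;                       /* Written back unchanged. */
    *pu64EaxEdx = u64Old;                       /* EDX:EAX gets the memory value. */
    return 0;                                   /* ZF=0. */
}

int main(void)
{
    uint64_t u64Mem = UINT64_C(0x1122334455667788);
    uint64_t u64Cmp = u64Mem;
    int const fZF = CmpXchg8b(&u64Mem, &u64Cmp, UINT64_C(0xdeadbeefcafebabe));
    printf("ZF=%d mem=%#llx\n", fZF, (unsigned long long)u64Mem); /* ZF=1 */
    return 0;
}
#endif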
7580
7581
7582/** Opcode REX.W 0x0f 0xc7 !11/1. */
7583FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7584{
7585 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7586 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7587 {
7588#if 0
7589 RT_NOREF(bRm);
7590 IEMOP_BITCH_ABOUT_STUB();
7591 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7592#else
7593 IEM_MC_BEGIN(4, 3);
7594 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7595 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7596 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7597 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7598 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7599 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7601
7602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7603 IEMOP_HLP_DONE_DECODING();
7604 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7605 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7606
7607 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7608 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7609 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7610
7611 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7612 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7613 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7614
7615 IEM_MC_FETCH_EFLAGS(EFlags);
7616# ifdef RT_ARCH_AMD64
7617 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7618 {
7619 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7620 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7621 else
7622 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7623 }
7624 else
7625# endif
7626 {
7627 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
7628 accesses and is not at all atomic, which works fine in a uni-CPU guest
7629 configuration (ignoring DMA). If guest SMP is active we have no choice
7630 but to use a rendezvous callback here. Sigh. */
7631 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7632 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7633 else
7634 {
7635 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7636 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7637 }
7638 }
7639
7640 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7641 IEM_MC_COMMIT_EFLAGS(EFlags);
7642 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7643 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7644 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7645 IEM_MC_ENDIF();
7646 IEM_MC_ADVANCE_RIP();
7647
7648 IEM_MC_END();
7649 return VINF_SUCCESS;
7650#endif
7651 }
7652 Log(("cmpxchg16b -> #UD\n"));
7653 return IEMOP_RAISE_INVALID_OPCODE();
7654}
7655
7656FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7657{
7658 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7659 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7660 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7661}
7662
7663/** Opcode 0x0f 0xc7 11/6. */
7664FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7665
7666/** Opcode 0x0f 0xc7 !11/6. */
7667FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7668
7669/** Opcode 0x66 0x0f 0xc7 !11/6. */
7670FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7671
7672/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7673FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7674
7675/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7676FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7677
7678/** Opcode 0x0f 0xc7 11/7. */
7679FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7680
7681
7682/**
7683 * Group 9 jump table for register variant.
7684 */
7685IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7686{ /* pfx: none, 066h, 0f3h, 0f2h */
7687 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7688 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7689 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7690 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7691 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7692 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7693 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7694 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7695};
7696AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7697
7698
7699/**
7700 * Group 9 jump table for memory variant.
7701 */
7702IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7703{ /* pfx: none, 066h, 0f3h, 0f2h */
7704 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7705 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7706 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7707 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7708 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7709 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7710 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7711 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7712};
7713AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7714
7715
7716/** Opcode 0x0f 0xc7. */
7717FNIEMOP_DEF(iemOp_Grp9)
7718{
7719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7721 /* register, register */
7722 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7723 + pVCpu->iem.s.idxPrefix], bRm);
7724 /* memory, register */
7725 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7726 + pVCpu->iem.s.idxPrefix], bRm);
7727}
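
/* The two tables above are laid out as reg * 4 + prefix: four columns per
   /r value, one for each mandatory prefix (none, 0x66, 0xf3, 0xf2), which
   is what pVCpu->iem.s.idxPrefix encodes. A standalone sketch of the index
   calculation (illustrative names, not the emulator's types): */
#if 0
# include <stdint.h>
# include <stdio.h>

enum { kPfxNone = 0, kPfx066 = 1, kPfx0F3 = 2, kPfx0F2 = 3 };

static unsigned Group9TableIndex(uint8_t bRm, unsigned idxPrefix)
{
    unsigned const iReg = (bRm >> 3) & 7;       /* ModRM.reg picks the row. */
    return iReg * 4 + idxPrefix;                /* idxPrefix picks the column. */
}

int main(void)
{
    /* mod=00 reg=001 rm=111 with no prefix hits the cmpxchg8b/16b row: */
    printf("%u\n", Group9TableIndex(0x0f, kPfxNone)); /* 4 */
    return 0;
}
#endif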
7728
7729
7730/**
7731 * Common 'bswap register' helper.
7732 */
7733FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7734{
7735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7736 switch (pVCpu->iem.s.enmEffOpSize)
7737 {
7738 case IEMMODE_16BIT:
7739 IEM_MC_BEGIN(1, 0);
7740 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7741 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7742 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7743 IEM_MC_ADVANCE_RIP();
7744 IEM_MC_END();
7745 return VINF_SUCCESS;
7746
7747 case IEMMODE_32BIT:
7748 IEM_MC_BEGIN(1, 0);
7749 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7750 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7751 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7752 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7753 IEM_MC_ADVANCE_RIP();
7754 IEM_MC_END();
7755 return VINF_SUCCESS;
7756
7757 case IEMMODE_64BIT:
7758 IEM_MC_BEGIN(1, 0);
7759 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7760 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7761 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7762 IEM_MC_ADVANCE_RIP();
7763 IEM_MC_END();
7764 return VINF_SUCCESS;
7765
7766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7767 }
7768}
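
/* A standalone sketch (illustration only) of the 32-bit byte swap emulated
   above. Note that bswap on a 16-bit register is architecturally undefined,
   which is why the 16-bit case works on the full 32-bit register and
   deliberately leaves the high dword alone. */
#if 0
# include <stdint.h>
# include <stdio.h>

static uint32_t Bswap32(uint32_t u32)
{
    return ((u32 & UINT32_C(0x000000ff)) << 24)
         | ((u32 & UINT32_C(0x0000ff00)) <<  8)
         | ((u32 & UINT32_C(0x00ff0000)) >>  8)
         | ((u32 & UINT32_C(0xff000000)) >> 24);
}

int main(void)
{
    printf("%#010x\n", Bswap32(UINT32_C(0x12345678))); /* 0x78563412 */
    return 0;
}
#endif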
7769
7770
7771/** Opcode 0x0f 0xc8. */
7772FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7773{
7774 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7775 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7776 prefix; however, REX.B appears to be the correct prefix. For a parallel
7777 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7778 IEMOP_HLP_MIN_486();
7779 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7780}
7781
7782
7783/** Opcode 0x0f 0xc9. */
7784FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7785{
7786 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7787 IEMOP_HLP_MIN_486();
7788 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7789}
7790
7791
7792/** Opcode 0x0f 0xca. */
7793FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7794{
7795 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7796 IEMOP_HLP_MIN_486();
7797 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7798}
7799
7800
7801/** Opcode 0x0f 0xcb. */
7802FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7803{
7804 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7805 IEMOP_HLP_MIN_486();
7806 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7807}
7808
7809
7810/** Opcode 0x0f 0xcc. */
7811FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7812{
7813 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7814 IEMOP_HLP_MIN_486();
7815 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7816}
7817
7818
7819/** Opcode 0x0f 0xcd. */
7820FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7821{
7822 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7823 IEMOP_HLP_MIN_486();
7824 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7825}
7826
7827
7828/** Opcode 0x0f 0xce. */
7829FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7830{
7831 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7832 IEMOP_HLP_MIN_486();
7833 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7834}
7835
7836
7837/** Opcode 0x0f 0xcf. */
7838FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7839{
7840 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7841 IEMOP_HLP_MIN_486();
7842 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7843}
7844
7845
7846/* Opcode 0x0f 0xd0 - invalid */
7847/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7848FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7849/* Opcode 0xf3 0x0f 0xd0 - invalid */
7850/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7851FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7852
7853/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7854FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7855/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
7856FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7857/* Opcode 0xf3 0x0f 0xd1 - invalid */
7858/* Opcode 0xf2 0x0f 0xd1 - invalid */
7859
7860/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7861FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7862/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7863FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7864/* Opcode 0xf3 0x0f 0xd2 - invalid */
7865/* Opcode 0xf2 0x0f 0xd2 - invalid */
7866
7867/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7868FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7869/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
7870FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
7871/* Opcode 0xf3 0x0f 0xd3 - invalid */
7872/* Opcode 0xf2 0x0f 0xd3 - invalid */
7873
7874/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7875FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7876/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
7877FNIEMOP_STUB(iemOp_paddq_Vx_W);
7878/* Opcode 0xf3 0x0f 0xd4 - invalid */
7879/* Opcode 0xf2 0x0f 0xd4 - invalid */
7880
7881/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7882FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7883/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
7884FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
7885/* Opcode 0xf3 0x0f 0xd5 - invalid */
7886/* Opcode 0xf2 0x0f 0xd5 - invalid */
7887
7888/* Opcode 0x0f 0xd6 - invalid */
7889
7890/**
7891 * @opcode 0xd6
7892 * @oppfx 0x66
7893 * @opcpuid sse2
7894 * @opgroup og_sse2_pcksclr_datamove
7895 * @opxcpttype none
7896 * @optest op1=-1 op2=2 -> op1=2
7897 * @optest op1=0 op2=-42 -> op1=-42
7898 */
7899FNIEMOP_DEF(iemOp_movq_Wq_Vq)
7900{
7901 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7902 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7903 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7904 {
7905 /*
7906 * Register, register.
7907 */
7908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7909 IEM_MC_BEGIN(0, 2);
7910 IEM_MC_LOCAL(uint64_t, uSrc);
7911
7912 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7913 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7914
7915 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7916 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7917
7918 IEM_MC_ADVANCE_RIP();
7919 IEM_MC_END();
7920 }
7921 else
7922 {
7923 /*
7924 * Memory, register.
7925 */
7926 IEM_MC_BEGIN(0, 2);
7927 IEM_MC_LOCAL(uint64_t, uSrc);
7928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7929
7930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7932 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7933 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7934
7935 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7936 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7937
7938 IEM_MC_ADVANCE_RIP();
7939 IEM_MC_END();
7940 }
7941 return VINF_SUCCESS;
7942}
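
/* In the register form the destination is zero extended to 128 bits (hence
   the WqZxReg operand and IEM_MC_STORE_XREG_U64_ZX_U128 above), while the
   memory form stores just the low quadword. A standalone sketch of the
   register form (X86XMMREG_T is an illustrative stand-in type): */
#if 0
# include <stdint.h>
# include <stdio.h>

typedef struct { uint64_t au64[2]; } X86XMMREG_T;

static void MovqWqVq(X86XMMREG_T *pDst, X86XMMREG_T const *pSrc)
{
    pDst->au64[0] = pSrc->au64[0];              /* Copy the low quadword... */
    pDst->au64[1] = 0;                          /* ...and zero the high one. */
}

int main(void)
{
    X86XMMREG_T Src = { { UINT64_C(0x1111111111111111), UINT64_C(0x2222222222222222) } };
    X86XMMREG_T Dst = { { UINT64_MAX, UINT64_MAX } };
    MovqWqVq(&Dst, &Src);
    printf("%#llx %#llx\n", (unsigned long long)Dst.au64[0],
           (unsigned long long)Dst.au64[1]);    /* 0x1111111111111111 0 */
    return 0;
}
#endif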
7943
7944
7945/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7946FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7947/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7948FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7949#if 0
7950FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7951{
7952 /* Docs say register only. */
7953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7954
7955 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7956 {
7957 case IEM_OP_PRF_SIZE_OP: /* SSE */
7958 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7959 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7960 IEM_MC_BEGIN(2, 0);
7961 IEM_MC_ARG(uint64_t *, pDst, 0);
7962 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7963 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7964 IEM_MC_PREPARE_SSE_USAGE();
7965 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7966 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7967 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7968 IEM_MC_ADVANCE_RIP();
7969 IEM_MC_END();
7970 return VINF_SUCCESS;
7971
7972 case 0: /* MMX */
7973 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7974 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7975 IEM_MC_BEGIN(2, 0);
7976 IEM_MC_ARG(uint64_t *, pDst, 0);
7977 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7978 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7979 IEM_MC_PREPARE_FPU_USAGE();
7980 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7981 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7982 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7983 IEM_MC_ADVANCE_RIP();
7984 IEM_MC_END();
7985 return VINF_SUCCESS;
7986
7987 default:
7988 return IEMOP_RAISE_INVALID_OPCODE();
7989 }
7990}
7991#endif
7992
7993
7994/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7995FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7996{
7997 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7998 /** @todo testcase: Check that the instruction implicitly clears the high
7999 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8000 * and the opcode is modified to work with the whole width (not
8001 * just 128 bits). */
8002 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8003 /* Docs say register only. */
8004 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8006 {
8007 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8008 IEM_MC_BEGIN(2, 0);
8009 IEM_MC_ARG(uint64_t *, pDst, 0);
8010 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8011 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8012 IEM_MC_PREPARE_FPU_USAGE();
8013 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8014 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8015 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8016 IEM_MC_ADVANCE_RIP();
8017 IEM_MC_END();
8018 return VINF_SUCCESS;
8019 }
8020 return IEMOP_RAISE_INVALID_OPCODE();
8021}
8022
8023/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8024FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8025{
8026 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8027 /** @todo testcase: Check that the instruction implicitly clears the high
8028 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8029 * and the opcode is modified to work with the whole width (not
8030 * just 128 bits). */
8031 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8032 /* Docs say register only. */
8033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8034 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8035 {
8036 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8037 IEM_MC_BEGIN(2, 0);
8038 IEM_MC_ARG(uint64_t *, pDst, 0);
8039 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8040 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8041 IEM_MC_PREPARE_SSE_USAGE();
8042 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8043 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8044 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8045 IEM_MC_ADVANCE_RIP();
8046 IEM_MC_END();
8047 return VINF_SUCCESS;
8048 }
8049 return IEMOP_RAISE_INVALID_OPCODE();
8050}
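
/* Both pmovmskb forms gather the most significant bit of every packed byte
   into the low bits of the destination GPR (8 mask bits for the MMX form,
   16 for the SSE form) and zero the rest. A standalone sketch of the
   64-bit/MMX case (illustration only): */
#if 0
# include <stdint.h>
# include <stdio.h>

static uint64_t PMovMskB64(uint64_t uSrc)
{
    uint64_t uMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        uMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte; /* MSB of byte i. */
    return uMask;
}

int main(void)
{
    /* Bytes 0, 2, 4 and 6 (counting from the low end) have their MSB set: */
    printf("%#llx\n", (unsigned long long)PMovMskB64(UINT64_C(0x0080008000800080)));
    /* Prints 0x55, i.e. 0b01010101. */
    return 0;
}
#endif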
8051
8052/* Opcode 0xf3 0x0f 0xd7 - invalid */
8053/* Opcode 0xf2 0x0f 0xd7 - invalid */
8054
8055
8056/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8057FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8058/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8059FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8060/* Opcode 0xf3 0x0f 0xd8 - invalid */
8061/* Opcode 0xf2 0x0f 0xd8 - invalid */
8062
8063/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8064FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8065/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8066FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8067/* Opcode 0xf3 0x0f 0xd9 - invalid */
8068/* Opcode 0xf2 0x0f 0xd9 - invalid */
8069
8070/** Opcode 0x0f 0xda - pminub Pq, Qq */
8071FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8072/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8073FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8074/* Opcode 0xf3 0x0f 0xda - invalid */
8075/* Opcode 0xf2 0x0f 0xda - invalid */
8076
8077/** Opcode 0x0f 0xdb - pand Pq, Qq */
8078FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8079/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8080FNIEMOP_STUB(iemOp_pand_Vx_W);
8081/* Opcode 0xf3 0x0f 0xdb - invalid */
8082/* Opcode 0xf2 0x0f 0xdb - invalid */
8083
8084/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8085FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8086/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8087FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8088/* Opcode 0xf3 0x0f 0xdc - invalid */
8089/* Opcode 0xf2 0x0f 0xdc - invalid */
8090
8091/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8092FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8093/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8094FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8095/* Opcode 0xf3 0x0f 0xdd - invalid */
8096/* Opcode 0xf2 0x0f 0xdd - invalid */
8097
8098/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8099FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8100/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8101FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8102/* Opcode 0xf3 0x0f 0xde - invalid */
8103/* Opcode 0xf2 0x0f 0xde - invalid */
8104
8105/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8106FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8107/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8108FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8109/* Opcode 0xf3 0x0f 0xdf - invalid */
8110/* Opcode 0xf2 0x0f 0xdf - invalid */
8111
8112/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8113FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8114/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8115FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8116/* Opcode 0xf3 0x0f 0xe0 - invalid */
8117/* Opcode 0xf2 0x0f 0xe0 - invalid */
8118
8119/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8120FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8121/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8122FNIEMOP_STUB(iemOp_psraw_Vx_W);
8123/* Opcode 0xf3 0x0f 0xe1 - invalid */
8124/* Opcode 0xf2 0x0f 0xe1 - invalid */
8125
8126/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8127FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8128/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8129FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8130/* Opcode 0xf3 0x0f 0xe2 - invalid */
8131/* Opcode 0xf2 0x0f 0xe2 - invalid */
8132
8133/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8134FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8135/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8136FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8137/* Opcode 0xf3 0x0f 0xe3 - invalid */
8138/* Opcode 0xf2 0x0f 0xe3 - invalid */
8139
8140/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8141FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8142/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8143FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8144/* Opcode 0xf3 0x0f 0xe4 - invalid */
8145/* Opcode 0xf2 0x0f 0xe4 - invalid */
8146
8147/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8148FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8149/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8150FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8151/* Opcode 0xf3 0x0f 0xe5 - invalid */
8152/* Opcode 0xf2 0x0f 0xe5 - invalid */
8153
8154/* Opcode 0x0f 0xe6 - invalid */
8155/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8156FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8157/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8158FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8159/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8160FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8161
8162
8163/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8164FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8165{
8166 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8168 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8169 {
8170 /* Register, memory. */
8171 IEM_MC_BEGIN(0, 2);
8172 IEM_MC_LOCAL(uint64_t, uSrc);
8173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8174
8175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8177 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8178 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8179
8180 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8181 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8182
8183 IEM_MC_ADVANCE_RIP();
8184 IEM_MC_END();
8185 return VINF_SUCCESS;
8186 }
8187 /* The register, register encoding is invalid. */
8188 return IEMOP_RAISE_INVALID_OPCODE();
8189}
8190
8191/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8192FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8193{
8194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8195 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8196 {
8197 /* Register, memory. */
8198 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8199 IEM_MC_BEGIN(0, 2);
8200 IEM_MC_LOCAL(RTUINT128U, uSrc);
8201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8202
8203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8205 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8206 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8207
8208 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8209 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8210
8211 IEM_MC_ADVANCE_RIP();
8212 IEM_MC_END();
8213 return VINF_SUCCESS;
8214 }
8215
8216 /* The register, register encoding is invalid. */
8217 return IEMOP_RAISE_INVALID_OPCODE();
8218}
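
/* movntdq requires a 16 byte aligned effective address, which is what
   IEM_MC_STORE_MEM_U128_ALIGN_SSE enforces above (raising #GP(0) otherwise);
   the MMX movntq form has no such alignment fault. A standalone sketch of
   the check (illustration only): */
#if 0
# include <stdint.h>
# include <stdio.h>

static int IsMovntdqAddrAligned(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0;                /* Low four bits must be clear. */
}

int main(void)
{
    printf("%d %d\n", IsMovntdqAddrAligned(UINT64_C(0x1000)),
           IsMovntdqAddrAligned(UINT64_C(0x1008)));     /* 1 0 */
    return 0;
}
#endif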
8219
8220/* Opcode 0xf3 0x0f 0xe7 - invalid */
8221/* Opcode 0xf2 0x0f 0xe7 - invalid */
8222
8223
8224/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8225FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8226/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8227FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8228/* Opcode 0xf3 0x0f 0xe8 - invalid */
8229/* Opcode 0xf2 0x0f 0xe8 - invalid */
8230
8231/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8232FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8233/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8234FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8235/* Opcode 0xf3 0x0f 0xe9 - invalid */
8236/* Opcode 0xf2 0x0f 0xe9 - invalid */
8237
8238/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8239FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8240/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8241FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8242/* Opcode 0xf3 0x0f 0xea - invalid */
8243/* Opcode 0xf2 0x0f 0xea - invalid */
8244
8245/** Opcode 0x0f 0xeb - por Pq, Qq */
8246FNIEMOP_STUB(iemOp_por_Pq_Qq);
8247/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8248FNIEMOP_STUB(iemOp_por_Vx_W);
8249/* Opcode 0xf3 0x0f 0xeb - invalid */
8250/* Opcode 0xf2 0x0f 0xeb - invalid */
8251
8252/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8253FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8254/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8255FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8256/* Opcode 0xf3 0x0f 0xec - invalid */
8257/* Opcode 0xf2 0x0f 0xec - invalid */
8258
8259/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8260FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8261/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8262FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8263/* Opcode 0xf3 0x0f 0xed - invalid */
8264/* Opcode 0xf2 0x0f 0xed - invalid */
8265
8266/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8267FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8268/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8269FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8270/* Opcode 0xf3 0x0f 0xee - invalid */
8271/* Opcode 0xf2 0x0f 0xee - invalid */
8272
8273
8274/** Opcode 0x0f 0xef - pxor Pq, Qq */
8275FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8276{
8277 IEMOP_MNEMONIC(pxor, "pxor");
8278 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8279}
8280
8281/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8282FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8283{
8284 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8285 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8286}
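
/* pxor is a plain bitwise exclusive-or across the whole vector, and the
   familiar 'pxor xmm0,xmm0' zeroing idiom falls out of x ^ x == 0. A
   standalone sketch over two quadword lanes (U128_T is an illustrative
   stand-in type): */
#if 0
# include <stdint.h>
# include <stdio.h>

typedef struct { uint64_t au64[2]; } U128_T;

static void PXor(U128_T *pDst, U128_T const *pSrc)
{
    pDst->au64[0] ^= pSrc->au64[0];
    pDst->au64[1] ^= pSrc->au64[1];
}

int main(void)
{
    U128_T uReg = { { UINT64_C(0x0123456789abcdef), UINT64_C(0xfedcba9876543210) } };
    PXor(&uReg, &uReg);                         /* pxor xmm0, xmm0 -> all zero. */
    printf("%llu %llu\n", (unsigned long long)uReg.au64[0],
           (unsigned long long)uReg.au64[1]);   /* 0 0 */
    return 0;
}
#endif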
8287
8288/* Opcode 0xf3 0x0f 0xef - invalid */
8289/* Opcode 0xf2 0x0f 0xef - invalid */
8290
8291/* Opcode 0x0f 0xf0 - invalid */
8292/* Opcode 0x66 0x0f 0xf0 - invalid */
8293/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8294FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8295
8296/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8297FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8298/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8299FNIEMOP_STUB(iemOp_psllw_Vx_W);
8300/* Opcode 0xf2 0x0f 0xf1 - invalid */
8301
8302/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8303FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8304/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8305FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8306/* Opcode 0xf2 0x0f 0xf2 - invalid */
8307
8308/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8309FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8310/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8311FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8312/* Opcode 0xf2 0x0f 0xf3 - invalid */
8313
8314/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8315FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8316/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8317FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8318/* Opcode 0xf2 0x0f 0xf4 - invalid */
8319
8320/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8321FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8322/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8323FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8324/* Opcode 0xf2 0x0f 0xf5 - invalid */
8325
8326/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8327FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8328/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8329FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8330/* Opcode 0xf2 0x0f 0xf6 - invalid */
8331
8332/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8333FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8334/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8335FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8336/* Opcode 0xf2 0x0f 0xf7 - invalid */
8337
8338/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8339FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8340/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8341FNIEMOP_STUB(iemOp_psubb_Vx_W);
8342/* Opcode 0xf2 0x0f 0xf8 - invalid */
8343
8344/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8345FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8346/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8347FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8348/* Opcode 0xf2 0x0f 0xf9 - invalid */
8349
8350/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8351FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8352/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8353FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8354/* Opcode 0xf2 0x0f 0xfa - invalid */
8355
8356/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8357FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8358/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8359FNIEMOP_STUB(iemOp_psubq_Vx_W);
8360/* Opcode 0xf2 0x0f 0xfb - invalid */
8361
8362/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8363FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8364/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8365FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8366/* Opcode 0xf2 0x0f 0xfc - invalid */
8367
8368/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8369FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8370/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8371FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8372/* Opcode 0xf2 0x0f 0xfd - invalid */
8373
8374/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8375FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8376/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8377FNIEMOP_STUB(iemOp_paddd_Vx_W);
8378/* Opcode 0xf2 0x0f 0xfe - invalid */
8379
8380
8381/** Opcode **** 0x0f 0xff - UD0 */
8382FNIEMOP_DEF(iemOp_ud0)
8383{
8384 IEMOP_MNEMONIC(ud0, "ud0");
8385 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8386 {
8387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8388#ifndef TST_IEM_CHECK_MC
8389 RTGCPTR GCPtrEff;
8390 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8391 if (rcStrict != VINF_SUCCESS)
8392 return rcStrict;
8393#endif
8394 IEMOP_HLP_DONE_DECODING();
8395 }
8396 return IEMOP_RAISE_INVALID_OPCODE();
8397}
8398
8399
8400
8401/**
8402 * Two byte opcode map, first byte 0x0f.
8403 *
8404 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8405 * check if it needs updating as well when making changes.
8406 */
8407IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8408{
8409 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8410 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8411 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8412 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8413 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8414 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8415 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8416 /* 0x06 */ IEMOP_X4(iemOp_clts),
8417 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8418 /* 0x08 */ IEMOP_X4(iemOp_invd),
8419 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8420 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8421 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8422 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8423 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8424 /* 0x0e */ IEMOP_X4(iemOp_femms),
8425 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8426
8427 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vx_Wsd,
8428 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8429 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8430 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8431 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8432 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8433 /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
8434 /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8435 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8436 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8437 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8438 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8439 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8440 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8441 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8442 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8443
8444 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8445 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8446 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8447 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8448 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8449 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8450 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8451 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8452 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8453 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8454 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
8455 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8456 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
8457 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
8458 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8459 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8460
8461 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8462 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8463 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8464 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8465 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8466 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8467 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8468 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8469 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
8470 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8471 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
8472 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8473 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8474 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8475 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8476 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */  IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */  IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */  IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */  IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */  IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */  IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */  IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */  IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */  IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */  IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */  IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */  IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */  IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */  IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */  IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */  IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */  iemOp_movmskps_Gy_Ups,      iemOp_movmskpd_Gy_Upd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x51 */  iemOp_sqrtps_Vps_Wps,       iemOp_sqrtpd_Vpd_Wpd,       iemOp_sqrtss_Vss_Wss,       iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */  iemOp_rsqrtps_Vps_Wps,      iemOp_InvalidNeedRM,        iemOp_rsqrtss_Vss_Wss,      iemOp_InvalidNeedRM,
    /* 0x53 */  iemOp_rcpps_Vps_Wps,        iemOp_InvalidNeedRM,        iemOp_rcpss_Vss_Wss,        iemOp_InvalidNeedRM,
    /* 0x54 */  iemOp_andps_Vps_Wps,        iemOp_andpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x55 */  iemOp_andnps_Vps_Wps,       iemOp_andnpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x56 */  iemOp_orps_Vps_Wps,         iemOp_orpd_Vpd_Wpd,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x57 */  iemOp_xorps_Vps_Wps,        iemOp_xorpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x58 */  iemOp_addps_Vps_Wps,        iemOp_addpd_Vpd_Wpd,        iemOp_addss_Vss_Wss,        iemOp_addsd_Vsd_Wsd,
    /* 0x59 */  iemOp_mulps_Vps_Wps,        iemOp_mulpd_Vpd_Wpd,        iemOp_mulss_Vss_Wss,        iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */  iemOp_cvtps2pd_Vpd_Wps,     iemOp_cvtpd2ps_Vps_Wpd,     iemOp_cvtss2sd_Vsd_Wss,     iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */  iemOp_cvtdq2ps_Vps_Wdq,     iemOp_cvtps2dq_Vdq_Wps,     iemOp_cvttps2dq_Vdq_Wps,    iemOp_InvalidNeedRM,
    /* 0x5c */  iemOp_subps_Vps_Wps,        iemOp_subpd_Vpd_Wpd,        iemOp_subss_Vss_Wss,        iemOp_subsd_Vsd_Wsd,
    /* 0x5d */  iemOp_minps_Vps_Wps,        iemOp_minpd_Vpd_Wpd,        iemOp_minss_Vss_Wss,        iemOp_minsd_Vsd_Wsd,
    /* 0x5e */  iemOp_divps_Vps_Wps,        iemOp_divpd_Vpd_Wpd,        iemOp_divss_Vss_Wss,        iemOp_divsd_Vsd_Wsd,
    /* 0x5f */  iemOp_maxps_Vps_Wps,        iemOp_maxpd_Vpd_Wpd,        iemOp_maxss_Vss_Wss,        iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */  iemOp_punpcklbw_Pq_Qd,      iemOp_punpcklbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd,      iemOp_punpcklwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd,      iemOp_punpckldq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x63 */  iemOp_packsswb_Pq_Qq,       iemOp_packsswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq,        iemOp_pcmpgtb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq,        iemOp_pcmpgtw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq,        iemOp_pcmpgtd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x67 */  iemOp_packuswb_Pq_Qq,       iemOp_packuswb_Vx_W,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qd,      iemOp_punpckhbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qd,      iemOp_punpckhwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qd,      iemOp_punpckhdq_Vx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6b */  iemOp_packssdw_Pq_Qd,       iemOp_packssdw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6c */  iemOp_InvalidNeedRM,        iemOp_punpcklqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6d */  iemOp_InvalidNeedRM,        iemOp_punpckhqdq_Vx_W,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6e */  iemOp_movd_q_Pd_Ey,         iemOp_movd_q_Vy_Ey,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6f */  iemOp_movq_Pq_Qq,           iemOp_movdqa_Vx_Wx,         iemOp_movdqu_Vx_Wx,         iemOp_InvalidNeedRM,

    /* 0x70 */  iemOp_pshufw_Pq_Qq_Ib,      iemOp_pshufd_Vx_Wx_Ib,      iemOp_pshufhw_Vx_Wx_Ib,     iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */  IEMOP_X4(iemOp_Grp12),
    /* 0x72 */  IEMOP_X4(iemOp_Grp13),
    /* 0x73 */  IEMOP_X4(iemOp_Grp14),
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq,        iemOp_pcmpeqb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq,        iemOp_pcmpeqw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x76 */  iemOp_pcmpeqd_Pq_Qq,        iemOp_pcmpeqd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x77 */  iemOp_emms,                 iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x78 */  iemOp_vmread_Ey_Gy,         iemOp_AmdGrp17,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x79 */  iemOp_vmwrite_Gy_Ey,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7a */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7b */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7c */  iemOp_InvalidNeedRM,        iemOp_haddpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_haddps_Vps_Wps,
    /* 0x7d */  iemOp_InvalidNeedRM,        iemOp_hsubpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_hsubps_Vps_Wps,
    /* 0x7e */  iemOp_movd_q_Ey_Pd,         iemOp_movd_q_Ey_Vy,         iemOp_movq_Vq_Wq,           iemOp_InvalidNeedRM,
    /* 0x7f */  iemOp_movq_Qq_Pq,           iemOp_movdqa_Wx_Vx,         iemOp_movdqu_Wx_Vx,         iemOp_InvalidNeedRM,

    /* 0x80 */  IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */  IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */  IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */  IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */  IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */  IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */  IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */  IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */  IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */  IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */  IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */  IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */  IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */  IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */  IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */  IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */  IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */  IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */  IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */  IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */  IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */  IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */  IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */  IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */  IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */  IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */  IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */  IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */  IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */  IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */  IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */  IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */  IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */  IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */  IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */  IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */  IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */  IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */  IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */  IEMOP_X4(iemOp_rsm),
    /* 0xab */  IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */  IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */  IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */  IEMOP_X4(iemOp_Grp15),
    /* 0xaf */  IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */  IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */  IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */  IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */  IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */  IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */  IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */  IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */  IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */  iemOp_jmpe,                 iemOp_InvalidNeedRM,        iemOp_popcnt_Gv_Ev,         iemOp_InvalidNeedRM,
    /* 0xb9 */  IEMOP_X4(iemOp_Grp10),
    /* 0xba */  IEMOP_X4(iemOp_Grp8),
    /* 0xbb */  IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */  iemOp_bsf_Gv_Ev,            iemOp_bsf_Gv_Ev,            iemOp_tzcnt_Gv_Ev,          iemOp_bsf_Gv_Ev,
    /* 0xbd */  iemOp_bsr_Gv_Ev,            iemOp_bsr_Gv_Ev,            iemOp_lzcnt_Gv_Ev,          iemOp_bsr_Gv_Ev,
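    /*
     * Note: with an 0xF3 prefix, 0x0f 0xbc / 0x0f 0xbd decode as TZCNT/LZCNT
     * on CPUs that advertise those features; CPUs without them ignore the
     * prefix and execute plain BSF/BSR, which is why bsf/bsr also backs the
     * other columns of the two rows above.
     */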
    /* 0xbe */  IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */  IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */  IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */  IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib,     iemOp_cmppd_Vpd_Wpd_Ib,     iemOp_cmpss_Vss_Wss_Ib,     iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc4 */  iemOp_pinsrw_Pq_RyMw_Ib,    iemOp_pinsrw_Vdq_RyMw_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib,      iemOp_pextrw_Gd_Udq_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib,    iemOp_shufpd_Vpd_Wpd_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */  IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */  IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */  IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */  IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */  IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */  IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */  IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */  IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */  iemOp_InvalidNeedRM,        iemOp_addsubpd_Vpd_Wpd,     iemOp_InvalidNeedRM,        iemOp_addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pq_Qq,          iemOp_psrlw_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_psrld_Pq_Qq,          iemOp_psrld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq,          iemOp_psrlq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_paddq_Pq_Qq,          iemOp_paddq_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_pmullw_Pq_Qq,         iemOp_pmullw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM,        iemOp_movq_Wq_Vq,           iemOp_movq2dq_Vdq_Nq,       iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq,       iemOp_pmovmskb_Gd_Ux,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq,        iemOp_psubusb_Vx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq,        iemOp_psubusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_pminub_Pq_Qq,         iemOp_pminub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_pand_Pq_Qq,           iemOp_pand_Vx_W,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_paddusb_Pq_Qq,        iemOp_paddusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_paddusw_Pq_Qq,        iemOp_paddusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_pmaxub_Pq_Qq,         iemOp_pmaxub_Vx_W,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_pandn_Pq_Qq,          iemOp_pandn_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_pavgb_Pq_Qq,          iemOp_pavgb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_psraw_Pq_Qq,          iemOp_psraw_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_psrad_Pq_Qq,          iemOp_psrad_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq,          iemOp_pavgw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq,        iemOp_pmulhuw_Vx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq,         iemOp_pmulhw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM,        iemOp_cvttpd2dq_Vx_Wpd,     iemOp_cvtdq2pd_Vx_Wpd,      iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq,         iemOp_movntdq_Mx_Vx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq,         iemOp_psubsb_Vx_W,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq,         iemOp_psubsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_pminsw_Pq_Qq,         iemOp_pminsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_por_Pq_Qq,            iemOp_por_Vx_W,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_paddsb_Pq_Qq,         iemOp_paddsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_paddsw_Pq_Qq,         iemOp_paddsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq,         iemOp_pmaxsw_Vx_W,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_pxor_Pq_Qq,           iemOp_pxor_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_lddqu_Vx_Mx,
    /* 0xf1 */  iemOp_psllw_Pq_Qq,          iemOp_psllw_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_pslld_Pq_Qq,          iemOp_pslld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_psllq_Pq_Qq,          iemOp_psllq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq,        iemOp_pmuludq_Vx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq,        iemOp_pmaddwd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq,         iemOp_psadbw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq,       iemOp_maskmovdqu_Vdq_Udq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_psubb_Pq_Qq,          iemOp_psubb_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_psubw_Pq_Qq,          iemOp_psubw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_psubd_Pq_Qq,          iemOp_psubd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_psubq_Pq_Qq,          iemOp_psubq_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_paddb_Pq_Qq,          iemOp_paddb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_paddw_Pq_Qq,          iemOp_paddw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_paddd_Pq_Qq,          iemOp_paddd_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
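
/*
 * A minimal sketch of how a four-column map like the one above is consumed.
 * This is a hedged illustration only -- the actual two-byte escape decoder
 * lives elsewhere in IEM and may differ in detail: the table is indexed by
 * opcode * 4 plus the mandatory-prefix column (none, 0x66, 0xF3, 0xF2)
 * selected while decoding prefixes.
 */
#if 0 /* illustration only, not part of the original file */
FNIEMOP_DEF(iemOp_2byteEscape_sketch) /* hypothetical name for this sketch */
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Four entries per opcode byte; idxPrefix selects the 0..3 column. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif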

/** @} */
