VirtualBox: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ r66789

Last change r66789 (2017-05-04, vboxsync): IEM: Implemented movshdup Vdq,Wdq (f3 0f 16).

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66789 2017-05-04 12:23:05Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.215389.xyz. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
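
/* Note: with a register destination the 16-bit selector is stored
   zero-extended to the effective operand size (the u32/u64 paths above),
   whereas the memory form always stores exactly 16 bits regardless of any
   operand-size prefix. */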


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for the Group 6 verr and verw instructions, 0x0f 0x00 /4 and /5. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
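
/* Dispatch example: for the byte sequence 0f 00 d8 the ModR/M byte is
   0xd8 = 11 011 000b, i.e. mod=3 (register form) and reg=3, which selects
   g_apfnGroup6[3] = iemOp_Grp6_ltr, with rm=0 naming the AX register. */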


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
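
/* The OR masks above model how older CPUs report the undefined high MSW bits
   as set: the 386 keeps ET (bit 4) and sets bits 5..15 (0xffe0), while the
   286 sets bits 4..15 (0xfff0); later CPUs return the CR0 bits unmodified. */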


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       four low-order bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

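
/* With mod=3 the /0../3 encodings are recycled for the register-form
   instructions above, e.g. 0f 01 d0 decodes as reg=2/rm=0 and dispatches to
   xgetbv; the memory jump table is only consulted when mod != 3. */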
/** Common worker for the lar and lsl instructions, 0x0f 0x02 and 0x0f 0x03. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
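
/* Note: the 32-bit and 64-bit operand sizes share the 64-bit worker above;
   iemCImpl_LarLsl_u64 is presumably left to narrow the stored result as
   appropriate for a 32-bit destination. */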



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}

// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
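
/* pVCpu->iem.s.uRexReg and uRexB hold the REX.R/REX.B extensions already
   shifted into place (0 or 8), which is why OR-ing them onto the 3-bit
   ModR/M fields yields XMM0..XMM15 indexes in 64-bit mode. */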


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZxReg, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
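
/* One opcode, two instructions: the register form is movhlps (high qword of
   the source register into the low qword of the destination), while the
   memory form is movlps (qword load into the low half, high half preserved). */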


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
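
/* movsldup duplicates the even dwords of the source: dst[0]=dst[1]=src[0]
   and dst[2]=dst[3]=src[2], as the @optest values above illustrate. */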


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
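
/* movddup replicates the low qword of the source into both halves of the
   destination, which is why the memory form only fetches 64 bits. */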


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic udf30f13
 * @opcode 0x13
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f13
 * @opcode 0x13
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic udf30f14
 * @opcode 0x14
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f14
 * @opcode 0x14
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic udf30f15
 * @opcode 0x15
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f15
 * @opcode 0x15
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
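
/* Mirror image of 0x0f 0x12: the register form is movlhps (low qword of the
   source register into the high qword of the destination), the memory form
   is movhps (qword load into the high half, low half preserved). */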


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *         op1=0x00000002000000020000000100000001
1933 */
1934FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1935{
1936 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1937 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1938 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1939 {
1940 /*
1941 * Register, register.
1942 */
1943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1944 IEM_MC_BEGIN(2, 0);
1945 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1946 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1947
1948 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1949 IEM_MC_PREPARE_SSE_USAGE();
1950
1951 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1952 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1953 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1954
1955 IEM_MC_ADVANCE_RIP();
1956 IEM_MC_END();
1957 }
1958 else
1959 {
1960 /*
1961 * Register, memory.
1962 */
1963 IEM_MC_BEGIN(2, 2);
1964 IEM_MC_LOCAL(RTUINT128U, uSrc);
1965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1966 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1967 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1968
1969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1971 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1972 IEM_MC_PREPARE_SSE_USAGE();
1973
1974 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1975 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1976 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1977
1978 IEM_MC_ADVANCE_RIP();
1979 IEM_MC_END();
1980 }
1981 return VINF_SUCCESS;
1982}
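
/*
 * Editor's illustration, not part of the decoder: a sketch of the movshdup
 * operation iemAImpl_movshdup implements, matching the @optest vector above.
 * Type and function names are hypothetical.
 */
#if 0
#include <stdint.h>

typedef struct { uint32_t au32[4]; } X128DW;

/* movshdup duplicates the odd (high) dword of each 64-bit half. */
static void sketchMovShDup(X128DW *pDst, X128DW const *pSrc)
{
    pDst->au32[0] = pSrc->au32[1];
    pDst->au32[1] = pSrc->au32[1];
    pDst->au32[2] = pSrc->au32[3];
    pDst->au32[3] = pSrc->au32[3];
}
#endif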
1983
1984/**
1985 * @opdone
1986 * @opmnemonic udf30f16
1987 * @opcode 0x16
1988 * @oppfx 0xf2
1989 * @opunused intel-modrm
1990 * @opcpuid sse
1991 * @optest ->
1992 * @opdone
1993 */
1994
1995/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
1996FNIEMOP_STUB(iemOp_movhps_Mq_Vq); //NEXT
1997/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
1998FNIEMOP_STUB(iemOp_movhpd_Mq_Vq); //NEXT
1999
2000/**
2001 * @opdone
2002 * @opmnemonic udf30f17
2003 * @opcode 0x17
2004 * @oppfx 0xf3
2005 * @opunused intel-modrm
2006 * @opcpuid sse
2007 * @optest ->
2008 * @opdone
2009 */
2010
2011/**
2012 * @opmnemonic udf20f17
2013 * @opcode 0x17
2014 * @oppfx 0xf2
2015 * @opunused intel-modrm
2016 * @opcpuid sse
2017 * @optest ->
2018 * @opdone
2019 */
2020
2021
2022/** Opcode 0x0f 0x18. */
2023FNIEMOP_DEF(iemOp_prefetch_Grp16)
2024{
2025 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2026 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2027 {
2028 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2029 {
2030 case 4: /* Aliased to /0 for the time being according to AMD. */
2031 case 5: /* Aliased to /0 for the time being according to AMD. */
2032 case 6: /* Aliased to /0 for the time being according to AMD. */
2033 case 7: /* Aliased to /0 for the time being according to AMD. */
2034 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2035 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2036 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2037 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2039 }
2040
2041 IEM_MC_BEGIN(0, 1);
2042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2045 /* Currently a NOP. */
2046 NOREF(GCPtrEffSrc);
2047 IEM_MC_ADVANCE_RIP();
2048 IEM_MC_END();
2049 return VINF_SUCCESS;
2050 }
2051
2052 return IEMOP_RAISE_INVALID_OPCODE();
2053}
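
/*
 * Editor's illustration, not part of the decoder: how a ModRM byte breaks
 * down into the fields used above, where the reg field selects the prefetch
 * hint and mod=3 (no memory operand) is rejected. A hypothetical standalone
 * form of the X86_MODRM_* masking.
 */
#if 0
#include <stdint.h>

static void sketchSplitModRm(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = bRm >> 6;        /* bits 7:6 - addressing mode, 3 = register */
    *pbReg = (bRm >> 3) & 7;  /* bits 5:3 - /0../7, the prefetch hint here */
    *pbRm  = bRm & 7;         /* bits 2:0 - base register / memory form */
}
#endif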
2054
2055
2056/** Opcode 0x0f 0x19..0x1f. */
2057FNIEMOP_DEF(iemOp_nop_Ev)
2058{
2059 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2062 {
2063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2064 IEM_MC_BEGIN(0, 0);
2065 IEM_MC_ADVANCE_RIP();
2066 IEM_MC_END();
2067 }
2068 else
2069 {
2070 IEM_MC_BEGIN(0, 1);
2071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2074 /* Currently a NOP. */
2075 NOREF(GCPtrEffSrc);
2076 IEM_MC_ADVANCE_RIP();
2077 IEM_MC_END();
2078 }
2079 return VINF_SUCCESS;
2080}
2081
2082
2083/** Opcode 0x0f 0x20. */
2084FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2085{
2086 /* mod is ignored, as are operand size overrides. */
2087 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2088 IEMOP_HLP_MIN_386();
2089 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2090 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2091 else
2092 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2093
2094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2095 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2096 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2097 {
2098 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2099 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2100 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2101 iCrReg |= 8;
2102 }
2103 switch (iCrReg)
2104 {
2105 case 0: case 2: case 3: case 4: case 8:
2106 break;
2107 default:
2108 return IEMOP_RAISE_INVALID_OPCODE();
2109 }
2110 IEMOP_HLP_DONE_DECODING();
2111
2112 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2113}
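
/*
 * Editor's illustration, not part of the decoder: how the CR index above is
 * assembled from ModRM.reg, REX.R and the LOCK-as-CR8 quirk. A sketch under
 * the assumption that uRexReg is simply REX.R shifted into bit 3; returns -1
 * where the real code raises #UD.
 */
#if 0
#include <stdbool.h>
#include <stdint.h>

static int sketchCrIndex(uint8_t bRm, bool fRexR, bool fLock, bool fMovCr8In32Bit)
{
    int iCrReg = ((bRm >> 3) & 7) | (fRexR ? 8 : 0);
    if (fLock)
    {
        if (!fMovCr8In32Bit)
            return -1;              /* #UD; takes precedence over #GP. */
        iCrReg |= 8;                /* the lock prefix encodes a CR8 access */
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            return iCrReg;          /* implemented control registers */
        default:
            return -1;              /* #UD */
    }
}
#endif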
2114
2115
2116/** Opcode 0x0f 0x21. */
2117FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2118{
2119 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2120 IEMOP_HLP_MIN_386();
2121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2123 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2124 return IEMOP_RAISE_INVALID_OPCODE();
2125 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2126 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2127 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2128}
2129
2130
2131/** Opcode 0x0f 0x22. */
2132FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2133{
2134 /* mod is ignored, as are operand size overrides. */
2135 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2136 IEMOP_HLP_MIN_386();
2137 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2138 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2139 else
2140 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2141
2142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2143 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2144 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2145 {
2146 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2147 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2148 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2149 iCrReg |= 8;
2150 }
2151 switch (iCrReg)
2152 {
2153 case 0: case 2: case 3: case 4: case 8:
2154 break;
2155 default:
2156 return IEMOP_RAISE_INVALID_OPCODE();
2157 }
2158 IEMOP_HLP_DONE_DECODING();
2159
2160 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2161}
2162
2163
2164/** Opcode 0x0f 0x23. */
2165FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2166{
2167 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2168 IEMOP_HLP_MIN_386();
2169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2171 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2172 return IEMOP_RAISE_INVALID_OPCODE();
2173 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2174 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2175 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2176}
2177
2178
2179/** Opcode 0x0f 0x24. */
2180FNIEMOP_DEF(iemOp_mov_Rd_Td)
2181{
2182 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2183 /** @todo works on 386 and 486. */
2184 /* The RM byte is not considered, see testcase. */
2185 return IEMOP_RAISE_INVALID_OPCODE();
2186}
2187
2188
2189/** Opcode 0x0f 0x26. */
2190FNIEMOP_DEF(iemOp_mov_Td_Rd)
2191{
2192 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2193 /** @todo works on 386 and 486. */
2194 /* The RM byte is not considered, see testcase. */
2195 return IEMOP_RAISE_INVALID_OPCODE();
2196}
2197
2198
2199/** Opcode 0x0f 0x28 - movaps Vps, Wps */
2200FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2201{
2202 IEMOP_MNEMONIC(movaps_r_mr, "movaps Vps,Wps");
2203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2204 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2205 {
2206 /*
2207 * Register, register.
2208 */
2209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2210 IEM_MC_BEGIN(0, 0);
2211 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2212 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2213 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2214 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2215 IEM_MC_ADVANCE_RIP();
2216 IEM_MC_END();
2217 }
2218 else
2219 {
2220 /*
2221 * Register, memory.
2222 */
2223 IEM_MC_BEGIN(0, 2);
2224 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2226
2227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2231
2232 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2233 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2234
2235 IEM_MC_ADVANCE_RIP();
2236 IEM_MC_END();
2237 }
2238 return VINF_SUCCESS;
2239}
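
/*
 * Editor's illustration, not part of the decoder: movaps/movapd demand a
 * 16-byte aligned memory operand, which is the check the *_ALIGN_SSE
 * fetch/store helpers above perform (raising #GP(0) on failure). Sketch:
 */
#if 0
#include <stdbool.h>
#include <stdint.h>

static bool sketchIsSse16Aligned(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0;
}
#endif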
2240
2241/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
2242FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2243{
2244 IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
2245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2246 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2247 {
2248 /*
2249 * Register, register.
2250 */
2251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2252 IEM_MC_BEGIN(0, 0);
2253 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2254 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2255 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2256 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2257 IEM_MC_ADVANCE_RIP();
2258 IEM_MC_END();
2259 }
2260 else
2261 {
2262 /*
2263 * Register, memory.
2264 */
2265 IEM_MC_BEGIN(0, 2);
2266 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2271 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2273
2274 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2275 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2276
2277 IEM_MC_ADVANCE_RIP();
2278 IEM_MC_END();
2279 }
2280 return VINF_SUCCESS;
2281}
2282
2283/* Opcode 0xf3 0x0f 0x28 - invalid */
2284/* Opcode 0xf2 0x0f 0x28 - invalid */
2285
2286/** Opcode 0x0f 0x29 - movaps Wps, Vps */
2287FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2288{
2289 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2291 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2292 {
2293 /*
2294 * Register, register.
2295 */
2296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2297 IEM_MC_BEGIN(0, 0);
2298 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2299 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2300 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2301 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2302 IEM_MC_ADVANCE_RIP();
2303 IEM_MC_END();
2304 }
2305 else
2306 {
2307 /*
2308 * Memory, register.
2309 */
2310 IEM_MC_BEGIN(0, 2);
2311 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2313
2314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2316 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2317 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2318
2319 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2320 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2321
2322 IEM_MC_ADVANCE_RIP();
2323 IEM_MC_END();
2324 }
2325 return VINF_SUCCESS;
2326}
2327
2328/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
2329FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2330{
2331 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2332 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2333 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2334 {
2335 /*
2336 * Register, register.
2337 */
2338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2339 IEM_MC_BEGIN(0, 0);
2340 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2341 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2342 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2343 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2344 IEM_MC_ADVANCE_RIP();
2345 IEM_MC_END();
2346 }
2347 else
2348 {
2349 /*
2350 * Memory, register.
2351 */
2352 IEM_MC_BEGIN(0, 2);
2353 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2355
2356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2358 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2359 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2360
2361 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2362 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2363
2364 IEM_MC_ADVANCE_RIP();
2365 IEM_MC_END();
2366 }
2367 return VINF_SUCCESS;
2368}
2369
2370/* Opcode 0xf3 0x0f 0x29 - invalid */
2371/* Opcode 0xf2 0x0f 0x29 - invalid */
2372
2373
2374/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2375FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2376/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2377FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2378/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2379FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2380/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2381FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2382
2383
2384/** Opcode 0x0f 0x2b - movntps Mps, Vps */
2385FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2386{
2387 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2389 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2390 {
2391 /*
2392 * memory, register.
2393 */
2394 IEM_MC_BEGIN(0, 2);
2395 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2397
2398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2400 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2402
2403 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2404 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2405
2406 IEM_MC_ADVANCE_RIP();
2407 IEM_MC_END();
2408 }
2409 /* The register, register encoding is invalid. */
2410 else
2411 return IEMOP_RAISE_INVALID_OPCODE();
2412 return VINF_SUCCESS;
2413}
2414
2415/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2416FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2417{
2418 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2420 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2421 {
2422 /*
2423 * memory, register.
2424 */
2425 IEM_MC_BEGIN(0, 2);
2426 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428
2429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2431 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2433
2434 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2435 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 /* The register, register encoding is invalid. */
2441 else
2442 return IEMOP_RAISE_INVALID_OPCODE();
2443 return VINF_SUCCESS;
2444}
2445/* Opcode 0xf3 0x0f 0x2b - invalid */
2446/* Opcode 0xf2 0x0f 0x2b - invalid */
2447
2448
2449/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2450FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2451/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2452FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2453/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2454FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2455/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2456FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2457
2458/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2459FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2460/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2461FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2462/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2463FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2464/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2465FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2466
2467/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2468FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2469/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2470FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2471/* Opcode 0xf3 0x0f 0x2e - invalid */
2472/* Opcode 0xf2 0x0f 0x2e - invalid */
2473
2474/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2475FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2476/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2477FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2478/* Opcode 0xf3 0x0f 0x2f - invalid */
2479/* Opcode 0xf2 0x0f 0x2f - invalid */
2480
2481/** Opcode 0x0f 0x30. */
2482FNIEMOP_DEF(iemOp_wrmsr)
2483{
2484 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2486 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2487}
2488
2489
2490/** Opcode 0x0f 0x31. */
2491FNIEMOP_DEF(iemOp_rdtsc)
2492{
2493 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2495 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2496}
2497
2498
2499/** Opcode 0x0f 0x32. */
2500FNIEMOP_DEF(iemOp_rdmsr)
2501{
2502 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2504 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2505}
2506
2507
2508/** Opcode 0x0f 0x33. */
2509FNIEMOP_DEF(iemOp_rdpmc)
2510{
2511 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2513 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2514}
2515
2516
2517/** Opcode 0x0f 0x34. */
2518FNIEMOP_STUB(iemOp_sysenter);
2519/** Opcode 0x0f 0x35. */
2520FNIEMOP_STUB(iemOp_sysexit);
2521/** Opcode 0x0f 0x37. */
2522FNIEMOP_STUB(iemOp_getsec);
2523
2524
2525/** Opcode 0x0f 0x38. */
2526FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2527{
2528#ifdef IEM_WITH_THREE_0F_38
2529 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2530 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2531#else
2532 IEMOP_BITCH_ABOUT_STUB();
2533 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2534#endif
2535}
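
/*
 * Editor's illustration, not part of the decoder: the three-byte tables are
 * laid out with four entries per opcode byte, one per mandatory prefix, so
 * the dispatch above is a simple multiply-add. The idxPrefix encoding
 * assumed here is 0=none, 1=0x66, 2=0xf3, 3=0xf2.
 */
#if 0
#include <stdint.h>

static uintptr_t sketchThreeByteTableIndex(uint8_t bOpcode, uint8_t idxPrefix)
{
    return (uintptr_t)bOpcode * 4 + idxPrefix;
}
#endif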
2536
2537
2538/** Opcode 0x0f 0x3a. */
2539FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2540{
2541#ifdef IEM_WITH_THREE_0F_3A
2542 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2543 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2544#else
2545 IEMOP_BITCH_ABOUT_STUB();
2546 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2547#endif
2548}
2549
2550
2551/**
2552 * Implements a conditional move.
2553 *
2554 * Wish there were an obvious way to do this that would let us share code
2555 * and reduce bloat.
2556 *
2557 * @param a_Cnd The conditional "microcode" operation.
2558 */
2559#define CMOV_X(a_Cnd) \
2560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2561 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2562 { \
2563 switch (pVCpu->iem.s.enmEffOpSize) \
2564 { \
2565 case IEMMODE_16BIT: \
2566 IEM_MC_BEGIN(0, 1); \
2567 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2568 a_Cnd { \
2569 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2570 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2571 } IEM_MC_ENDIF(); \
2572 IEM_MC_ADVANCE_RIP(); \
2573 IEM_MC_END(); \
2574 return VINF_SUCCESS; \
2575 \
2576 case IEMMODE_32BIT: \
2577 IEM_MC_BEGIN(0, 1); \
2578 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2579 a_Cnd { \
2580 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2581 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2582 } IEM_MC_ELSE() { \
2583 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2584 } IEM_MC_ENDIF(); \
2585 IEM_MC_ADVANCE_RIP(); \
2586 IEM_MC_END(); \
2587 return VINF_SUCCESS; \
2588 \
2589 case IEMMODE_64BIT: \
2590 IEM_MC_BEGIN(0, 1); \
2591 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2592 a_Cnd { \
2593 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2594 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2595 } IEM_MC_ENDIF(); \
2596 IEM_MC_ADVANCE_RIP(); \
2597 IEM_MC_END(); \
2598 return VINF_SUCCESS; \
2599 \
2600 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2601 } \
2602 } \
2603 else \
2604 { \
2605 switch (pVCpu->iem.s.enmEffOpSize) \
2606 { \
2607 case IEMMODE_16BIT: \
2608 IEM_MC_BEGIN(0, 2); \
2609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2610 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2612 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2613 a_Cnd { \
2614 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2615 } IEM_MC_ENDIF(); \
2616 IEM_MC_ADVANCE_RIP(); \
2617 IEM_MC_END(); \
2618 return VINF_SUCCESS; \
2619 \
2620 case IEMMODE_32BIT: \
2621 IEM_MC_BEGIN(0, 2); \
2622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2623 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2625 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2626 a_Cnd { \
2627 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2628 } IEM_MC_ELSE() { \
2629 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2630 } IEM_MC_ENDIF(); \
2631 IEM_MC_ADVANCE_RIP(); \
2632 IEM_MC_END(); \
2633 return VINF_SUCCESS; \
2634 \
2635 case IEMMODE_64BIT: \
2636 IEM_MC_BEGIN(0, 2); \
2637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2638 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2640 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2641 a_Cnd { \
2642 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2643 } IEM_MC_ENDIF(); \
2644 IEM_MC_ADVANCE_RIP(); \
2645 IEM_MC_END(); \
2646 return VINF_SUCCESS; \
2647 \
2648 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2649 } \
2650 } do {} while (0)
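
/*
 * Editor's illustration, not part of the decoder: why the 32-bit arm of
 * CMOV_X has an IEM_MC_ELSE branch. In 64-bit mode a 32-bit cmov
 * zero-extends the destination even when the condition is false; only the
 * source selection is conditional. Hypothetical standalone model:
 */
#if 0
#include <stdbool.h>
#include <stdint.h>

static uint64_t sketchCmov32(uint64_t uDst64, uint32_t uSrc32, bool fCond)
{
    return fCond ? (uint64_t)uSrc32       /* condition true: take the source */
                 : (uDst64 & UINT32_MAX); /* false: keep it, clear bits 63:32 */
}
#endif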
2651
2652
2653
2654/** Opcode 0x0f 0x40. */
2655FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2656{
2657 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2658 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2659}
2660
2661
2662/** Opcode 0x0f 0x41. */
2663FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2664{
2665 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2666 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2667}
2668
2669
2670/** Opcode 0x0f 0x42. */
2671FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2672{
2673 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2674 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2675}
2676
2677
2678/** Opcode 0x0f 0x43. */
2679FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2680{
2681 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2682 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2683}
2684
2685
2686/** Opcode 0x0f 0x44. */
2687FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2688{
2689 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2690 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2691}
2692
2693
2694/** Opcode 0x0f 0x45. */
2695FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2696{
2697 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2698 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2699}
2700
2701
2702/** Opcode 0x0f 0x46. */
2703FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2704{
2705 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2706 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2707}
2708
2709
2710/** Opcode 0x0f 0x47. */
2711FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2712{
2713 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2714 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2715}
2716
2717
2718/** Opcode 0x0f 0x48. */
2719FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2720{
2721 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2722 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2723}
2724
2725
2726/** Opcode 0x0f 0x49. */
2727FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2728{
2729 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2730 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2731}
2732
2733
2734/** Opcode 0x0f 0x4a. */
2735FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2736{
2737 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2738 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2739}
2740
2741
2742/** Opcode 0x0f 0x4b. */
2743FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2744{
2745 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2746 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2747}
2748
2749
2750/** Opcode 0x0f 0x4c. */
2751FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2752{
2753 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2754 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2755}
2756
2757
2758/** Opcode 0x0f 0x4d. */
2759FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2760{
2761 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2762 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2763}
2764
2765
2766/** Opcode 0x0f 0x4e. */
2767FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2768{
2769 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2770 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2771}
2772
2773
2774/** Opcode 0x0f 0x4f. */
2775FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2776{
2777 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2778 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2779}
2780
2781#undef CMOV_X
2782
2783/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2784FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2785/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2786FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2787/* Opcode 0xf3 0x0f 0x50 - invalid */
2788/* Opcode 0xf2 0x0f 0x50 - invalid */
2789
2790/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2791FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2792/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2793FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2794/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2795FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2796/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2797FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2798
2799/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2800FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2801/* Opcode 0x66 0x0f 0x52 - invalid */
2802/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2803FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2804/* Opcode 0xf2 0x0f 0x52 - invalid */
2805
2806/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2807FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2808/* Opcode 0x66 0x0f 0x53 - invalid */
2809/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2810FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2811/* Opcode 0xf2 0x0f 0x53 - invalid */
2812
2813/** Opcode 0x0f 0x54 - andps Vps, Wps */
2814FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2815/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2816FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2817/* Opcode 0xf3 0x0f 0x54 - invalid */
2818/* Opcode 0xf2 0x0f 0x54 - invalid */
2819
2820/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2821FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2822/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2823FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2824/* Opcode 0xf3 0x0f 0x55 - invalid */
2825/* Opcode 0xf2 0x0f 0x55 - invalid */
2826
2827/** Opcode 0x0f 0x56 - orps Vps, Wps */
2828FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2829/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2830FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2831/* Opcode 0xf3 0x0f 0x56 - invalid */
2832/* Opcode 0xf2 0x0f 0x56 - invalid */
2833
2834/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2835FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2836/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2837FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2838/* Opcode 0xf3 0x0f 0x57 - invalid */
2839/* Opcode 0xf2 0x0f 0x57 - invalid */
2840
2841/** Opcode 0x0f 0x58 - addps Vps, Wps */
2842FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2843/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2844FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2845/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2846FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2847/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2848FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2849
2850/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2851FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2852/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2853FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2854/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2855FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2856/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2857FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2858
2859/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2860FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2861/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2862FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2863/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2864FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2865/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2866FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2867
2868/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2869FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2870/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2871FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2872/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2873FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2874/* Opcode 0xf2 0x0f 0x5b - invalid */
2875
2876/** Opcode 0x0f 0x5c - subps Vps, Wps */
2877FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2878/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2879FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2880/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2881FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2882/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2883FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2884
2885/** Opcode 0x0f 0x5d - minps Vps, Wps */
2886FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2887/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2888FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2889/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2890FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2891/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2892FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2893
2894/** Opcode 0x0f 0x5e - divps Vps, Wps */
2895FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2896/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2897FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2898/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2899FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2900/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2901FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2902
2903/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2904FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2905/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2906FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2907/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2908FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2909/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2910FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2911
2912/**
2913 * Common worker for SSE2 instructions on the forms:
2914 *     pxxxx xmm1, xmm2/mem128
2915 *
2916 * The 2nd operand is the first half of a register, which in the memory case
2917 * means a 128-bit aligned access where only the lower 64 bits (or the full
2918 * 128 bits) may actually be read.
2919 *
2920 * Exceptions type 4.
2921 */
2922FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2923{
2924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2926 {
2927 /*
2928 * Register, register.
2929 */
2930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2931 IEM_MC_BEGIN(2, 0);
2932 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2933 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2934 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2935 IEM_MC_PREPARE_SSE_USAGE();
2936 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2937 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2938 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2939 IEM_MC_ADVANCE_RIP();
2940 IEM_MC_END();
2941 }
2942 else
2943 {
2944 /*
2945 * Register, memory.
2946 */
2947 IEM_MC_BEGIN(2, 2);
2948 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2949 IEM_MC_LOCAL(uint64_t, uSrc);
2950 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2952
2953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2955 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2956 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2957
2958 IEM_MC_PREPARE_SSE_USAGE();
2959 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2960 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2961
2962 IEM_MC_ADVANCE_RIP();
2963 IEM_MC_END();
2964 }
2965 return VINF_SUCCESS;
2966}
2967
2968
2969/**
2970 * Common worker for MMX instructions on the forms:
2971 *     pxxxx mm1, mm2/mem32
2972 *
2973 * The 2nd operand is the first half of a register, which in the memory case
2974 * means a 32-bit memory access.
2975 *
2976 * Exceptions type 4.
2977 */
2978FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2980{
2981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2982 if (!pImpl->pfnU64)
2983 return IEMOP_RAISE_INVALID_OPCODE();
2984 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2985 {
2986 /*
2987 * Register, register.
2988 */
2989 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2990 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2992 IEM_MC_BEGIN(2, 0);
2993 IEM_MC_ARG(uint64_t *, pDst, 0);
2994 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2995 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2996 IEM_MC_PREPARE_FPU_USAGE();
2997 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2998 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2999 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3000 IEM_MC_ADVANCE_RIP();
3001 IEM_MC_END();
3002 }
3003 else
3004 {
3005 /*
3006 * Register, memory.
3007 */
3008 IEM_MC_BEGIN(2, 2);
3009 IEM_MC_ARG(uint64_t *, pDst, 0);
3010 IEM_MC_LOCAL(uint32_t, uSrc);
3011 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3013
3014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3016 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3017 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3018
3019 IEM_MC_PREPARE_FPU_USAGE();
3020 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3021 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3022
3023 IEM_MC_ADVANCE_RIP();
3024 IEM_MC_END();
3025 }
3026 return VINF_SUCCESS;
3027}
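
/*
 * Editor's illustration, not part of the decoder: the low-low interleave the
 * workers above dispatch to, sketched for the MMX punpcklbw case where the
 * source is a 32-bit low half. Function name is hypothetical.
 */
#if 0
#include <stdint.h>

/* Result byte 2*i is destination byte i, result byte 2*i+1 is source byte i. */
static uint64_t sketchPunpcklbwU64(uint64_t uDst, uint32_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);
        uResult |= (uint64_t)((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8);
    }
    return uResult;
}
#endif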
3028
3029
3030/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3031FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3032{
3033 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3034 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3035}
3036
3037/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3038FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3039{
3040 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3041 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3042}
3043
3044/* Opcode 0xf3 0x0f 0x60 - invalid */
3045
3046
3047/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3048FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3049{
3050 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3051 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3052}
3053
3054/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3055FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3056{
3057 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3058 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3059}
3060
3061/* Opcode 0xf3 0x0f 0x61 - invalid */
3062
3063
3064/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3065FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3066{
3067 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3068 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3069}
3070
3071/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3072FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3073{
3074 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3075 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3076}
3077
3078/* Opcode 0xf3 0x0f 0x62 - invalid */
3079
3080
3081
3082/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3083FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3084/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3085FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3086/* Opcode 0xf3 0x0f 0x63 - invalid */
3087
3088/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3089FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3090/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3091FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3092/* Opcode 0xf3 0x0f 0x64 - invalid */
3093
3094/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3095FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3096/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3097FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3098/* Opcode 0xf3 0x0f 0x65 - invalid */
3099
3100/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3101FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3102/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3103FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3104/* Opcode 0xf3 0x0f 0x66 - invalid */
3105
3106/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3107FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3108/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3109FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3110/* Opcode 0xf3 0x0f 0x67 - invalid */
3111
3112
3113/**
3114 * Common worker for MMX instructions on the form:
3115 *     pxxxx mm1, mm2/mem64
3116 *
3117 * The 2nd operand is the second half of a register, which in the memory case
3118 * means a 64-bit memory access.
3119 *
3120 * Exceptions type 4.
3121 */
3123FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3124{
3125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3126 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3127 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3128 {
3129 /*
3130 * Register, register.
3131 */
3132 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3133 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3135 IEM_MC_BEGIN(2, 0);
3136 IEM_MC_ARG(uint64_t *, pDst, 0);
3137 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3138 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3139 IEM_MC_PREPARE_FPU_USAGE();
3140 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3141 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3142 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3143 IEM_MC_ADVANCE_RIP();
3144 IEM_MC_END();
3145 }
3146 else
3147 {
3148 /*
3149 * Register, memory.
3150 */
3151 IEM_MC_BEGIN(2, 2);
3152 IEM_MC_ARG(uint64_t *, pDst, 0);
3153 IEM_MC_LOCAL(uint64_t, uSrc);
3154 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3156
3157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3159 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3160 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3161
3162 IEM_MC_PREPARE_FPU_USAGE();
3163 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3164 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3165
3166 IEM_MC_ADVANCE_RIP();
3167 IEM_MC_END();
3168 }
3169 return VINF_SUCCESS;
3170}
3171
3172
3173/**
3174 * Common worker for SSE2 instructions on the form:
3175 *     pxxxx xmm1, xmm2/mem128
3176 *
3177 * The 2nd operand is the second half of a register, which in the memory case
3178 * means a 128-bit aligned access where the implementation may read the full
3179 * 128 bits or only the upper 64 bits.
3180 *
3181 * Exceptions type 4.
3182 */
3183FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3184{
3185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3187 {
3188 /*
3189 * Register, register.
3190 */
3191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3192 IEM_MC_BEGIN(2, 0);
3193 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3194 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3196 IEM_MC_PREPARE_SSE_USAGE();
3197 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3198 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3199 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3200 IEM_MC_ADVANCE_RIP();
3201 IEM_MC_END();
3202 }
3203 else
3204 {
3205 /*
3206 * Register, memory.
3207 */
3208 IEM_MC_BEGIN(2, 2);
3209 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3210 IEM_MC_LOCAL(RTUINT128U, uSrc);
3211 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3213
3214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3216 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3217 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3218
3219 IEM_MC_PREPARE_SSE_USAGE();
3220 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3221 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3222
3223 IEM_MC_ADVANCE_RIP();
3224 IEM_MC_END();
3225 }
3226 return VINF_SUCCESS;
3227}
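
/*
 * Editor's illustration, not part of the decoder: the high-high form only
 * cares about the upper halves, e.g. punpckhqdq (see below) produces
 * { dst.hi, src.hi }. Type and function names are hypothetical.
 */
#if 0
#include <stdint.h>

typedef struct { uint64_t au64[2]; } X128;

static void sketchPunpckhqdq(X128 *pDst, X128 const *pSrc)
{
    pDst->au64[0] = pDst->au64[1]; /* old high qword of dst becomes the low */
    pDst->au64[1] = pSrc->au64[1]; /* high qword of src becomes the high */
}
#endif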
3228
3229
3230/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3231FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3232{
3233 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3234 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3235}
3236
3237/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3238FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3239{
3240 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3241 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3242}
3243/* Opcode 0xf3 0x0f 0x68 - invalid */
3244
3245
3246/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3247FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3248{
3249 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3250 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3251}
3252
3253/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3254FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3255{
3256 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3257 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3259}
3260/* Opcode 0xf3 0x0f 0x69 - invalid */
3261
3262
3263/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3264FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3265{
3266 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3267 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3268}
3269
3270/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3271FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3272{
3273 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
3274 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3275}
3276/* Opcode 0xf3 0x0f 0x6a - invalid */
3277
3278
3279/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3280FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3281/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3282FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3283/* Opcode 0xf3 0x0f 0x6b - invalid */
3284
3285
3286/* Opcode 0x0f 0x6c - invalid */
3287
3288/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3289FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3290{
3291 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3292 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3293}
3294
3295/* Opcode 0xf3 0x0f 0x6c - invalid */
3296/* Opcode 0xf2 0x0f 0x6c - invalid */
3297
3298
3299/* Opcode 0x0f 0x6d - invalid */
3300
3301/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3302FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3303{
3304 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,Wx");
3305 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3306}
3307
3308/* Opcode 0xf3 0x0f 0x6d - invalid */
3309
3310
3311/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3312FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3313{
3314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3315 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3316 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3317 else
3318 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3319 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3320 {
3321 /* MMX, greg */
3322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3323 IEM_MC_BEGIN(0, 1);
3324 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3325 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3326 IEM_MC_LOCAL(uint64_t, u64Tmp);
3327 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3328 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3329 else
3330 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3331 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3332 IEM_MC_ADVANCE_RIP();
3333 IEM_MC_END();
3334 }
3335 else
3336 {
3337 /* MMX, [mem] */
3338 IEM_MC_BEGIN(0, 2);
3339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3340 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3343 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3344 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3345 {
3346 IEM_MC_LOCAL(uint64_t, u64Tmp);
3347 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3348 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3349 }
3350 else
3351 {
3352 IEM_MC_LOCAL(uint32_t, u32Tmp);
3353 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3354 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3355 }
3356 IEM_MC_ADVANCE_RIP();
3357 IEM_MC_END();
3358 }
3359 return VINF_SUCCESS;
3360}
3361
3362/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3363FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3364{
3365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3366 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3367 IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
3368 else
3369 IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
3370 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3371 {
3372 /* XMM, greg*/
3373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3374 IEM_MC_BEGIN(0, 1);
3375 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3376 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3377 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3378 {
3379 IEM_MC_LOCAL(uint64_t, u64Tmp);
3380 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3381 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3382 }
3383 else
3384 {
3385 IEM_MC_LOCAL(uint32_t, u32Tmp);
3386 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3387 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3388 }
3389 IEM_MC_ADVANCE_RIP();
3390 IEM_MC_END();
3391 }
3392 else
3393 {
3394 /* XMM, [mem] */
3395 IEM_MC_BEGIN(0, 2);
3396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3397 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3401 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3402 {
3403 IEM_MC_LOCAL(uint64_t, u64Tmp);
3404 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3405 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3406 }
3407 else
3408 {
3409 IEM_MC_LOCAL(uint32_t, u32Tmp);
3410 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3411 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3412 }
3413 IEM_MC_ADVANCE_RIP();
3414 IEM_MC_END();
3415 }
3416 return VINF_SUCCESS;
3417}
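
/*
 * Editor's illustration, not part of the decoder: movd to an XMM register
 * zero-extends through all 128 bits, which is what
 * IEM_MC_STORE_XREG_U32_ZX_U128 above expresses. Hypothetical sketch:
 */
#if 0
#include <stdint.h>

typedef struct { uint64_t au64[2]; } X128;

static void sketchMovdToXmm(X128 *pDst, uint32_t uSrc)
{
    pDst->au64[0] = uSrc; /* bits 63:32 implicitly cleared */
    pDst->au64[1] = 0;    /* high qword cleared too */
}
#endif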
3418
3419/* Opcode 0xf3 0x0f 0x6e - invalid */
3420
3421
3422/** Opcode 0x0f 0x6f - movq Pq, Qq */
3423FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3424{
3425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3426 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3427 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3428 {
3429 /*
3430 * Register, register.
3431 */
3432 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3433 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3435 IEM_MC_BEGIN(0, 1);
3436 IEM_MC_LOCAL(uint64_t, u64Tmp);
3437 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3438 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3439 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3440 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3441 IEM_MC_ADVANCE_RIP();
3442 IEM_MC_END();
3443 }
3444 else
3445 {
3446 /*
3447 * Register, memory.
3448 */
3449 IEM_MC_BEGIN(0, 2);
3450 IEM_MC_LOCAL(uint64_t, u64Tmp);
3451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3452
3453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3455 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3456 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3457 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3458 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3459
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 }
3463 return VINF_SUCCESS;
3464}
3465
3466/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3467FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3468{
3469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3470 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3471 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3472 {
3473 /*
3474 * Register, register.
3475 */
3476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3477 IEM_MC_BEGIN(0, 0);
3478 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3479 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3480 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3481 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /*
3488 * Register, memory.
3489 */
3490 IEM_MC_BEGIN(0, 2);
3491 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3493
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3498 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3499 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3500
3501 IEM_MC_ADVANCE_RIP();
3502 IEM_MC_END();
3503 }
3504 return VINF_SUCCESS;
3505}
3506
3507/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3508FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3509{
3510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3511 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3512 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3513 {
3514 /*
3515 * Register, register.
3516 */
3517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3518 IEM_MC_BEGIN(0, 0);
3519 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3520 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3521 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3522 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3523 IEM_MC_ADVANCE_RIP();
3524 IEM_MC_END();
3525 }
3526 else
3527 {
3528 /*
3529 * Register, memory.
3530 */
3531 IEM_MC_BEGIN(0, 2);
3532 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3534
3535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3538 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3539 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3540 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3541
3542 IEM_MC_ADVANCE_RIP();
3543 IEM_MC_END();
3544 }
3545 return VINF_SUCCESS;
3546}
3547
3548
3549/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3550FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3551{
3552 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3554 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3555 {
3556 /*
3557 * Register, register.
3558 */
3559 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3561
3562 IEM_MC_BEGIN(3, 0);
3563 IEM_MC_ARG(uint64_t *, pDst, 0);
3564 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3565 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3566 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3567 IEM_MC_PREPARE_FPU_USAGE();
3568 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3569 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3570 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3571 IEM_MC_ADVANCE_RIP();
3572 IEM_MC_END();
3573 }
3574 else
3575 {
3576 /*
3577 * Register, memory.
3578 */
3579 IEM_MC_BEGIN(3, 2);
3580 IEM_MC_ARG(uint64_t *, pDst, 0);
3581 IEM_MC_LOCAL(uint64_t, uSrc);
3582 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3584
3585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3586 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3587 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3589 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3590
3591 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3592 IEM_MC_PREPARE_FPU_USAGE();
3593 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3594 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3595
3596 IEM_MC_ADVANCE_RIP();
3597 IEM_MC_END();
3598 }
3599 return VINF_SUCCESS;
3600}
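
/*
 * Editor's illustration, not part of the decoder: the pshufw semantics the
 * iemAImpl_pshufw worker implements. Destination word i is source word
 * (bImm >> (2*i)) & 3. Function name is hypothetical.
 */
#if 0
#include <stdint.h>

static uint64_t sketchPshufwU64(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iSel  = (bImm >> (i * 2)) & 3;        /* source word index */
        uint64_t const uWord = (uSrc >> (iSel * 16)) & 0xffff;
        uResult |= uWord << (i * 16);
    }
    return uResult;
}
#endif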
3601
3602/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3603FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3604{
3605 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3607 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3608 {
3609 /*
3610 * Register, register.
3611 */
3612 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3614
3615 IEM_MC_BEGIN(3, 0);
3616 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3617 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3618 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3619 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3620 IEM_MC_PREPARE_SSE_USAGE();
3621 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3622 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3623 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3624 IEM_MC_ADVANCE_RIP();
3625 IEM_MC_END();
3626 }
3627 else
3628 {
3629 /*
3630 * Register, memory.
3631 */
3632 IEM_MC_BEGIN(3, 2);
3633 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3634 IEM_MC_LOCAL(RTUINT128U, uSrc);
3635 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3637
3638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3639 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3640 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3643
3644 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3645 IEM_MC_PREPARE_SSE_USAGE();
3646 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3647 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3648
3649 IEM_MC_ADVANCE_RIP();
3650 IEM_MC_END();
3651 }
3652 return VINF_SUCCESS;
3653}
3654
3655/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3656FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3657{
3658 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3660 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3661 {
3662 /*
3663 * Register, register.
3664 */
3665 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3667
3668 IEM_MC_BEGIN(3, 0);
3669 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3670 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3671 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3672 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3673 IEM_MC_PREPARE_SSE_USAGE();
3674 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3675 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3676 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3677 IEM_MC_ADVANCE_RIP();
3678 IEM_MC_END();
3679 }
3680 else
3681 {
3682 /*
3683 * Register, memory.
3684 */
3685 IEM_MC_BEGIN(3, 2);
3686 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3687 IEM_MC_LOCAL(RTUINT128U, uSrc);
3688 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3690
3691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte follows the ModRM operand -> cbImm=1 */
3692 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3693 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3696
3697 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3698 IEM_MC_PREPARE_SSE_USAGE();
3699 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3700 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3701
3702 IEM_MC_ADVANCE_RIP();
3703 IEM_MC_END();
3704 }
3705 return VINF_SUCCESS;
3706}
3707
3708/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3709FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3710{
3711 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3713 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3714 {
3715 /*
3716 * Register, register.
3717 */
3718 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720
3721 IEM_MC_BEGIN(3, 0);
3722 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3723 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3724 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3725 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3726 IEM_MC_PREPARE_SSE_USAGE();
3727 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3728 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3729 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3730 IEM_MC_ADVANCE_RIP();
3731 IEM_MC_END();
3732 }
3733 else
3734 {
3735 /*
3736 * Register, memory.
3737 */
3738 IEM_MC_BEGIN(3, 2);
3739 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3740 IEM_MC_LOCAL(RTUINT128U, uSrc);
3741 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3743
3744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte follows the ModRM operand -> cbImm=1 */
3745 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3746 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3748 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3749
3750 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3751 IEM_MC_PREPARE_SSE_USAGE();
3752 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3753 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3754
3755 IEM_MC_ADVANCE_RIP();
3756 IEM_MC_END();
3757 }
3758 return VINF_SUCCESS;
3759}
3760
3761
3762/** Opcode 0x0f 0x71 11/2. */
3763FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3764
3765/** Opcode 0x66 0x0f 0x71 11/2. */
3766FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3767
3768/** Opcode 0x0f 0x71 11/4. */
3769FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3770
3771/** Opcode 0x66 0x0f 0x71 11/4. */
3772FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3773
3774/** Opcode 0x0f 0x71 11/6. */
3775FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3776
3777/** Opcode 0x66 0x0f 0x71 11/6. */
3778FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3779
3780
3781/**
3782 * Group 12 jump table for register variant.
3783 */
3784IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3785{
3786 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3787 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3788 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3789 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3790 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3791 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3792 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3793 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3794};
3795AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3796
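/*
 * Note: the table is laid out as four entries per ModR/M reg value, one for
 * each mandatory prefix; this sketch assumes idxPrefix encodes them as
 * 0=none, 1=0x66, 2=0xf3, 3=0xf2, matching the column order above. The
 * iemOp_Grp12 dispatcher below performs exactly this lookup:
 *
 *  unsigned const idx = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
 *                     + pVCpu->iem.s.idxPrefix;
 *  return FNIEMOP_CALL_1(g_apfnGroup12RegReg[idx], bRm);
 */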
3797
3798/** Opcode 0x0f 0x71. */
3799FNIEMOP_DEF(iemOp_Grp12)
3800{
3801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3803 /* register, register */
3804 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3805 + pVCpu->iem.s.idxPrefix], bRm);
3806 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3807}
3808
3809
3810/** Opcode 0x0f 0x72 11/2. */
3811FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3812
3813/** Opcode 0x66 0x0f 0x72 11/2. */
3814FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3815
3816/** Opcode 0x0f 0x72 11/4. */
3817FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3818
3819/** Opcode 0x66 0x0f 0x72 11/4. */
3820FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3821
3822/** Opcode 0x0f 0x72 11/6. */
3823FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3824
3825/** Opcode 0x66 0x0f 0x72 11/6. */
3826FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3827
3828
3829/**
3830 * Group 13 jump table for register variant.
3831 */
3832IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3833{
3834 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3835 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3836 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3837 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3838 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3839 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3840 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3841 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3842};
3843AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3844
3845/** Opcode 0x0f 0x72. */
3846FNIEMOP_DEF(iemOp_Grp13)
3847{
3848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3849 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3850 /* register, register */
3851 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3852 + pVCpu->iem.s.idxPrefix], bRm);
3853 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3854}
3855
3856
3857/** Opcode 0x0f 0x73 11/2. */
3858FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3859
3860/** Opcode 0x66 0x0f 0x73 11/2. */
3861FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3862
3863/** Opcode 0x66 0x0f 0x73 11/3. */
3864FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3865
3866/** Opcode 0x0f 0x73 11/6. */
3867FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3868
3869/** Opcode 0x66 0x0f 0x73 11/6. */
3870FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3871
3872/** Opcode 0x66 0x0f 0x73 11/7. */
3873FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3874
3875/**
3876 * Group 14 jump table for register variant.
3877 */
3878IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3879{
3880 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3881 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3882 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3883 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3884 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3885 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3886 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3887 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3888};
3889AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3890
3891
3892/** Opcode 0x0f 0x73. */
3893FNIEMOP_DEF(iemOp_Grp14)
3894{
3895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3896 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3897 /* register, register */
3898 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3899 + pVCpu->iem.s.idxPrefix], bRm);
3900 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3901}
3902
3903
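/*
 * Note: groups 12, 13 and 14 share this decode shape; only the lane width
 * (word/dword/qword) and the shift direction differ. When the stubs above
 * get implemented they are expected to follow the usual x86 SIMD rule that
 * an immediate count >= the lane width clears the lane. Hedged sketch for
 * psrlw by immediate (helper name is illustrative only):
 *
 *  static void psrlwImmRefSketch(uint16_t auDst[4], uint8_t bImm)
 *  {
 *      for (unsigned iWord = 0; iWord < 4; iWord++)
 *          auDst[iWord] = bImm < 16 ? (uint16_t)(auDst[iWord] >> bImm) : 0;
 *  }
 */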
3904/**
3905 * Common worker for MMX instructions on the form:
3906 * pxxx mm1, mm2/mem64
3907 */
3908FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3909{
3910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3911 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3912 {
3913 /*
3914 * Register, register.
3915 */
3916 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3917 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3919 IEM_MC_BEGIN(2, 0);
3920 IEM_MC_ARG(uint64_t *, pDst, 0);
3921 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3922 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3923 IEM_MC_PREPARE_FPU_USAGE();
3924 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3925 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3926 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3927 IEM_MC_ADVANCE_RIP();
3928 IEM_MC_END();
3929 }
3930 else
3931 {
3932 /*
3933 * Register, memory.
3934 */
3935 IEM_MC_BEGIN(2, 2);
3936 IEM_MC_ARG(uint64_t *, pDst, 0);
3937 IEM_MC_LOCAL(uint64_t, uSrc);
3938 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3940
3941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3943 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3944 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3945
3946 IEM_MC_PREPARE_FPU_USAGE();
3947 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3948 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3949
3950 IEM_MC_ADVANCE_RIP();
3951 IEM_MC_END();
3952 }
3953 return VINF_SUCCESS;
3954}
3955
3956
3957/**
3958 * Common worker for SSE2 instructions on the form:
3959 * pxxx xmm1, xmm2/mem128
3960 *
3961 * Proper alignment of the 128-bit operand is enforced.
3962 * Exceptions type 4. SSE2 cpuid checks.
3963 */
3964FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3965{
3966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3967 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3968 {
3969 /*
3970 * Register, register.
3971 */
3972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3973 IEM_MC_BEGIN(2, 0);
3974 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3975 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3976 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3977 IEM_MC_PREPARE_SSE_USAGE();
3978 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3979 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3980 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3981 IEM_MC_ADVANCE_RIP();
3982 IEM_MC_END();
3983 }
3984 else
3985 {
3986 /*
3987 * Register, memory.
3988 */
3989 IEM_MC_BEGIN(2, 2);
3990 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3991 IEM_MC_LOCAL(RTUINT128U, uSrc);
3992 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3994
3995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3997 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3998 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3999
4000 IEM_MC_PREPARE_SSE_USAGE();
4001 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4002 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4003
4004 IEM_MC_ADVANCE_RIP();
4005 IEM_MC_END();
4006 }
4007 return VINF_SUCCESS;
4008}
4009
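/*
 * Note: these two workers let a single PCIEMOPMEDIAF2 table entry serve both
 * the MMX (pfnU64) and SSE2 (pfnU128) forms of an instruction, as the
 * pcmpeq* handlers below demonstrate. Conceptual sketch of that pairing;
 * the real structure is defined elsewhere in IEM and may carry more:
 *
 *  typedef struct IEMOPMEDIAF2SKETCH
 *  {
 *      void (*pfnU64)( uint64_t *pu64Dst, uint64_t const *pu64Src);  // pxxx mm1, mm2/mem64
 *      void (*pfnU128)(PRTUINT128U puDst, PCRTUINT128U    puSrc);    // pxxx xmm1, xmm2/mem128
 *  } IEMOPMEDIAF2SKETCH;
 */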
4010
4011/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4012FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4013{
4014 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4015 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4016}
4017
4018/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4019FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4020{
4021 IEMOP_MNEMONIC(vpcmpeqb_Vx_Wx, "pcmpeqb");
4022 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4023}
4024
4025/* Opcode 0xf3 0x0f 0x74 - invalid */
4026/* Opcode 0xf2 0x0f 0x74 - invalid */
4027
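/*
 * Note: reference semantics sketch for the pcmpeq* family (the real work is
 * done by the g_iemAImpl_pcmpeq* workers): lanes that compare equal become
 * all ones, the rest all zeroes. Byte-lane example at MMX width, helper
 * name illustrative:
 *
 *  static void pcmpeqbRefSketch(uint8_t abDst[8], uint8_t const abSrc[8])
 *  {
 *      for (unsigned iByte = 0; iByte < 8; iByte++)
 *          abDst[iByte] = abDst[iByte] == abSrc[iByte] ? UINT8_C(0xff) : UINT8_C(0x00);
 *  }
 */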
4028
4029/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4030FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4031{
4032 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4033 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4034}
4035
4036/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4037FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4038{
4039 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4040 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4041}
4042
4043/* Opcode 0xf3 0x0f 0x75 - invalid */
4044/* Opcode 0xf2 0x0f 0x75 - invalid */
4045
4046
4047/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4048FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4049{
4050 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4051 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4052}
4053
4054/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4055FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4056{
4057 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "vpcmpeqd");
4058 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4059}
4060
4061/* Opcode 0xf3 0x0f 0x76 - invalid */
4062/* Opcode 0xf2 0x0f 0x76 - invalid */
4063
4064
4065/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4066FNIEMOP_STUB(iemOp_emms);
4067/* Opcode 0x66 0x0f 0x77 - invalid */
4068/* Opcode 0xf3 0x0f 0x77 - invalid */
4069/* Opcode 0xf2 0x0f 0x77 - invalid */
4070
4071/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4072FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4073/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4074FNIEMOP_STUB(iemOp_AmdGrp17);
4075/* Opcode 0xf3 0x0f 0x78 - invalid */
4076/* Opcode 0xf2 0x0f 0x78 - invalid */
4077
4078/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4079FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4080/* Opcode 0x66 0x0f 0x79 - invalid */
4081/* Opcode 0xf3 0x0f 0x79 - invalid */
4082/* Opcode 0xf2 0x0f 0x79 - invalid */
4083
4084/* Opcode 0x0f 0x7a - invalid */
4085/* Opcode 0x66 0x0f 0x7a - invalid */
4086/* Opcode 0xf3 0x0f 0x7a - invalid */
4087/* Opcode 0xf2 0x0f 0x7a - invalid */
4088
4089/* Opcode 0x0f 0x7b - invalid */
4090/* Opcode 0x66 0x0f 0x7b - invalid */
4091/* Opcode 0xf3 0x0f 0x7b - invalid */
4092/* Opcode 0xf2 0x0f 0x7b - invalid */
4093
4094/* Opcode 0x0f 0x7c - invalid */
4095/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4096FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4097/* Opcode 0xf3 0x0f 0x7c - invalid */
4098/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4099FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4100
4101/* Opcode 0x0f 0x7d - invalid */
4102/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4103FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4104/* Opcode 0xf3 0x0f 0x7d - invalid */
4105/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4106FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4107
4108
4109/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4110FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4111{
4112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4113 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4114 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4115 else
4116 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4118 {
4119 /* greg, MMX */
4120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4121 IEM_MC_BEGIN(0, 1);
4122 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4123 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4124 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4125 {
4126 IEM_MC_LOCAL(uint64_t, u64Tmp);
4127 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4128 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4129 }
4130 else
4131 {
4132 IEM_MC_LOCAL(uint32_t, u32Tmp);
4133 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4134 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4135 }
4136 IEM_MC_ADVANCE_RIP();
4137 IEM_MC_END();
4138 }
4139 else
4140 {
4141 /* [mem], MMX */
4142 IEM_MC_BEGIN(0, 2);
4143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4144 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* movd/movq takes no immediate -> cbImm=0 */
4146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4147 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4148 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4149 {
4150 IEM_MC_LOCAL(uint64_t, u64Tmp);
4151 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4152 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4153 }
4154 else
4155 {
4156 IEM_MC_LOCAL(uint32_t, u32Tmp);
4157 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4158 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4159 }
4160 IEM_MC_ADVANCE_RIP();
4161 IEM_MC_END();
4162 }
4163 return VINF_SUCCESS;
4164}
4165
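/*
 * Note: REX.W selects between the movq (64-bit) and movd (32-bit) forms
 * above. In 64-bit mode the 32-bit general register store is expected to
 * zero bits 63:32 per the usual x86-64 rule, so the movd path effectively
 * does (names illustrative):
 *
 *  uint32_t const u32Low = (uint32_t)u64MmxValue;  // low dword of the MMX register
 *  // GPR destination: u32Low is zero-extended to 64 bits by the U32 store.
 *  // Memory destination: exactly four bytes are written.
 */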
4166/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4167FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4168{
4169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4170 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4171 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4172 else
4173 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4174 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4175 {
4176 /* greg, XMM */
4177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4178 IEM_MC_BEGIN(0, 1);
4179 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4180 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4181 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4182 {
4183 IEM_MC_LOCAL(uint64_t, u64Tmp);
4184 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4185 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4186 }
4187 else
4188 {
4189 IEM_MC_LOCAL(uint32_t, u32Tmp);
4190 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4191 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4192 }
4193 IEM_MC_ADVANCE_RIP();
4194 IEM_MC_END();
4195 }
4196 else
4197 {
4198 /* [mem], XMM */
4199 IEM_MC_BEGIN(0, 2);
4200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4201 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* movd/movq takes no immediate -> cbImm=0 */
4203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4204 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4205 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4206 {
4207 IEM_MC_LOCAL(uint64_t, u64Tmp);
4208 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4209 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4210 }
4211 else
4212 {
4213 IEM_MC_LOCAL(uint32_t, u32Tmp);
4214 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4215 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4216 }
4217 IEM_MC_ADVANCE_RIP();
4218 IEM_MC_END();
4219 }
4220 return VINF_SUCCESS;
4221}
4222
4223/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
4224FNIEMOP_STUB(iemOp_movq_Vq_Wq);
4225/* Opcode 0xf2 0x0f 0x7e - invalid */
4226
4227
4228/** Opcode 0x0f 0x7f - movq Qq, Pq */
4229FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4230{
4231 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4232 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4233 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4234 {
4235 /*
4236 * Register, register.
4237 */
4238 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4239 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4241 IEM_MC_BEGIN(0, 1);
4242 IEM_MC_LOCAL(uint64_t, u64Tmp);
4243 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4244 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4245 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4246 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4247 IEM_MC_ADVANCE_RIP();
4248 IEM_MC_END();
4249 }
4250 else
4251 {
4252 /*
4253 * Register, memory.
4254 */
4255 IEM_MC_BEGIN(0, 2);
4256 IEM_MC_LOCAL(uint64_t, u64Tmp);
4257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4258
4259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4261 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4262 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4263
4264 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4265 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4266
4267 IEM_MC_ADVANCE_RIP();
4268 IEM_MC_END();
4269 }
4270 return VINF_SUCCESS;
4271}
4272
4273/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4274FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4275{
4276 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4279 {
4280 /*
4281 * Register, register.
4282 */
4283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4284 IEM_MC_BEGIN(0, 0);
4285 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4286 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4287 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4288 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4289 IEM_MC_ADVANCE_RIP();
4290 IEM_MC_END();
4291 }
4292 else
4293 {
4294 /*
4295 * Register, memory.
4296 */
4297 IEM_MC_BEGIN(0, 2);
4298 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4300
4301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4303 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4304 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4305
4306 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4307 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4308
4309 IEM_MC_ADVANCE_RIP();
4310 IEM_MC_END();
4311 }
4312 return VINF_SUCCESS;
4313}
4314
4315/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4316FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4317{
4318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4319 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4320 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4321 {
4322 /*
4323 * Register, register.
4324 */
4325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4326 IEM_MC_BEGIN(0, 0);
4327 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4328 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4329 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4330 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4331 IEM_MC_ADVANCE_RIP();
4332 IEM_MC_END();
4333 }
4334 else
4335 {
4336 /*
4337 * Register, memory.
4338 */
4339 IEM_MC_BEGIN(0, 2);
4340 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4342
4343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4345 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4346 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4347
4348 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4349 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4350
4351 IEM_MC_ADVANCE_RIP();
4352 IEM_MC_END();
4353 }
4354 return VINF_SUCCESS;
4355}
4356
4357/* Opcode 0xf2 0x0f 0x7f - invalid */
4358
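/*
 * Note: the only difference between the movdqa and movdqu stores above is
 * the alignment contract: IEM_MC_STORE_MEM_U128_ALIGN_SSE enforces movdqa's
 * 16-byte requirement while the plain U128 store does not. Sketch of the
 * check the aligned path is expected to make (a misaligned movdqa access
 * raises \#GP(0)):
 *
 *  if (GCPtrEffSrc & 15)  // 128-bit SSE operands must be 16-byte aligned
 *      return iemRaiseGeneralProtectionFault0(pVCpu);
 */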
4359
4360
4361/** Opcode 0x0f 0x80. */
4362FNIEMOP_DEF(iemOp_jo_Jv)
4363{
4364 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4365 IEMOP_HLP_MIN_386();
4366 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4367 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4368 {
4369 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4371
4372 IEM_MC_BEGIN(0, 0);
4373 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4374 IEM_MC_REL_JMP_S16(i16Imm);
4375 } IEM_MC_ELSE() {
4376 IEM_MC_ADVANCE_RIP();
4377 } IEM_MC_ENDIF();
4378 IEM_MC_END();
4379 }
4380 else
4381 {
4382 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4384
4385 IEM_MC_BEGIN(0, 0);
4386 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4387 IEM_MC_REL_JMP_S32(i32Imm);
4388 } IEM_MC_ELSE() {
4389 IEM_MC_ADVANCE_RIP();
4390 } IEM_MC_ENDIF();
4391 IEM_MC_END();
4392 }
4393 return VINF_SUCCESS;
4394}
4395
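/*
 * Note: the 0x0f 0x80..0x8f handlers below all follow the same shape; only
 * the EFLAGS predicate wired up via the IEM_MC_IF_EFL_* macros changes.
 * Hedged sketch of the predicate logic (helper name illustrative; the low
 * opcode bit selects the negated form of each condition pair):
 *
 *  static bool iemJccSketchHolds(uint32_t fEfl, uint8_t bOpcodeLow)
 *  {
 *      bool fRet;
 *      switch (bOpcodeLow & 0xe)
 *      {
 *          case 0x0: fRet = RT_BOOL(fEfl & X86_EFL_OF); break;                    // jo/jno
 *          case 0x2: fRet = RT_BOOL(fEfl & X86_EFL_CF); break;                    // jc/jnc
 *          case 0x4: fRet = RT_BOOL(fEfl & X86_EFL_ZF); break;                    // je/jne
 *          case 0x6: fRet = RT_BOOL(fEfl & (X86_EFL_CF | X86_EFL_ZF)); break;     // jbe/jnbe
 *          case 0x8: fRet = RT_BOOL(fEfl & X86_EFL_SF); break;                    // js/jns
 *          case 0xa: fRet = RT_BOOL(fEfl & X86_EFL_PF); break;                    // jp/jnp
 *          case 0xc: fRet = !(fEfl & X86_EFL_SF) != !(fEfl & X86_EFL_OF); break;  // jl/jnl
 *          default:  fRet = RT_BOOL(fEfl & X86_EFL_ZF)
 *                        || !(fEfl & X86_EFL_SF) != !(fEfl & X86_EFL_OF); break;  // jle/jnle
 *      }
 *      return (bOpcodeLow & 1) ? !fRet : fRet;
 *  }
 */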
4396
4397/** Opcode 0x0f 0x81. */
4398FNIEMOP_DEF(iemOp_jno_Jv)
4399{
4400 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4401 IEMOP_HLP_MIN_386();
4402 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4403 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4404 {
4405 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4407
4408 IEM_MC_BEGIN(0, 0);
4409 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4410 IEM_MC_ADVANCE_RIP();
4411 } IEM_MC_ELSE() {
4412 IEM_MC_REL_JMP_S16(i16Imm);
4413 } IEM_MC_ENDIF();
4414 IEM_MC_END();
4415 }
4416 else
4417 {
4418 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4420
4421 IEM_MC_BEGIN(0, 0);
4422 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4423 IEM_MC_ADVANCE_RIP();
4424 } IEM_MC_ELSE() {
4425 IEM_MC_REL_JMP_S32(i32Imm);
4426 } IEM_MC_ENDIF();
4427 IEM_MC_END();
4428 }
4429 return VINF_SUCCESS;
4430}
4431
4432
4433/** Opcode 0x0f 0x82. */
4434FNIEMOP_DEF(iemOp_jc_Jv)
4435{
4436 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4437 IEMOP_HLP_MIN_386();
4438 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4439 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4440 {
4441 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4443
4444 IEM_MC_BEGIN(0, 0);
4445 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4446 IEM_MC_REL_JMP_S16(i16Imm);
4447 } IEM_MC_ELSE() {
4448 IEM_MC_ADVANCE_RIP();
4449 } IEM_MC_ENDIF();
4450 IEM_MC_END();
4451 }
4452 else
4453 {
4454 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4456
4457 IEM_MC_BEGIN(0, 0);
4458 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4459 IEM_MC_REL_JMP_S32(i32Imm);
4460 } IEM_MC_ELSE() {
4461 IEM_MC_ADVANCE_RIP();
4462 } IEM_MC_ENDIF();
4463 IEM_MC_END();
4464 }
4465 return VINF_SUCCESS;
4466}
4467
4468
4469/** Opcode 0x0f 0x83. */
4470FNIEMOP_DEF(iemOp_jnc_Jv)
4471{
4472 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4473 IEMOP_HLP_MIN_386();
4474 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4475 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4476 {
4477 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4479
4480 IEM_MC_BEGIN(0, 0);
4481 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4482 IEM_MC_ADVANCE_RIP();
4483 } IEM_MC_ELSE() {
4484 IEM_MC_REL_JMP_S16(i16Imm);
4485 } IEM_MC_ENDIF();
4486 IEM_MC_END();
4487 }
4488 else
4489 {
4490 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4492
4493 IEM_MC_BEGIN(0, 0);
4494 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4495 IEM_MC_ADVANCE_RIP();
4496 } IEM_MC_ELSE() {
4497 IEM_MC_REL_JMP_S32(i32Imm);
4498 } IEM_MC_ENDIF();
4499 IEM_MC_END();
4500 }
4501 return VINF_SUCCESS;
4502}
4503
4504
4505/** Opcode 0x0f 0x84. */
4506FNIEMOP_DEF(iemOp_je_Jv)
4507{
4508 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4509 IEMOP_HLP_MIN_386();
4510 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4511 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4512 {
4513 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4515
4516 IEM_MC_BEGIN(0, 0);
4517 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4518 IEM_MC_REL_JMP_S16(i16Imm);
4519 } IEM_MC_ELSE() {
4520 IEM_MC_ADVANCE_RIP();
4521 } IEM_MC_ENDIF();
4522 IEM_MC_END();
4523 }
4524 else
4525 {
4526 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4528
4529 IEM_MC_BEGIN(0, 0);
4530 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4531 IEM_MC_REL_JMP_S32(i32Imm);
4532 } IEM_MC_ELSE() {
4533 IEM_MC_ADVANCE_RIP();
4534 } IEM_MC_ENDIF();
4535 IEM_MC_END();
4536 }
4537 return VINF_SUCCESS;
4538}
4539
4540
4541/** Opcode 0x0f 0x85. */
4542FNIEMOP_DEF(iemOp_jne_Jv)
4543{
4544 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4545 IEMOP_HLP_MIN_386();
4546 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4547 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4548 {
4549 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4551
4552 IEM_MC_BEGIN(0, 0);
4553 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4554 IEM_MC_ADVANCE_RIP();
4555 } IEM_MC_ELSE() {
4556 IEM_MC_REL_JMP_S16(i16Imm);
4557 } IEM_MC_ENDIF();
4558 IEM_MC_END();
4559 }
4560 else
4561 {
4562 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4564
4565 IEM_MC_BEGIN(0, 0);
4566 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4567 IEM_MC_ADVANCE_RIP();
4568 } IEM_MC_ELSE() {
4569 IEM_MC_REL_JMP_S32(i32Imm);
4570 } IEM_MC_ENDIF();
4571 IEM_MC_END();
4572 }
4573 return VINF_SUCCESS;
4574}
4575
4576
4577/** Opcode 0x0f 0x86. */
4578FNIEMOP_DEF(iemOp_jbe_Jv)
4579{
4580 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4581 IEMOP_HLP_MIN_386();
4582 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4583 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4584 {
4585 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4587
4588 IEM_MC_BEGIN(0, 0);
4589 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4590 IEM_MC_REL_JMP_S16(i16Imm);
4591 } IEM_MC_ELSE() {
4592 IEM_MC_ADVANCE_RIP();
4593 } IEM_MC_ENDIF();
4594 IEM_MC_END();
4595 }
4596 else
4597 {
4598 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4600
4601 IEM_MC_BEGIN(0, 0);
4602 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4603 IEM_MC_REL_JMP_S32(i32Imm);
4604 } IEM_MC_ELSE() {
4605 IEM_MC_ADVANCE_RIP();
4606 } IEM_MC_ENDIF();
4607 IEM_MC_END();
4608 }
4609 return VINF_SUCCESS;
4610}
4611
4612
4613/** Opcode 0x0f 0x87. */
4614FNIEMOP_DEF(iemOp_jnbe_Jv)
4615{
4616 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4617 IEMOP_HLP_MIN_386();
4618 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4619 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4620 {
4621 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4623
4624 IEM_MC_BEGIN(0, 0);
4625 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4626 IEM_MC_ADVANCE_RIP();
4627 } IEM_MC_ELSE() {
4628 IEM_MC_REL_JMP_S16(i16Imm);
4629 } IEM_MC_ENDIF();
4630 IEM_MC_END();
4631 }
4632 else
4633 {
4634 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4636
4637 IEM_MC_BEGIN(0, 0);
4638 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4639 IEM_MC_ADVANCE_RIP();
4640 } IEM_MC_ELSE() {
4641 IEM_MC_REL_JMP_S32(i32Imm);
4642 } IEM_MC_ENDIF();
4643 IEM_MC_END();
4644 }
4645 return VINF_SUCCESS;
4646}
4647
4648
4649/** Opcode 0x0f 0x88. */
4650FNIEMOP_DEF(iemOp_js_Jv)
4651{
4652 IEMOP_MNEMONIC(js_Jv, "js Jv");
4653 IEMOP_HLP_MIN_386();
4654 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4655 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4656 {
4657 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4659
4660 IEM_MC_BEGIN(0, 0);
4661 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4662 IEM_MC_REL_JMP_S16(i16Imm);
4663 } IEM_MC_ELSE() {
4664 IEM_MC_ADVANCE_RIP();
4665 } IEM_MC_ENDIF();
4666 IEM_MC_END();
4667 }
4668 else
4669 {
4670 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4672
4673 IEM_MC_BEGIN(0, 0);
4674 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4675 IEM_MC_REL_JMP_S32(i32Imm);
4676 } IEM_MC_ELSE() {
4677 IEM_MC_ADVANCE_RIP();
4678 } IEM_MC_ENDIF();
4679 IEM_MC_END();
4680 }
4681 return VINF_SUCCESS;
4682}
4683
4684
4685/** Opcode 0x0f 0x89. */
4686FNIEMOP_DEF(iemOp_jns_Jv)
4687{
4688 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4689 IEMOP_HLP_MIN_386();
4690 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4691 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4692 {
4693 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4695
4696 IEM_MC_BEGIN(0, 0);
4697 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4698 IEM_MC_ADVANCE_RIP();
4699 } IEM_MC_ELSE() {
4700 IEM_MC_REL_JMP_S16(i16Imm);
4701 } IEM_MC_ENDIF();
4702 IEM_MC_END();
4703 }
4704 else
4705 {
4706 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4708
4709 IEM_MC_BEGIN(0, 0);
4710 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4711 IEM_MC_ADVANCE_RIP();
4712 } IEM_MC_ELSE() {
4713 IEM_MC_REL_JMP_S32(i32Imm);
4714 } IEM_MC_ENDIF();
4715 IEM_MC_END();
4716 }
4717 return VINF_SUCCESS;
4718}
4719
4720
4721/** Opcode 0x0f 0x8a. */
4722FNIEMOP_DEF(iemOp_jp_Jv)
4723{
4724 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4725 IEMOP_HLP_MIN_386();
4726 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4727 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4728 {
4729 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4731
4732 IEM_MC_BEGIN(0, 0);
4733 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4734 IEM_MC_REL_JMP_S16(i16Imm);
4735 } IEM_MC_ELSE() {
4736 IEM_MC_ADVANCE_RIP();
4737 } IEM_MC_ENDIF();
4738 IEM_MC_END();
4739 }
4740 else
4741 {
4742 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4744
4745 IEM_MC_BEGIN(0, 0);
4746 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4747 IEM_MC_REL_JMP_S32(i32Imm);
4748 } IEM_MC_ELSE() {
4749 IEM_MC_ADVANCE_RIP();
4750 } IEM_MC_ENDIF();
4751 IEM_MC_END();
4752 }
4753 return VINF_SUCCESS;
4754}
4755
4756
4757/** Opcode 0x0f 0x8b. */
4758FNIEMOP_DEF(iemOp_jnp_Jv)
4759{
4760 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4761 IEMOP_HLP_MIN_386();
4762 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4763 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4764 {
4765 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4767
4768 IEM_MC_BEGIN(0, 0);
4769 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4770 IEM_MC_ADVANCE_RIP();
4771 } IEM_MC_ELSE() {
4772 IEM_MC_REL_JMP_S16(i16Imm);
4773 } IEM_MC_ENDIF();
4774 IEM_MC_END();
4775 }
4776 else
4777 {
4778 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4780
4781 IEM_MC_BEGIN(0, 0);
4782 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4783 IEM_MC_ADVANCE_RIP();
4784 } IEM_MC_ELSE() {
4785 IEM_MC_REL_JMP_S32(i32Imm);
4786 } IEM_MC_ENDIF();
4787 IEM_MC_END();
4788 }
4789 return VINF_SUCCESS;
4790}
4791
4792
4793/** Opcode 0x0f 0x8c. */
4794FNIEMOP_DEF(iemOp_jl_Jv)
4795{
4796 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4797 IEMOP_HLP_MIN_386();
4798 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4799 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4800 {
4801 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4803
4804 IEM_MC_BEGIN(0, 0);
4805 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4806 IEM_MC_REL_JMP_S16(i16Imm);
4807 } IEM_MC_ELSE() {
4808 IEM_MC_ADVANCE_RIP();
4809 } IEM_MC_ENDIF();
4810 IEM_MC_END();
4811 }
4812 else
4813 {
4814 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4816
4817 IEM_MC_BEGIN(0, 0);
4818 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4819 IEM_MC_REL_JMP_S32(i32Imm);
4820 } IEM_MC_ELSE() {
4821 IEM_MC_ADVANCE_RIP();
4822 } IEM_MC_ENDIF();
4823 IEM_MC_END();
4824 }
4825 return VINF_SUCCESS;
4826}
4827
4828
4829/** Opcode 0x0f 0x8d. */
4830FNIEMOP_DEF(iemOp_jnl_Jv)
4831{
4832 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4833 IEMOP_HLP_MIN_386();
4834 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4835 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4836 {
4837 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4839
4840 IEM_MC_BEGIN(0, 0);
4841 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4842 IEM_MC_ADVANCE_RIP();
4843 } IEM_MC_ELSE() {
4844 IEM_MC_REL_JMP_S16(i16Imm);
4845 } IEM_MC_ENDIF();
4846 IEM_MC_END();
4847 }
4848 else
4849 {
4850 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4852
4853 IEM_MC_BEGIN(0, 0);
4854 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4855 IEM_MC_ADVANCE_RIP();
4856 } IEM_MC_ELSE() {
4857 IEM_MC_REL_JMP_S32(i32Imm);
4858 } IEM_MC_ENDIF();
4859 IEM_MC_END();
4860 }
4861 return VINF_SUCCESS;
4862}
4863
4864
4865/** Opcode 0x0f 0x8e. */
4866FNIEMOP_DEF(iemOp_jle_Jv)
4867{
4868 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4869 IEMOP_HLP_MIN_386();
4870 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4871 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4872 {
4873 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4875
4876 IEM_MC_BEGIN(0, 0);
4877 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4878 IEM_MC_REL_JMP_S16(i16Imm);
4879 } IEM_MC_ELSE() {
4880 IEM_MC_ADVANCE_RIP();
4881 } IEM_MC_ENDIF();
4882 IEM_MC_END();
4883 }
4884 else
4885 {
4886 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4888
4889 IEM_MC_BEGIN(0, 0);
4890 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4891 IEM_MC_REL_JMP_S32(i32Imm);
4892 } IEM_MC_ELSE() {
4893 IEM_MC_ADVANCE_RIP();
4894 } IEM_MC_ENDIF();
4895 IEM_MC_END();
4896 }
4897 return VINF_SUCCESS;
4898}
4899
4900
4901/** Opcode 0x0f 0x8f. */
4902FNIEMOP_DEF(iemOp_jnle_Jv)
4903{
4904 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4905 IEMOP_HLP_MIN_386();
4906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4907 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4908 {
4909 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4911
4912 IEM_MC_BEGIN(0, 0);
4913 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4914 IEM_MC_ADVANCE_RIP();
4915 } IEM_MC_ELSE() {
4916 IEM_MC_REL_JMP_S16(i16Imm);
4917 } IEM_MC_ENDIF();
4918 IEM_MC_END();
4919 }
4920 else
4921 {
4922 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4924
4925 IEM_MC_BEGIN(0, 0);
4926 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4927 IEM_MC_ADVANCE_RIP();
4928 } IEM_MC_ELSE() {
4929 IEM_MC_REL_JMP_S32(i32Imm);
4930 } IEM_MC_ENDIF();
4931 IEM_MC_END();
4932 }
4933 return VINF_SUCCESS;
4934}
4935
4936
4937/** Opcode 0x0f 0x90. */
4938FNIEMOP_DEF(iemOp_seto_Eb)
4939{
4940 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4941 IEMOP_HLP_MIN_386();
4942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4943
4944 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4945 * any way. AMD says it's "unused", whatever that means. We're
4946 * ignoring for now. */
4947 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4948 {
4949 /* register target */
4950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4951 IEM_MC_BEGIN(0, 0);
4952 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4953 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4954 } IEM_MC_ELSE() {
4955 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4956 } IEM_MC_ENDIF();
4957 IEM_MC_ADVANCE_RIP();
4958 IEM_MC_END();
4959 }
4960 else
4961 {
4962 /* memory target */
4963 IEM_MC_BEGIN(0, 1);
4964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4967 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4968 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4969 } IEM_MC_ELSE() {
4970 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4971 } IEM_MC_ENDIF();
4972 IEM_MC_ADVANCE_RIP();
4973 IEM_MC_END();
4974 }
4975 return VINF_SUCCESS;
4976}
4977
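/*
 * Note: the 0x0f 0x90..0x9f handlers reuse the same EFLAGS predicates as the
 * Jcc family above, but store a 0/1 byte instead of branching (and ignore
 * the ModR/M reg field, see the @todo remarks). Minimal sketch, reusing the
 * illustrative iemJccSketchHolds helper sketched earlier (not a real IEM
 * function):
 *
 *  uint8_t const bValue = iemJccSketchHolds(fEfl, bOpcodeLow) ? 1 : 0;
 *  // ... then stored via IEM_MC_STORE_GREG_U8_CONST or IEM_MC_STORE_MEM_U8_CONST.
 */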
4978
4979/** Opcode 0x0f 0x91. */
4980FNIEMOP_DEF(iemOp_setno_Eb)
4981{
4982 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4983 IEMOP_HLP_MIN_386();
4984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4985
4986 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4987 * any way. AMD says it's "unused", whatever that means. We're
4988 * ignoring for now. */
4989 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4990 {
4991 /* register target */
4992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4993 IEM_MC_BEGIN(0, 0);
4994 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4995 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4996 } IEM_MC_ELSE() {
4997 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4998 } IEM_MC_ENDIF();
4999 IEM_MC_ADVANCE_RIP();
5000 IEM_MC_END();
5001 }
5002 else
5003 {
5004 /* memory target */
5005 IEM_MC_BEGIN(0, 1);
5006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5009 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5010 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5011 } IEM_MC_ELSE() {
5012 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5013 } IEM_MC_ENDIF();
5014 IEM_MC_ADVANCE_RIP();
5015 IEM_MC_END();
5016 }
5017 return VINF_SUCCESS;
5018}
5019
5020
5021/** Opcode 0x0f 0x92. */
5022FNIEMOP_DEF(iemOp_setc_Eb)
5023{
5024 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5025 IEMOP_HLP_MIN_386();
5026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5027
5028 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5029 * any way. AMD says it's "unused", whatever that means. We're
5030 * ignoring for now. */
5031 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5032 {
5033 /* register target */
5034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5035 IEM_MC_BEGIN(0, 0);
5036 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5037 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5038 } IEM_MC_ELSE() {
5039 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5040 } IEM_MC_ENDIF();
5041 IEM_MC_ADVANCE_RIP();
5042 IEM_MC_END();
5043 }
5044 else
5045 {
5046 /* memory target */
5047 IEM_MC_BEGIN(0, 1);
5048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5051 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5052 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5053 } IEM_MC_ELSE() {
5054 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5055 } IEM_MC_ENDIF();
5056 IEM_MC_ADVANCE_RIP();
5057 IEM_MC_END();
5058 }
5059 return VINF_SUCCESS;
5060}
5061
5062
5063/** Opcode 0x0f 0x93. */
5064FNIEMOP_DEF(iemOp_setnc_Eb)
5065{
5066 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5067 IEMOP_HLP_MIN_386();
5068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5069
5070 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5071 * any way. AMD says it's "unused", whatever that means. We're
5072 * ignoring for now. */
5073 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5074 {
5075 /* register target */
5076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5077 IEM_MC_BEGIN(0, 0);
5078 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5079 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5080 } IEM_MC_ELSE() {
5081 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5082 } IEM_MC_ENDIF();
5083 IEM_MC_ADVANCE_RIP();
5084 IEM_MC_END();
5085 }
5086 else
5087 {
5088 /* memory target */
5089 IEM_MC_BEGIN(0, 1);
5090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5093 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5094 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5095 } IEM_MC_ELSE() {
5096 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5097 } IEM_MC_ENDIF();
5098 IEM_MC_ADVANCE_RIP();
5099 IEM_MC_END();
5100 }
5101 return VINF_SUCCESS;
5102}
5103
5104
5105/** Opcode 0x0f 0x94. */
5106FNIEMOP_DEF(iemOp_sete_Eb)
5107{
5108 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5109 IEMOP_HLP_MIN_386();
5110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5111
5112 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5113 * any way. AMD says it's "unused", whatever that means. We're
5114 * ignoring for now. */
5115 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5116 {
5117 /* register target */
5118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5119 IEM_MC_BEGIN(0, 0);
5120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5121 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5122 } IEM_MC_ELSE() {
5123 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5124 } IEM_MC_ENDIF();
5125 IEM_MC_ADVANCE_RIP();
5126 IEM_MC_END();
5127 }
5128 else
5129 {
5130 /* memory target */
5131 IEM_MC_BEGIN(0, 1);
5132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5135 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5136 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5137 } IEM_MC_ELSE() {
5138 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5139 } IEM_MC_ENDIF();
5140 IEM_MC_ADVANCE_RIP();
5141 IEM_MC_END();
5142 }
5143 return VINF_SUCCESS;
5144}
5145
5146
5147/** Opcode 0x0f 0x95. */
5148FNIEMOP_DEF(iemOp_setne_Eb)
5149{
5150 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5151 IEMOP_HLP_MIN_386();
5152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5153
5154 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5155 * any way. AMD says it's "unused", whatever that means. We're
5156 * ignoring for now. */
5157 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5158 {
5159 /* register target */
5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5161 IEM_MC_BEGIN(0, 0);
5162 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5163 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5164 } IEM_MC_ELSE() {
5165 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5166 } IEM_MC_ENDIF();
5167 IEM_MC_ADVANCE_RIP();
5168 IEM_MC_END();
5169 }
5170 else
5171 {
5172 /* memory target */
5173 IEM_MC_BEGIN(0, 1);
5174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5177 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5178 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5179 } IEM_MC_ELSE() {
5180 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5181 } IEM_MC_ENDIF();
5182 IEM_MC_ADVANCE_RIP();
5183 IEM_MC_END();
5184 }
5185 return VINF_SUCCESS;
5186}
5187
5188
5189/** Opcode 0x0f 0x96. */
5190FNIEMOP_DEF(iemOp_setbe_Eb)
5191{
5192 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5193 IEMOP_HLP_MIN_386();
5194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5195
5196 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5197 * any way. AMD says it's "unused", whatever that means. We're
5198 * ignoring for now. */
5199 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5200 {
5201 /* register target */
5202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5203 IEM_MC_BEGIN(0, 0);
5204 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5205 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5206 } IEM_MC_ELSE() {
5207 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5208 } IEM_MC_ENDIF();
5209 IEM_MC_ADVANCE_RIP();
5210 IEM_MC_END();
5211 }
5212 else
5213 {
5214 /* memory target */
5215 IEM_MC_BEGIN(0, 1);
5216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5219 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5220 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5221 } IEM_MC_ELSE() {
5222 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5223 } IEM_MC_ENDIF();
5224 IEM_MC_ADVANCE_RIP();
5225 IEM_MC_END();
5226 }
5227 return VINF_SUCCESS;
5228}
5229
5230
5231/** Opcode 0x0f 0x97. */
5232FNIEMOP_DEF(iemOp_setnbe_Eb)
5233{
5234 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5235 IEMOP_HLP_MIN_386();
5236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5237
5238 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5239 * any way. AMD says it's "unused", whatever that means. We're
5240 * ignoring for now. */
5241 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5242 {
5243 /* register target */
5244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5245 IEM_MC_BEGIN(0, 0);
5246 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5247 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5248 } IEM_MC_ELSE() {
5249 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5250 } IEM_MC_ENDIF();
5251 IEM_MC_ADVANCE_RIP();
5252 IEM_MC_END();
5253 }
5254 else
5255 {
5256 /* memory target */
5257 IEM_MC_BEGIN(0, 1);
5258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5261 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5262 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5263 } IEM_MC_ELSE() {
5264 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5265 } IEM_MC_ENDIF();
5266 IEM_MC_ADVANCE_RIP();
5267 IEM_MC_END();
5268 }
5269 return VINF_SUCCESS;
5270}
5271
5272
5273/** Opcode 0x0f 0x98. */
5274FNIEMOP_DEF(iemOp_sets_Eb)
5275{
5276 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5277 IEMOP_HLP_MIN_386();
5278 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5279
5280 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5281 * any way. AMD says it's "unused", whatever that means. We're
5282 * ignoring for now. */
5283 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5284 {
5285 /* register target */
5286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5287 IEM_MC_BEGIN(0, 0);
5288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5289 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5290 } IEM_MC_ELSE() {
5291 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5292 } IEM_MC_ENDIF();
5293 IEM_MC_ADVANCE_RIP();
5294 IEM_MC_END();
5295 }
5296 else
5297 {
5298 /* memory target */
5299 IEM_MC_BEGIN(0, 1);
5300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5304 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5305 } IEM_MC_ELSE() {
5306 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5307 } IEM_MC_ENDIF();
5308 IEM_MC_ADVANCE_RIP();
5309 IEM_MC_END();
5310 }
5311 return VINF_SUCCESS;
5312}
5313
5314
5315/** Opcode 0x0f 0x99. */
5316FNIEMOP_DEF(iemOp_setns_Eb)
5317{
5318 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5319 IEMOP_HLP_MIN_386();
5320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5321
5322 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5323 * any way. AMD says it's "unused", whatever that means. We're
5324 * ignoring for now. */
5325 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5326 {
5327 /* register target */
5328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5329 IEM_MC_BEGIN(0, 0);
5330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5331 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5332 } IEM_MC_ELSE() {
5333 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5334 } IEM_MC_ENDIF();
5335 IEM_MC_ADVANCE_RIP();
5336 IEM_MC_END();
5337 }
5338 else
5339 {
5340 /* memory target */
5341 IEM_MC_BEGIN(0, 1);
5342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5345 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5346 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5347 } IEM_MC_ELSE() {
5348 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5349 } IEM_MC_ENDIF();
5350 IEM_MC_ADVANCE_RIP();
5351 IEM_MC_END();
5352 }
5353 return VINF_SUCCESS;
5354}
5355
5356
5357/** Opcode 0x0f 0x9a. */
5358FNIEMOP_DEF(iemOp_setp_Eb)
5359{
5360 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5361 IEMOP_HLP_MIN_386();
5362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5363
5364 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5365 * any way. AMD says it's "unused", whatever that means. We're
5366 * ignoring for now. */
5367 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5368 {
5369 /* register target */
5370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5371 IEM_MC_BEGIN(0, 0);
5372 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5373 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5374 } IEM_MC_ELSE() {
5375 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5376 } IEM_MC_ENDIF();
5377 IEM_MC_ADVANCE_RIP();
5378 IEM_MC_END();
5379 }
5380 else
5381 {
5382 /* memory target */
5383 IEM_MC_BEGIN(0, 1);
5384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5387 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5388 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5389 } IEM_MC_ELSE() {
5390 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5391 } IEM_MC_ENDIF();
5392 IEM_MC_ADVANCE_RIP();
5393 IEM_MC_END();
5394 }
5395 return VINF_SUCCESS;
5396}
5397
5398
5399/** Opcode 0x0f 0x9b. */
5400FNIEMOP_DEF(iemOp_setnp_Eb)
5401{
5402 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5403 IEMOP_HLP_MIN_386();
5404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5405
5406 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5407 * any way. AMD says it's "unused", whatever that means. We're
5408 * ignoring for now. */
5409 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5410 {
5411 /* register target */
5412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5413 IEM_MC_BEGIN(0, 0);
5414 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5415 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5416 } IEM_MC_ELSE() {
5417 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5418 } IEM_MC_ENDIF();
5419 IEM_MC_ADVANCE_RIP();
5420 IEM_MC_END();
5421 }
5422 else
5423 {
5424 /* memory target */
5425 IEM_MC_BEGIN(0, 1);
5426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5429 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5430 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5431 } IEM_MC_ELSE() {
5432 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5433 } IEM_MC_ENDIF();
5434 IEM_MC_ADVANCE_RIP();
5435 IEM_MC_END();
5436 }
5437 return VINF_SUCCESS;
5438}
5439
5440
5441/** Opcode 0x0f 0x9c. */
5442FNIEMOP_DEF(iemOp_setl_Eb)
5443{
5444 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5445 IEMOP_HLP_MIN_386();
5446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5447
5448 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5449 * any way. AMD says it's "unused", whatever that means. We're
5450 * ignoring for now. */
5451 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5452 {
5453 /* register target */
5454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5455 IEM_MC_BEGIN(0, 0);
5456 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5457 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5458 } IEM_MC_ELSE() {
5459 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5460 } IEM_MC_ENDIF();
5461 IEM_MC_ADVANCE_RIP();
5462 IEM_MC_END();
5463 }
5464 else
5465 {
5466 /* memory target */
5467 IEM_MC_BEGIN(0, 1);
5468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5471 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5472 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5473 } IEM_MC_ELSE() {
5474 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5475 } IEM_MC_ENDIF();
5476 IEM_MC_ADVANCE_RIP();
5477 IEM_MC_END();
5478 }
5479 return VINF_SUCCESS;
5480}
5481
5482
5483/** Opcode 0x0f 0x9d. */
5484FNIEMOP_DEF(iemOp_setnl_Eb)
5485{
5486 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5487 IEMOP_HLP_MIN_386();
5488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5489
5490 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5491 * any way. AMD says it's "unused", whatever that means. We're
5492 * ignoring it for now. */
5493 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5494 {
5495 /* register target */
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5497 IEM_MC_BEGIN(0, 0);
5498 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5499 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5500 } IEM_MC_ELSE() {
5501 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5502 } IEM_MC_ENDIF();
5503 IEM_MC_ADVANCE_RIP();
5504 IEM_MC_END();
5505 }
5506 else
5507 {
5508 /* memory target */
5509 IEM_MC_BEGIN(0, 1);
5510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5513 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5514 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5515 } IEM_MC_ELSE() {
5516 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5517 } IEM_MC_ENDIF();
5518 IEM_MC_ADVANCE_RIP();
5519 IEM_MC_END();
5520 }
5521 return VINF_SUCCESS;
5522}
5523
5524
5525/** Opcode 0x0f 0x9e. */
5526FNIEMOP_DEF(iemOp_setle_Eb)
5527{
5528 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5529 IEMOP_HLP_MIN_386();
5530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5531
5532 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5533 * any way. AMD says it's "unused", whatever that means. We're
5534 * ignoring it for now. */
5535 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5536 {
5537 /* register target */
5538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5539 IEM_MC_BEGIN(0, 0);
5540 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5541 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5542 } IEM_MC_ELSE() {
5543 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5544 } IEM_MC_ENDIF();
5545 IEM_MC_ADVANCE_RIP();
5546 IEM_MC_END();
5547 }
5548 else
5549 {
5550 /* memory target */
5551 IEM_MC_BEGIN(0, 1);
5552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5555 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5556 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5557 } IEM_MC_ELSE() {
5558 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5559 } IEM_MC_ENDIF();
5560 IEM_MC_ADVANCE_RIP();
5561 IEM_MC_END();
5562 }
5563 return VINF_SUCCESS;
5564}
5565
5566
5567/** Opcode 0x0f 0x9f. */
5568FNIEMOP_DEF(iemOp_setnle_Eb)
5569{
5570 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5571 IEMOP_HLP_MIN_386();
5572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5573
5574 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5575 * any way. AMD says it's "unused", whatever that means. We're
5576 * ignoring it for now. */
5577 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5578 {
5579 /* register target */
5580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5581 IEM_MC_BEGIN(0, 0);
5582 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5583 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5584 } IEM_MC_ELSE() {
5585 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5586 } IEM_MC_ENDIF();
5587 IEM_MC_ADVANCE_RIP();
5588 IEM_MC_END();
5589 }
5590 else
5591 {
5592 /* memory target */
5593 IEM_MC_BEGIN(0, 1);
5594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5597 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5598 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5599 } IEM_MC_ELSE() {
5600 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5601 } IEM_MC_ENDIF();
5602 IEM_MC_ADVANCE_RIP();
5603 IEM_MC_END();
5604 }
5605 return VINF_SUCCESS;
5606}
5607
5608
5609/**
5610 * Common 'push segment-register' helper.
5611 */
5612FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5613{
5614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5615 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS pushes reach us in 64-bit mode. */
5616 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5617
5618 switch (pVCpu->iem.s.enmEffOpSize)
5619 {
5620 case IEMMODE_16BIT:
5621 IEM_MC_BEGIN(0, 1);
5622 IEM_MC_LOCAL(uint16_t, u16Value);
5623 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5624 IEM_MC_PUSH_U16(u16Value);
5625 IEM_MC_ADVANCE_RIP();
5626 IEM_MC_END();
5627 break;
5628
5629 case IEMMODE_32BIT:
5630 IEM_MC_BEGIN(0, 1);
5631 IEM_MC_LOCAL(uint32_t, u32Value);
5632 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
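 /* Note! The special SREG push is used because a 32-bit push of a
  *       segment register only writes the low word of the stack slot on
  *       some CPUs, leaving the high half untouched; a plain U32 push
  *       would get that wrong. */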
5633 IEM_MC_PUSH_U32_SREG(u32Value);
5634 IEM_MC_ADVANCE_RIP();
5635 IEM_MC_END();
5636 break;
5637
5638 case IEMMODE_64BIT:
5639 IEM_MC_BEGIN(0, 1);
5640 IEM_MC_LOCAL(uint64_t, u64Value);
5641 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5642 IEM_MC_PUSH_U64(u64Value);
5643 IEM_MC_ADVANCE_RIP();
5644 IEM_MC_END();
5645 break;
5646 }
5647
5648 return VINF_SUCCESS;
5649}
5650
5651
5652/** Opcode 0x0f 0xa0. */
5653FNIEMOP_DEF(iemOp_push_fs)
5654{
5655 IEMOP_MNEMONIC(push_fs, "push fs");
5656 IEMOP_HLP_MIN_386();
5657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5658 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5659}
5660
5661
5662/** Opcode 0x0f 0xa1. */
5663FNIEMOP_DEF(iemOp_pop_fs)
5664{
5665 IEMOP_MNEMONIC(pop_fs, "pop fs");
5666 IEMOP_HLP_MIN_386();
5667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5668 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5669}
5670
5671
5672/** Opcode 0x0f 0xa2. */
5673FNIEMOP_DEF(iemOp_cpuid)
5674{
5675 IEMOP_MNEMONIC(cpuid, "cpuid");
5676 IEMOP_HLP_MIN_486(); /* Not on all 486s; CPUID only appeared in later steppings. */
5677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5678 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5679}
5680
5681
5682/**
5683 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5684 * iemOp_bts_Ev_Gv.
5685 */
5686FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5687{
5688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5689 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5690
5691 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5692 {
5693 /* register destination. */
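 /* For a register destination the bit offset simply wraps modulo the
  * operand width (the IEM_MC_AND_LOCAL_* masking below); only memory
  * destinations need the address adjustment done further down. */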
5694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5695 switch (pVCpu->iem.s.enmEffOpSize)
5696 {
5697 case IEMMODE_16BIT:
5698 IEM_MC_BEGIN(3, 0);
5699 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5700 IEM_MC_ARG(uint16_t, u16Src, 1);
5701 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5702
5703 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5704 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5705 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5706 IEM_MC_REF_EFLAGS(pEFlags);
5707 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5708
5709 IEM_MC_ADVANCE_RIP();
5710 IEM_MC_END();
5711 return VINF_SUCCESS;
5712
5713 case IEMMODE_32BIT:
5714 IEM_MC_BEGIN(3, 0);
5715 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5716 IEM_MC_ARG(uint32_t, u32Src, 1);
5717 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5718
5719 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5720 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5721 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5722 IEM_MC_REF_EFLAGS(pEFlags);
5723 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5724
5725 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5726 IEM_MC_ADVANCE_RIP();
5727 IEM_MC_END();
5728 return VINF_SUCCESS;
5729
5730 case IEMMODE_64BIT:
5731 IEM_MC_BEGIN(3, 0);
5732 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5733 IEM_MC_ARG(uint64_t, u64Src, 1);
5734 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5735
5736 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5737 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5738 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5739 IEM_MC_REF_EFLAGS(pEFlags);
5740 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5741
5742 IEM_MC_ADVANCE_RIP();
5743 IEM_MC_END();
5744 return VINF_SUCCESS;
5745
5746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5747 }
5748 }
5749 else
5750 {
5751 /* memory destination. */
5752
5753 uint32_t fAccess;
5754 if (pImpl->pfnLockedU16)
5755 fAccess = IEM_ACCESS_DATA_RW;
5756 else /* BT */
5757 fAccess = IEM_ACCESS_DATA_R;
5758
5759 /** @todo test negative bit offsets! */
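 /* For memory destinations the bit offset in Gv is signed.  The
  * IEM_MC_SAR/IEM_MC_SHL pairs below turn it into a byte displacement:
  * (offset >> 4) * 2 for words, (offset >> 5) * 4 for dwords and
  * (offset >> 6) * 8 for qwords, while the masked low 4/5/6 bits select
  * the bit within that datum.  E.g. 'bt word [mem], ax' with ax=-1
  * tests bit 15 of the word at mem-2. */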
5760 switch (pVCpu->iem.s.enmEffOpSize)
5761 {
5762 case IEMMODE_16BIT:
5763 IEM_MC_BEGIN(3, 2);
5764 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5765 IEM_MC_ARG(uint16_t, u16Src, 1);
5766 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5768 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5769
5770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5771 if (pImpl->pfnLockedU16)
5772 IEMOP_HLP_DONE_DECODING();
5773 else
5774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5775 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5776 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5777 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5778 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5779 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5780 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5781 IEM_MC_FETCH_EFLAGS(EFlags);
5782
5783 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5784 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5785 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5786 else
5787 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5788 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5789
5790 IEM_MC_COMMIT_EFLAGS(EFlags);
5791 IEM_MC_ADVANCE_RIP();
5792 IEM_MC_END();
5793 return VINF_SUCCESS;
5794
5795 case IEMMODE_32BIT:
5796 IEM_MC_BEGIN(3, 2);
5797 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5798 IEM_MC_ARG(uint32_t, u32Src, 1);
5799 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5801 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5802
5803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5804 if (pImpl->pfnLockedU16)
5805 IEMOP_HLP_DONE_DECODING();
5806 else
5807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5808 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5809 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5810 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5811 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5812 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5813 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5814 IEM_MC_FETCH_EFLAGS(EFlags);
5815
5816 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5817 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5818 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5819 else
5820 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5821 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5822
5823 IEM_MC_COMMIT_EFLAGS(EFlags);
5824 IEM_MC_ADVANCE_RIP();
5825 IEM_MC_END();
5826 return VINF_SUCCESS;
5827
5828 case IEMMODE_64BIT:
5829 IEM_MC_BEGIN(3, 2);
5830 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5831 IEM_MC_ARG(uint64_t, u64Src, 1);
5832 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5834 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5835
5836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5837 if (pImpl->pfnLockedU16)
5838 IEMOP_HLP_DONE_DECODING();
5839 else
5840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5841 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5842 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5843 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5844 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5845 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5846 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5847 IEM_MC_FETCH_EFLAGS(EFlags);
5848
5849 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5850 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5851 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5852 else
5853 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5854 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5855
5856 IEM_MC_COMMIT_EFLAGS(EFlags);
5857 IEM_MC_ADVANCE_RIP();
5858 IEM_MC_END();
5859 return VINF_SUCCESS;
5860
5861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5862 }
5863 }
5864}
5865
5866
5867/** Opcode 0x0f 0xa3. */
5868FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5869{
5870 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5871 IEMOP_HLP_MIN_386();
5872 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5873}
5874
5875
5876/**
5877 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5878 */
5879FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5880{
5881 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5882 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5883
5884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5885 {
5886 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
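 /* Note! Like the plain shifts, the count is architecturally masked to
  *       5 bits (6 for 64-bit operands); this is presumably left to the
  *       assembly workers.  A 16-bit double shift with a masked count
  *       above 15 yields undefined results. */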
5887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5888
5889 switch (pVCpu->iem.s.enmEffOpSize)
5890 {
5891 case IEMMODE_16BIT:
5892 IEM_MC_BEGIN(4, 0);
5893 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5894 IEM_MC_ARG(uint16_t, u16Src, 1);
5895 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5896 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5897
5898 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5899 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5900 IEM_MC_REF_EFLAGS(pEFlags);
5901 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5902
5903 IEM_MC_ADVANCE_RIP();
5904 IEM_MC_END();
5905 return VINF_SUCCESS;
5906
5907 case IEMMODE_32BIT:
5908 IEM_MC_BEGIN(4, 0);
5909 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5910 IEM_MC_ARG(uint32_t, u32Src, 1);
5911 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5912 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5913
5914 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5915 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5916 IEM_MC_REF_EFLAGS(pEFlags);
5917 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5918
5919 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5920 IEM_MC_ADVANCE_RIP();
5921 IEM_MC_END();
5922 return VINF_SUCCESS;
5923
5924 case IEMMODE_64BIT:
5925 IEM_MC_BEGIN(4, 0);
5926 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5927 IEM_MC_ARG(uint64_t, u64Src, 1);
5928 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5929 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5930
5931 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5932 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5933 IEM_MC_REF_EFLAGS(pEFlags);
5934 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5935
5936 IEM_MC_ADVANCE_RIP();
5937 IEM_MC_END();
5938 return VINF_SUCCESS;
5939
5940 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5941 }
5942 }
5943 else
5944 {
5945 switch (pVCpu->iem.s.enmEffOpSize)
5946 {
5947 case IEMMODE_16BIT:
5948 IEM_MC_BEGIN(4, 2);
5949 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5950 IEM_MC_ARG(uint16_t, u16Src, 1);
5951 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5952 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5954
5955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5956 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5957 IEM_MC_ASSIGN(cShiftArg, cShift);
5958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5959 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5960 IEM_MC_FETCH_EFLAGS(EFlags);
5961 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5962 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5963
5964 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5965 IEM_MC_COMMIT_EFLAGS(EFlags);
5966 IEM_MC_ADVANCE_RIP();
5967 IEM_MC_END();
5968 return VINF_SUCCESS;
5969
5970 case IEMMODE_32BIT:
5971 IEM_MC_BEGIN(4, 2);
5972 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5973 IEM_MC_ARG(uint32_t, u32Src, 1);
5974 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5975 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5977
5978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5979 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5980 IEM_MC_ASSIGN(cShiftArg, cShift);
5981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5982 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5983 IEM_MC_FETCH_EFLAGS(EFlags);
5984 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5985 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5986
5987 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5988 IEM_MC_COMMIT_EFLAGS(EFlags);
5989 IEM_MC_ADVANCE_RIP();
5990 IEM_MC_END();
5991 return VINF_SUCCESS;
5992
5993 case IEMMODE_64BIT:
5994 IEM_MC_BEGIN(4, 2);
5995 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5996 IEM_MC_ARG(uint64_t, u64Src, 1);
5997 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5998 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6000
6001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6002 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6003 IEM_MC_ASSIGN(cShiftArg, cShift);
6004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6005 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6006 IEM_MC_FETCH_EFLAGS(EFlags);
6007 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6008 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6009
6010 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6011 IEM_MC_COMMIT_EFLAGS(EFlags);
6012 IEM_MC_ADVANCE_RIP();
6013 IEM_MC_END();
6014 return VINF_SUCCESS;
6015
6016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6017 }
6018 }
6019}
6020
6021
6022/**
6023 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6024 */
6025FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6026{
6027 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6028 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6029
6030 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6031 {
6032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6033
6034 switch (pVCpu->iem.s.enmEffOpSize)
6035 {
6036 case IEMMODE_16BIT:
6037 IEM_MC_BEGIN(4, 0);
6038 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6039 IEM_MC_ARG(uint16_t, u16Src, 1);
6040 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6041 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6042
6043 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6044 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6045 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6046 IEM_MC_REF_EFLAGS(pEFlags);
6047 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6048
6049 IEM_MC_ADVANCE_RIP();
6050 IEM_MC_END();
6051 return VINF_SUCCESS;
6052
6053 case IEMMODE_32BIT:
6054 IEM_MC_BEGIN(4, 0);
6055 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6056 IEM_MC_ARG(uint32_t, u32Src, 1);
6057 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6058 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6059
6060 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6061 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6062 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6063 IEM_MC_REF_EFLAGS(pEFlags);
6064 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6065
6066 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6067 IEM_MC_ADVANCE_RIP();
6068 IEM_MC_END();
6069 return VINF_SUCCESS;
6070
6071 case IEMMODE_64BIT:
6072 IEM_MC_BEGIN(4, 0);
6073 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6074 IEM_MC_ARG(uint64_t, u64Src, 1);
6075 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6076 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6077
6078 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6079 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6080 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6081 IEM_MC_REF_EFLAGS(pEFlags);
6082 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6083
6084 IEM_MC_ADVANCE_RIP();
6085 IEM_MC_END();
6086 return VINF_SUCCESS;
6087
6088 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6089 }
6090 }
6091 else
6092 {
6093 switch (pVCpu->iem.s.enmEffOpSize)
6094 {
6095 case IEMMODE_16BIT:
6096 IEM_MC_BEGIN(4, 2);
6097 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6098 IEM_MC_ARG(uint16_t, u16Src, 1);
6099 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6100 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6102
6103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6105 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6106 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6107 IEM_MC_FETCH_EFLAGS(EFlags);
6108 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6109 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6110
6111 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6112 IEM_MC_COMMIT_EFLAGS(EFlags);
6113 IEM_MC_ADVANCE_RIP();
6114 IEM_MC_END();
6115 return VINF_SUCCESS;
6116
6117 case IEMMODE_32BIT:
6118 IEM_MC_BEGIN(4, 2);
6119 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6120 IEM_MC_ARG(uint32_t, u32Src, 1);
6121 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6122 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6124
6125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6127 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6128 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6129 IEM_MC_FETCH_EFLAGS(EFlags);
6130 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6131 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6132
6133 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6134 IEM_MC_COMMIT_EFLAGS(EFlags);
6135 IEM_MC_ADVANCE_RIP();
6136 IEM_MC_END();
6137 return VINF_SUCCESS;
6138
6139 case IEMMODE_64BIT:
6140 IEM_MC_BEGIN(4, 2);
6141 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6142 IEM_MC_ARG(uint64_t, u64Src, 1);
6143 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6144 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6146
6147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6149 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6150 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6151 IEM_MC_FETCH_EFLAGS(EFlags);
6152 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6153 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6154
6155 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6156 IEM_MC_COMMIT_EFLAGS(EFlags);
6157 IEM_MC_ADVANCE_RIP();
6158 IEM_MC_END();
6159 return VINF_SUCCESS;
6160
6161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6162 }
6163 }
6164}
6165
6166
6167
6168/** Opcode 0x0f 0xa4. */
6169FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6170{
6171 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6172 IEMOP_HLP_MIN_386();
6173 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6174}
6175
6176
6177/** Opcode 0x0f 0xa5. */
6178FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6179{
6180 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6181 IEMOP_HLP_MIN_386();
6182 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6183}
6184
6185
6186/** Opcode 0x0f 0xa8. */
6187FNIEMOP_DEF(iemOp_push_gs)
6188{
6189 IEMOP_MNEMONIC(push_gs, "push gs");
6190 IEMOP_HLP_MIN_386();
6191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6192 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6193}
6194
6195
6196/** Opcode 0x0f 0xa9. */
6197FNIEMOP_DEF(iemOp_pop_gs)
6198{
6199 IEMOP_MNEMONIC(pop_gs, "pop gs");
6200 IEMOP_HLP_MIN_386();
6201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6202 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6203}
6204
6205
6206/** Opcode 0x0f 0xaa. */
6207FNIEMOP_DEF(iemOp_rsm)
6208{
6209 IEMOP_MNEMONIC(rsm, "rsm");
6210 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6211 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6212 * intercept). */
6213 IEMOP_BITCH_ABOUT_STUB();
6214 return IEMOP_RAISE_INVALID_OPCODE();
6215}
6216
6219
6220/** Opcode 0x0f 0xab. */
6221FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6222{
6223 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6224 IEMOP_HLP_MIN_386();
6225 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6226}
6227
6228
6229/** Opcode 0x0f 0xac. */
6230FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6231{
6232 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6233 IEMOP_HLP_MIN_386();
6234 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6235}
6236
6237
6238/** Opcode 0x0f 0xad. */
6239FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6240{
6241 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6242 IEMOP_HLP_MIN_386();
6243 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6244}
6245
6246
6247/** Opcode 0x0f 0xae mem/0. */
6248FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6249{
6250 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6251 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6252 return IEMOP_RAISE_INVALID_OPCODE();
6253
6254 IEM_MC_BEGIN(3, 1);
6255 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6256 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6257 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6260 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6261 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6262 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6263 IEM_MC_END();
6264 return VINF_SUCCESS;
6265}
6266
6267
6268/** Opcode 0x0f 0xae mem/1. */
6269FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6270{
6271 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6272 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6273 return IEMOP_RAISE_INVALID_OPCODE();
6274
6275 IEM_MC_BEGIN(3, 1);
6276 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6277 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6278 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6281 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6282 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6283 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6284 IEM_MC_END();
6285 return VINF_SUCCESS;
6286}
6287
6288
6289/**
6290 * @opmaps grp15
6291 * @opcode !11/2
6292 * @oppfx none
6293 * @opcpuid sse
6294 * @opgroup og_sse_mxcsrsm
6295 * @opxcpttype 5
6296 * @optest op1=0 -> mxcsr=0
6297 * @optest op1=0x2083 -> mxcsr=0x2083
6298 * @optest op1=0xfffffffe -> value.xcpt=0xd
6299 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6300 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6301 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6302 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6303 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6304 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6305 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6306 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6307 */
6308FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6309{
6310 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6311 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6312 return IEMOP_RAISE_INVALID_OPCODE();
6313
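 /* Note! As the 0xfffffffe test above indicates, setting reserved MXCSR
  *       bits raises \#GP(0); that check is left to iemCImpl_ldmxcsr. */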
6314 IEM_MC_BEGIN(2, 0);
6315 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6316 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6319 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6320 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6321 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6322 IEM_MC_END();
6323 return VINF_SUCCESS;
6324}
6325
6326
6327/**
6328 * @opmaps grp15
6329 * @opcode !11/3
6330 * @oppfx none
6331 * @opcpuid sse
6332 * @opgroup og_sse_mxcsrsm
6333 * @opxcpttype 5
6334 * @optest mxcsr=0 -> op1=0
6335 * @optest mxcsr=0x2083 -> op1=0x2083
6336 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6337 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6338 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6339 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6340 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6341 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6342 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6343 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6344 */
6345FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6346{
6347 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6348 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6349 return IEMOP_RAISE_INVALID_OPCODE();
6350
6351 IEM_MC_BEGIN(2, 0);
6352 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6353 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6356 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6357 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6358 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6359 IEM_MC_END();
6360 return VINF_SUCCESS;
6361}
6362
6363
6364/**
6365 * @opmaps grp15
6366 * @opcode !11/4
6367 * @oppfx none
6368 * @opcpuid xsave
6369 * @opgroup og_system
6370 * @opxcpttype none
6371 */
6372FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6373{
6374 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
6375 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6376 return IEMOP_RAISE_INVALID_OPCODE();
6377
6378 IEM_MC_BEGIN(3, 0);
6379 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6380 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6381 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6385 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6386 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6387 IEM_MC_END();
6388 return VINF_SUCCESS;
6389}
6390
6391
6392/**
6393 * @opmaps grp15
6394 * @opcode !11/5
6395 * @oppfx none
6396 * @opcpuid xsave
6397 * @opgroup og_system
6398 * @opxcpttype none
6399 */
6400FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6401{
6402 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6403 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6404 return IEMOP_RAISE_INVALID_OPCODE();
6405
6406 IEM_MC_BEGIN(3, 0);
6407 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6408 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6409 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6412 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Loads the state, so actualize for change like fxrstor above. */
6413 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6414 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6415 IEM_MC_END();
6416 return VINF_SUCCESS;
6417}
6418
6419/** Opcode 0x0f 0xae mem/6. */
6420FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6421
6422/**
6423 * @opmaps grp15
6424 * @opcode !11/7
6425 * @oppfx none
6426 * @opcpuid clfsh
6427 * @opgroup og_cachectl
6428 * @optest op1=1 ->
6429 */
6430FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6431{
6432 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6433 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6434 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6435
6436 IEM_MC_BEGIN(2, 0);
6437 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6438 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6441 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6442 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6443 IEM_MC_END();
6444 return VINF_SUCCESS;
6445}
6446
6447/**
6448 * @opmaps grp15
6449 * @opcode !11/7
6450 * @oppfx 0x66
6451 * @opcpuid clflushopt
6452 * @opgroup og_cachectl
6453 * @optest op1=1 ->
6454 */
6455FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6456{
6457 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6458 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6459 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6460
6461 IEM_MC_BEGIN(2, 0);
6462 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6463 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6466 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6467 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6468 IEM_MC_END();
6469 return VINF_SUCCESS;
6470}
6471
6472
6473/** Opcode 0x0f 0xae 11b/5. */
6474FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6475{
6476 RT_NOREF_PV(bRm);
6477 IEMOP_MNEMONIC(lfence, "lfence");
6478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6479 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6480 return IEMOP_RAISE_INVALID_OPCODE();
6481
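 /* If the host CPU cannot execute the real fence instruction, fall back
  * to iemAImpl_alt_mem_fence, which presumably achieves the same
  * ordering via a locked read-modify-write operation. */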
6482 IEM_MC_BEGIN(0, 0);
6483 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6484 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6485 else
6486 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6487 IEM_MC_ADVANCE_RIP();
6488 IEM_MC_END();
6489 return VINF_SUCCESS;
6490}
6491
6492
6493/** Opcode 0x0f 0xae 11b/6. */
6494FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6495{
6496 RT_NOREF_PV(bRm);
6497 IEMOP_MNEMONIC(mfence, "mfence");
6498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6499 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6500 return IEMOP_RAISE_INVALID_OPCODE();
6501
6502 IEM_MC_BEGIN(0, 0);
6503 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6504 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6505 else
6506 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6507 IEM_MC_ADVANCE_RIP();
6508 IEM_MC_END();
6509 return VINF_SUCCESS;
6510}
6511
6512
6513/** Opcode 0x0f 0xae 11b/7. */
6514FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6515{
6516 RT_NOREF_PV(bRm);
6517 IEMOP_MNEMONIC(sfence, "sfence");
6518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6519 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6520 return IEMOP_RAISE_INVALID_OPCODE();
6521
6522 IEM_MC_BEGIN(0, 0);
6523 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6524 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6525 else
6526 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6527 IEM_MC_ADVANCE_RIP();
6528 IEM_MC_END();
6529 return VINF_SUCCESS;
6530}
6531
6532
6533/** Opcode 0xf3 0x0f 0xae 11b/0. */
6534FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6535
6536/** Opcode 0xf3 0x0f 0xae 11b/1. */
6537FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6538
6539/** Opcode 0xf3 0x0f 0xae 11b/2. */
6540FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6541
6542/** Opcode 0xf3 0x0f 0xae 11b/3. */
6543FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6544
6545
6546/**
6547 * Group 15 jump table for register variant.
6548 */
6549IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6550{ /* pfx: none, 066h, 0f3h, 0f2h */
6551 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6552 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6553 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6554 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6555 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6556 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6557 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6558 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6559};
6560AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6561
6562
6563/**
6564 * Group 15 jump table for memory variant.
6565 */
6566IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6567{ /* pfx: none, 066h, 0f3h, 0f2h */
6568 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6569 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6570 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6571 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6572 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6573 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6574 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6575 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6576};
6577AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6578
6579
6580/** Opcode 0x0f 0xae. */
6581FNIEMOP_DEF(iemOp_Grp15)
6582{
6583 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor strictly needed, but useful when debugging 286 code. */
6584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
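 /* Both tables are indexed by reg * 4 + idxPrefix, the four columns
  * being the none, 066h, 0f3h and 0f2h prefix variants. */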
6585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6586 /* register, register */
6587 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6588 + pVCpu->iem.s.idxPrefix], bRm);
6589 /* memory, register */
6590 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6591 + pVCpu->iem.s.idxPrefix], bRm);
6592}
6593
6594
6595/** Opcode 0x0f 0xaf. */
6596FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6597{
6598 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6599 IEMOP_HLP_MIN_386();
6600 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6601 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6602}
6603
6604
6605/** Opcode 0x0f 0xb0. */
6606FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6607{
6608 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6609 IEMOP_HLP_MIN_486();
6610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6611
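 /* The worker compares AL with the destination: if equal it sets ZF and
  * stores the source in the destination, otherwise it clears ZF and
  * loads the destination into AL.  All of that, including the EFLAGS
  * updating, happens inside iemAImpl_cmpxchg_u8 via the references
  * passed below. */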
6612 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6613 {
6614 IEMOP_HLP_DONE_DECODING();
6615 IEM_MC_BEGIN(4, 0);
6616 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6617 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6618 IEM_MC_ARG(uint8_t, u8Src, 2);
6619 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6620
6621 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6622 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6623 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6624 IEM_MC_REF_EFLAGS(pEFlags);
6625 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6626 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6627 else
6628 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6629
6630 IEM_MC_ADVANCE_RIP();
6631 IEM_MC_END();
6632 }
6633 else
6634 {
6635 IEM_MC_BEGIN(4, 3);
6636 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6637 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6638 IEM_MC_ARG(uint8_t, u8Src, 2);
6639 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6641 IEM_MC_LOCAL(uint8_t, u8Al);
6642
6643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6644 IEMOP_HLP_DONE_DECODING();
6645 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6646 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6647 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6648 IEM_MC_FETCH_EFLAGS(EFlags);
6649 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6650 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6651 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6652 else
6653 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6654
6655 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6656 IEM_MC_COMMIT_EFLAGS(EFlags);
6657 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6658 IEM_MC_ADVANCE_RIP();
6659 IEM_MC_END();
6660 }
6661 return VINF_SUCCESS;
6662}
6663
6664/** Opcode 0x0f 0xb1. */
6665FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6666{
6667 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6668 IEMOP_HLP_MIN_486();
6669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6670
6671 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6672 {
6673 IEMOP_HLP_DONE_DECODING();
6674 switch (pVCpu->iem.s.enmEffOpSize)
6675 {
6676 case IEMMODE_16BIT:
6677 IEM_MC_BEGIN(4, 0);
6678 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6679 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6680 IEM_MC_ARG(uint16_t, u16Src, 2);
6681 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6682
6683 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6684 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6685 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6686 IEM_MC_REF_EFLAGS(pEFlags);
6687 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6688 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6689 else
6690 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6691
6692 IEM_MC_ADVANCE_RIP();
6693 IEM_MC_END();
6694 return VINF_SUCCESS;
6695
6696 case IEMMODE_32BIT:
6697 IEM_MC_BEGIN(4, 0);
6698 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6699 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6700 IEM_MC_ARG(uint32_t, u32Src, 2);
6701 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6702
6703 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6704 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6705 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6706 IEM_MC_REF_EFLAGS(pEFlags);
6707 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6708 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6709 else
6710 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6711
6712 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6713 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6714 IEM_MC_ADVANCE_RIP();
6715 IEM_MC_END();
6716 return VINF_SUCCESS;
6717
6718 case IEMMODE_64BIT:
6719 IEM_MC_BEGIN(4, 0);
6720 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6721 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
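 /* On 32-bit hosts the 64-bit source operand goes by reference rather
  * than by value, presumably because of the assembly worker calling
  * convention on x86. */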
6722#ifdef RT_ARCH_X86
6723 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6724#else
6725 IEM_MC_ARG(uint64_t, u64Src, 2);
6726#endif
6727 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6728
6729 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6730 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6731 IEM_MC_REF_EFLAGS(pEFlags);
6732#ifdef RT_ARCH_X86
6733 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6734 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6735 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6736 else
6737 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6738#else
6739 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6740 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6741 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6742 else
6743 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6744#endif
6745
6746 IEM_MC_ADVANCE_RIP();
6747 IEM_MC_END();
6748 return VINF_SUCCESS;
6749
6750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6751 }
6752 }
6753 else
6754 {
6755 switch (pVCpu->iem.s.enmEffOpSize)
6756 {
6757 case IEMMODE_16BIT:
6758 IEM_MC_BEGIN(4, 3);
6759 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6760 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6761 IEM_MC_ARG(uint16_t, u16Src, 2);
6762 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6764 IEM_MC_LOCAL(uint16_t, u16Ax);
6765
6766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6767 IEMOP_HLP_DONE_DECODING();
6768 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6769 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6770 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6771 IEM_MC_FETCH_EFLAGS(EFlags);
6772 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6773 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6774 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6775 else
6776 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6777
6778 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6779 IEM_MC_COMMIT_EFLAGS(EFlags);
6780 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6781 IEM_MC_ADVANCE_RIP();
6782 IEM_MC_END();
6783 return VINF_SUCCESS;
6784
6785 case IEMMODE_32BIT:
6786 IEM_MC_BEGIN(4, 3);
6787 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6788 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6789 IEM_MC_ARG(uint32_t, u32Src, 2);
6790 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6792 IEM_MC_LOCAL(uint32_t, u32Eax);
6793
6794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6795 IEMOP_HLP_DONE_DECODING();
6796 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6797 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6798 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6799 IEM_MC_FETCH_EFLAGS(EFlags);
6800 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6801 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6802 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6803 else
6804 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6805
6806 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6807 IEM_MC_COMMIT_EFLAGS(EFlags);
6808 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6809 IEM_MC_ADVANCE_RIP();
6810 IEM_MC_END();
6811 return VINF_SUCCESS;
6812
6813 case IEMMODE_64BIT:
6814 IEM_MC_BEGIN(4, 3);
6815 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6816 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6817#ifdef RT_ARCH_X86
6818 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6819#else
6820 IEM_MC_ARG(uint64_t, u64Src, 2);
6821#endif
6822 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6824 IEM_MC_LOCAL(uint64_t, u64Rax);
6825
6826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6827 IEMOP_HLP_DONE_DECODING();
6828 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6829 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6830 IEM_MC_FETCH_EFLAGS(EFlags);
6831 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6832#ifdef RT_ARCH_X86
6833 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6834 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6835 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6836 else
6837 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6838#else
6839 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6840 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6841 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6842 else
6843 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6844#endif
6845
6846 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6847 IEM_MC_COMMIT_EFLAGS(EFlags);
6848 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6849 IEM_MC_ADVANCE_RIP();
6850 IEM_MC_END();
6851 return VINF_SUCCESS;
6852
6853 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6854 }
6855 }
6856}
6857
6858
6859FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6860{
6861 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6862 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
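 /* The Mp operand is a far pointer: the offset (2, 4 or 8 bytes
  * depending on the operand size) comes first, followed by the 16-bit
  * selector -- hence the IEM_MC_FETCH_MEM_U16_DISP with displacement
  * 2/4/8 below. */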
6863
6864 switch (pVCpu->iem.s.enmEffOpSize)
6865 {
6866 case IEMMODE_16BIT:
6867 IEM_MC_BEGIN(5, 1);
6868 IEM_MC_ARG(uint16_t, uSel, 0);
6869 IEM_MC_ARG(uint16_t, offSeg, 1);
6870 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6871 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6872 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6873 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6876 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6877 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6878 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6879 IEM_MC_END();
6880 return VINF_SUCCESS;
6881
6882 case IEMMODE_32BIT:
6883 IEM_MC_BEGIN(5, 1);
6884 IEM_MC_ARG(uint16_t, uSel, 0);
6885 IEM_MC_ARG(uint32_t, offSeg, 1);
6886 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6887 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6888 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6889 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6890 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6892 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6893 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6894 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6895 IEM_MC_END();
6896 return VINF_SUCCESS;
6897
6898 case IEMMODE_64BIT:
6899 IEM_MC_BEGIN(5, 1);
6900 IEM_MC_ARG(uint16_t, uSel, 0);
6901 IEM_MC_ARG(uint64_t, offSeg, 1);
6902 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6903 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6904 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6905 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6908 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
6909 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6910 else
6911 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6912 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6913 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6914 IEM_MC_END();
6915 return VINF_SUCCESS;
6916
6917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6918 }
6919}
6920
6921
6922/** Opcode 0x0f 0xb2. */
6923FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6924{
6925 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6926 IEMOP_HLP_MIN_386();
6927 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6929 return IEMOP_RAISE_INVALID_OPCODE();
6930 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6931}
6932
6933
6934/** Opcode 0x0f 0xb3. */
6935FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6936{
6937 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6938 IEMOP_HLP_MIN_386();
6939 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6940}
6941
6942
6943/** Opcode 0x0f 0xb4. */
6944FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6945{
6946 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6947 IEMOP_HLP_MIN_386();
6948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6949 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6950 return IEMOP_RAISE_INVALID_OPCODE();
6951 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6952}
6953
6954
6955/** Opcode 0x0f 0xb5. */
6956FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6957{
6958 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6959 IEMOP_HLP_MIN_386();
6960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6962 return IEMOP_RAISE_INVALID_OPCODE();
6963 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6964}
6965
6966
6967/** Opcode 0x0f 0xb6. */
6968FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6969{
6970 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6971 IEMOP_HLP_MIN_386();
6972
6973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6974
6975 /*
6976 * If rm is denoting a register, no more instruction bytes.
6977 */
6978 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6979 {
6980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6981 switch (pVCpu->iem.s.enmEffOpSize)
6982 {
6983 case IEMMODE_16BIT:
6984 IEM_MC_BEGIN(0, 1);
6985 IEM_MC_LOCAL(uint16_t, u16Value);
6986 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6987 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6988 IEM_MC_ADVANCE_RIP();
6989 IEM_MC_END();
6990 return VINF_SUCCESS;
6991
6992 case IEMMODE_32BIT:
6993 IEM_MC_BEGIN(0, 1);
6994 IEM_MC_LOCAL(uint32_t, u32Value);
6995 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6996 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6997 IEM_MC_ADVANCE_RIP();
6998 IEM_MC_END();
6999 return VINF_SUCCESS;
7000
7001 case IEMMODE_64BIT:
7002 IEM_MC_BEGIN(0, 1);
7003 IEM_MC_LOCAL(uint64_t, u64Value);
7004 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7005 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7006 IEM_MC_ADVANCE_RIP();
7007 IEM_MC_END();
7008 return VINF_SUCCESS;
7009
7010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7011 }
7012 }
7013 else
7014 {
7015 /*
7016 * We're loading a register from memory.
7017 */
7018 switch (pVCpu->iem.s.enmEffOpSize)
7019 {
7020 case IEMMODE_16BIT:
7021 IEM_MC_BEGIN(0, 2);
7022 IEM_MC_LOCAL(uint16_t, u16Value);
7023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7026 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7027 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7028 IEM_MC_ADVANCE_RIP();
7029 IEM_MC_END();
7030 return VINF_SUCCESS;
7031
7032 case IEMMODE_32BIT:
7033 IEM_MC_BEGIN(0, 2);
7034 IEM_MC_LOCAL(uint32_t, u32Value);
7035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7038 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7039 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7040 IEM_MC_ADVANCE_RIP();
7041 IEM_MC_END();
7042 return VINF_SUCCESS;
7043
7044 case IEMMODE_64BIT:
7045 IEM_MC_BEGIN(0, 2);
7046 IEM_MC_LOCAL(uint64_t, u64Value);
7047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7050 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7051 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7052 IEM_MC_ADVANCE_RIP();
7053 IEM_MC_END();
7054 return VINF_SUCCESS;
7055
7056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7057 }
7058 }
7059}
7060
7061
7062/** Opcode 0x0f 0xb7. */
7063FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7064{
7065 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7066 IEMOP_HLP_MIN_386();
7067
7068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7069
7070 /** @todo Not entirely sure how the operand size prefix is handled here,
7071 * assuming that it will be ignored. Would be nice to have a few
7072 * tests for this. */
7073 /*
7074 * If rm is denoting a register, no more instruction bytes.
7075 */
7076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7077 {
7078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7079 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7080 {
7081 IEM_MC_BEGIN(0, 1);
7082 IEM_MC_LOCAL(uint32_t, u32Value);
7083 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7084 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7085 IEM_MC_ADVANCE_RIP();
7086 IEM_MC_END();
7087 }
7088 else
7089 {
7090 IEM_MC_BEGIN(0, 1);
7091 IEM_MC_LOCAL(uint64_t, u64Value);
7092 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7093 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7094 IEM_MC_ADVANCE_RIP();
7095 IEM_MC_END();
7096 }
7097 }
7098 else
7099 {
7100 /*
7101 * We're loading a register from memory.
7102 */
7103 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7104 {
7105 IEM_MC_BEGIN(0, 2);
7106 IEM_MC_LOCAL(uint32_t, u32Value);
7107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7110 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7111 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7112 IEM_MC_ADVANCE_RIP();
7113 IEM_MC_END();
7114 }
7115 else
7116 {
7117 IEM_MC_BEGIN(0, 2);
7118 IEM_MC_LOCAL(uint64_t, u64Value);
7119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7122 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7123 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7124 IEM_MC_ADVANCE_RIP();
7125 IEM_MC_END();
7126 }
7127 }
7128 return VINF_SUCCESS;
7129}
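/* Worked example (illustrative): with a word source of 0x8042, movzx yields
   0x00008042 in a 32-bit destination, whereas movsx (opcode 0x0f 0xbf below)
   sign-extends it to 0xffff8042. */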
7130
7131
7132/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7133FNIEMOP_UD_STUB(iemOp_jmpe);
7134/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7135FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7136
7137
7138/**
7139 * @opcode 0xb9
7140 * @opinvalid intel-modrm
7141 * @optest ->
7142 */
7143FNIEMOP_DEF(iemOp_Grp10)
7144{
7145 /*
7146 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
7147 * ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7148 */
7149 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7150 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
7151 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7152}
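/* Illustrative encodings (example bytes): on Intel, '0f b9 04 24'
   (ud1 eax, [esp]) consumes the ModR/M and SIB bytes before raising #UD,
   while AMD raises #UD right after the '0f b9' opcode bytes. */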
7153
7154
7155/** Opcode 0x0f 0xba. */
7156FNIEMOP_DEF(iemOp_Grp8)
7157{
7158 IEMOP_HLP_MIN_386();
7159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7160 PCIEMOPBINSIZES pImpl;
7161 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7162 {
7163 case 0: case 1: case 2: case 3:
7164 /* Both AMD and Intel want full modr/m decoding and imm8. */
7165 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7166 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7167 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7168 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7169 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7171 }
7172 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7173
7174 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7175 {
7176 /* register destination. */
7177 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7179
7180 switch (pVCpu->iem.s.enmEffOpSize)
7181 {
7182 case IEMMODE_16BIT:
7183 IEM_MC_BEGIN(3, 0);
7184 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7185 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7186 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7187
7188 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7189 IEM_MC_REF_EFLAGS(pEFlags);
7190 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7191
7192 IEM_MC_ADVANCE_RIP();
7193 IEM_MC_END();
7194 return VINF_SUCCESS;
7195
7196 case IEMMODE_32BIT:
7197 IEM_MC_BEGIN(3, 0);
7198 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7199 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7200 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7201
7202 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7203 IEM_MC_REF_EFLAGS(pEFlags);
7204 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7205
7206 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7207 IEM_MC_ADVANCE_RIP();
7208 IEM_MC_END();
7209 return VINF_SUCCESS;
7210
7211 case IEMMODE_64BIT:
7212 IEM_MC_BEGIN(3, 0);
7213 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7214 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7216
7217 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7218 IEM_MC_REF_EFLAGS(pEFlags);
7219 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7220
7221 IEM_MC_ADVANCE_RIP();
7222 IEM_MC_END();
7223 return VINF_SUCCESS;
7224
7225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7226 }
7227 }
7228 else
7229 {
7230 /* memory destination. */
7231
7232 uint32_t fAccess;
7233 if (pImpl->pfnLockedU16)
7234 fAccess = IEM_ACCESS_DATA_RW;
7235 else /* BT */
7236 fAccess = IEM_ACCESS_DATA_R;
7237
7238 /** @todo test negative bit offsets! */
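        /* For these imm8 encodings the bit offset is reduced modulo the operand
           width (note the 0x0f/0x1f/0x3f masking below), so e.g. a 16-bit
           'bt word [mem], 21' tests bit 21 & 15 = 5.  Minimal sketch of that
           masking, assuming a 16-bit operand (illustrative only, not built): */
#if 0
        uint8_t  const u8BitExample = 21;                       /* example immediate */
        unsigned const iBitTested   = u8BitExample & (16 - 1);  /* -> 5 */
#endif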
7239 switch (pVCpu->iem.s.enmEffOpSize)
7240 {
7241 case IEMMODE_16BIT:
7242 IEM_MC_BEGIN(3, 1);
7243 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7244 IEM_MC_ARG(uint16_t, u16Src, 1);
7245 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7247
7248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7249 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7250 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7251 if (pImpl->pfnLockedU16)
7252 IEMOP_HLP_DONE_DECODING();
7253 else
7254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7255 IEM_MC_FETCH_EFLAGS(EFlags);
7256 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7257 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7258 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7259 else
7260 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7261 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7262
7263 IEM_MC_COMMIT_EFLAGS(EFlags);
7264 IEM_MC_ADVANCE_RIP();
7265 IEM_MC_END();
7266 return VINF_SUCCESS;
7267
7268 case IEMMODE_32BIT:
7269 IEM_MC_BEGIN(3, 1);
7270 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7271 IEM_MC_ARG(uint32_t, u32Src, 1);
7272 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7274
7275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7276 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7277 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7278 if (pImpl->pfnLockedU16)
7279 IEMOP_HLP_DONE_DECODING();
7280 else
7281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7282 IEM_MC_FETCH_EFLAGS(EFlags);
7283 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7284 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7285 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7286 else
7287 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7288 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7289
7290 IEM_MC_COMMIT_EFLAGS(EFlags);
7291 IEM_MC_ADVANCE_RIP();
7292 IEM_MC_END();
7293 return VINF_SUCCESS;
7294
7295 case IEMMODE_64BIT:
7296 IEM_MC_BEGIN(3, 1);
7297 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7298 IEM_MC_ARG(uint64_t, u64Src, 1);
7299 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7301
7302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7303 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7304 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7305 if (pImpl->pfnLockedU16)
7306 IEMOP_HLP_DONE_DECODING();
7307 else
7308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7309 IEM_MC_FETCH_EFLAGS(EFlags);
7310 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7311 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7312 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7313 else
7314 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7315 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7316
7317 IEM_MC_COMMIT_EFLAGS(EFlags);
7318 IEM_MC_ADVANCE_RIP();
7319 IEM_MC_END();
7320 return VINF_SUCCESS;
7321
7322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7323 }
7324 }
7325}
7326
7327
7328/** Opcode 0x0f 0xbb. */
7329FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7330{
7331 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7332 IEMOP_HLP_MIN_386();
7333 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7334}
7335
7336
7337/** Opcode 0x0f 0xbc. */
7338FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7339{
7340 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7341 IEMOP_HLP_MIN_386();
7342 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7343 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7344}
7345
7346
7347/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7348FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7349
7350
7351/** Opcode 0x0f 0xbd. */
7352FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7353{
7354 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7355 IEMOP_HLP_MIN_386();
7356 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7357 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7358}
7359
7360
7361/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7362FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7363
7364
7365/** Opcode 0x0f 0xbe. */
7366FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7367{
7368 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7369 IEMOP_HLP_MIN_386();
7370
7371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7372
7373 /*
7374 * If rm is denoting a register, no more instruction bytes.
7375 */
7376 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7377 {
7378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7379 switch (pVCpu->iem.s.enmEffOpSize)
7380 {
7381 case IEMMODE_16BIT:
7382 IEM_MC_BEGIN(0, 1);
7383 IEM_MC_LOCAL(uint16_t, u16Value);
7384 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7385 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7386 IEM_MC_ADVANCE_RIP();
7387 IEM_MC_END();
7388 return VINF_SUCCESS;
7389
7390 case IEMMODE_32BIT:
7391 IEM_MC_BEGIN(0, 1);
7392 IEM_MC_LOCAL(uint32_t, u32Value);
7393 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7394 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7395 IEM_MC_ADVANCE_RIP();
7396 IEM_MC_END();
7397 return VINF_SUCCESS;
7398
7399 case IEMMODE_64BIT:
7400 IEM_MC_BEGIN(0, 1);
7401 IEM_MC_LOCAL(uint64_t, u64Value);
7402 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7403 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7404 IEM_MC_ADVANCE_RIP();
7405 IEM_MC_END();
7406 return VINF_SUCCESS;
7407
7408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7409 }
7410 }
7411 else
7412 {
7413 /*
7414 * We're loading a register from memory.
7415 */
7416 switch (pVCpu->iem.s.enmEffOpSize)
7417 {
7418 case IEMMODE_16BIT:
7419 IEM_MC_BEGIN(0, 2);
7420 IEM_MC_LOCAL(uint16_t, u16Value);
7421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7424 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7425 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7426 IEM_MC_ADVANCE_RIP();
7427 IEM_MC_END();
7428 return VINF_SUCCESS;
7429
7430 case IEMMODE_32BIT:
7431 IEM_MC_BEGIN(0, 2);
7432 IEM_MC_LOCAL(uint32_t, u32Value);
7433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7436 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7437 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7438 IEM_MC_ADVANCE_RIP();
7439 IEM_MC_END();
7440 return VINF_SUCCESS;
7441
7442 case IEMMODE_64BIT:
7443 IEM_MC_BEGIN(0, 2);
7444 IEM_MC_LOCAL(uint64_t, u64Value);
7445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7448 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7449 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7450 IEM_MC_ADVANCE_RIP();
7451 IEM_MC_END();
7452 return VINF_SUCCESS;
7453
7454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7455 }
7456 }
7457}
7458
7459
7460/** Opcode 0x0f 0xbf. */
7461FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7462{
7463 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7464 IEMOP_HLP_MIN_386();
7465
7466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7467
7468 /** @todo Not entirely sure how the operand size prefix is handled here,
7469 * assuming that it will be ignored. Would be nice to have a few
7470 * tests for this. */
7471 /*
7472 * If rm is denoting a register, no more instruction bytes.
7473 */
7474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7475 {
7476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7477 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7478 {
7479 IEM_MC_BEGIN(0, 1);
7480 IEM_MC_LOCAL(uint32_t, u32Value);
7481 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7482 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7483 IEM_MC_ADVANCE_RIP();
7484 IEM_MC_END();
7485 }
7486 else
7487 {
7488 IEM_MC_BEGIN(0, 1);
7489 IEM_MC_LOCAL(uint64_t, u64Value);
7490 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7491 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7492 IEM_MC_ADVANCE_RIP();
7493 IEM_MC_END();
7494 }
7495 }
7496 else
7497 {
7498 /*
7499 * We're loading a register from memory.
7500 */
7501 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7502 {
7503 IEM_MC_BEGIN(0, 2);
7504 IEM_MC_LOCAL(uint32_t, u32Value);
7505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7508 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7509 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7510 IEM_MC_ADVANCE_RIP();
7511 IEM_MC_END();
7512 }
7513 else
7514 {
7515 IEM_MC_BEGIN(0, 2);
7516 IEM_MC_LOCAL(uint64_t, u64Value);
7517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7520 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7521 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7522 IEM_MC_ADVANCE_RIP();
7523 IEM_MC_END();
7524 }
7525 }
7526 return VINF_SUCCESS;
7527}
7528
7529
7530/** Opcode 0x0f 0xc0. */
7531FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7532{
7533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7534 IEMOP_HLP_MIN_486();
7535 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7536
7537 /*
7538 * If rm is denoting a register, no more instruction bytes.
7539 */
7540 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7541 {
7542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7543
7544 IEM_MC_BEGIN(3, 0);
7545 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7546 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7547 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7548
7549 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7550 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7551 IEM_MC_REF_EFLAGS(pEFlags);
7552 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7553
7554 IEM_MC_ADVANCE_RIP();
7555 IEM_MC_END();
7556 }
7557 else
7558 {
7559 /*
7560 * We're accessing memory.
7561 */
7562 IEM_MC_BEGIN(3, 3);
7563 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7564 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7565 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7566 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7568
7569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7570 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7571 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7572 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7573 IEM_MC_FETCH_EFLAGS(EFlags);
7574 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7575 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7576 else
7577 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7578
7579 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7580 IEM_MC_COMMIT_EFLAGS(EFlags);
7581 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7582 IEM_MC_ADVANCE_RIP();
7583 IEM_MC_END();
7584 return VINF_SUCCESS;
7585 }
7586 return VINF_SUCCESS;
7587}
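/* A minimal plain-C sketch of the xadd semantics emulated above (illustrative
   only, not built; the real work is done by iemAImpl_xadd_u8 and friends): */
#if 0
static void xaddU8Sketch(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const uTmp = *pu8Dst;   /* remember the original destination */
    *pu8Dst = uTmp + *pu8Reg;       /* the destination receives the sum (EFLAGS as for ADD) */
    *pu8Reg = uTmp;                 /* the source register receives the old destination */
}
#endif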
7588
7589
7590/** Opcode 0x0f 0xc1. */
7591FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7592{
7593 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7594 IEMOP_HLP_MIN_486();
7595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7596
7597 /*
7598 * If rm is denoting a register, no more instruction bytes.
7599 */
7600 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7601 {
7602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7603
7604 switch (pVCpu->iem.s.enmEffOpSize)
7605 {
7606 case IEMMODE_16BIT:
7607 IEM_MC_BEGIN(3, 0);
7608 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7609 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7610 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7611
7612 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7613 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7614 IEM_MC_REF_EFLAGS(pEFlags);
7615 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7616
7617 IEM_MC_ADVANCE_RIP();
7618 IEM_MC_END();
7619 return VINF_SUCCESS;
7620
7621 case IEMMODE_32BIT:
7622 IEM_MC_BEGIN(3, 0);
7623 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7624 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7625 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7626
7627 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7628 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7629 IEM_MC_REF_EFLAGS(pEFlags);
7630 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7631
7632 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7633 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7634 IEM_MC_ADVANCE_RIP();
7635 IEM_MC_END();
7636 return VINF_SUCCESS;
7637
7638 case IEMMODE_64BIT:
7639 IEM_MC_BEGIN(3, 0);
7640 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7641 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7642 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7643
7644 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7645 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7646 IEM_MC_REF_EFLAGS(pEFlags);
7647 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7648
7649 IEM_MC_ADVANCE_RIP();
7650 IEM_MC_END();
7651 return VINF_SUCCESS;
7652
7653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7654 }
7655 }
7656 else
7657 {
7658 /*
7659 * We're accessing memory.
7660 */
7661 switch (pVCpu->iem.s.enmEffOpSize)
7662 {
7663 case IEMMODE_16BIT:
7664 IEM_MC_BEGIN(3, 3);
7665 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7666 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7667 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7668 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7670
7671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7672 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7673 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7674 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7675 IEM_MC_FETCH_EFLAGS(EFlags);
7676 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7677 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7678 else
7679 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7680
7681 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7682 IEM_MC_COMMIT_EFLAGS(EFlags);
7683 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7684 IEM_MC_ADVANCE_RIP();
7685 IEM_MC_END();
7686 return VINF_SUCCESS;
7687
7688 case IEMMODE_32BIT:
7689 IEM_MC_BEGIN(3, 3);
7690 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7691 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7692 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7693 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7695
7696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7697 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7698 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7699 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7700 IEM_MC_FETCH_EFLAGS(EFlags);
7701 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7702 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7703 else
7704 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7705
7706 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7707 IEM_MC_COMMIT_EFLAGS(EFlags);
7708 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7709 IEM_MC_ADVANCE_RIP();
7710 IEM_MC_END();
7711 return VINF_SUCCESS;
7712
7713 case IEMMODE_64BIT:
7714 IEM_MC_BEGIN(3, 3);
7715 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7716 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7717 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7718 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7720
7721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7722 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7723 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7724 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7725 IEM_MC_FETCH_EFLAGS(EFlags);
7726 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7727 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7728 else
7729 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7730
7731 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7732 IEM_MC_COMMIT_EFLAGS(EFlags);
7733 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7734 IEM_MC_ADVANCE_RIP();
7735 IEM_MC_END();
7736 return VINF_SUCCESS;
7737
7738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7739 }
7740 }
7741}
7742
7743
7744/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7745FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7746/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7747FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7748/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7749FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7750/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7751FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7752
7753
7754/** Opcode 0x0f 0xc3. */
7755FNIEMOP_DEF(iemOp_movnti_My_Gy)
7756{
7757 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7758
7759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7760
7761 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7762 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7763 {
7764 switch (pVCpu->iem.s.enmEffOpSize)
7765 {
7766 case IEMMODE_32BIT:
7767 IEM_MC_BEGIN(0, 2);
7768 IEM_MC_LOCAL(uint32_t, u32Value);
7769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7770
7771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7773 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7774 return IEMOP_RAISE_INVALID_OPCODE();
7775
7776 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7777 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7778 IEM_MC_ADVANCE_RIP();
7779 IEM_MC_END();
7780 break;
7781
7782 case IEMMODE_64BIT:
7783 IEM_MC_BEGIN(0, 2);
7784 IEM_MC_LOCAL(uint64_t, u64Value);
7785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7786
7787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7789 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7790 return IEMOP_RAISE_INVALID_OPCODE();
7791
7792 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7793 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7794 IEM_MC_ADVANCE_RIP();
7795 IEM_MC_END();
7796 break;
7797
7798 case IEMMODE_16BIT:
7799 /** @todo check this form. */
7800 return IEMOP_RAISE_INVALID_OPCODE();
7801 }
7802 }
7803 else
7804 return IEMOP_RAISE_INVALID_OPCODE();
7805 return VINF_SUCCESS;
7806}
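/* Note: the non-temporal hint of movnti only affects caching behaviour on real
   hardware, so the emulation above can implement it as an ordinary store. */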
7807/* Opcode 0x66 0x0f 0xc3 - invalid */
7808/* Opcode 0xf3 0x0f 0xc3 - invalid */
7809/* Opcode 0xf2 0x0f 0xc3 - invalid */
7810
7811/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7812FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7813/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7814FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7815/* Opcode 0xf3 0x0f 0xc4 - invalid */
7816/* Opcode 0xf2 0x0f 0xc4 - invalid */
7817
7818/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7819FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7820/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7821FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7822/* Opcode 0xf3 0x0f 0xc5 - invalid */
7823/* Opcode 0xf2 0x0f 0xc5 - invalid */
7824
7825/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7826FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7827/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7828FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7829/* Opcode 0xf3 0x0f 0xc6 - invalid */
7830/* Opcode 0xf2 0x0f 0xc6 - invalid */
7831
7832
7833/** Opcode 0x0f 0xc7 !11/1. */
7834FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7835{
7836 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7837
7838 IEM_MC_BEGIN(4, 3);
7839 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7840 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7841 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7842 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7843 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7844 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7845 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7846
7847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7848 IEMOP_HLP_DONE_DECODING();
7849 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7850
7851 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7852 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7853 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7854
7855 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7856 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7857 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7858
7859 IEM_MC_FETCH_EFLAGS(EFlags);
7860 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7861 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7862 else
7863 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7864
7865 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7866 IEM_MC_COMMIT_EFLAGS(EFlags);
7867 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7868 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7869 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7870 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7871 IEM_MC_ENDIF();
7872 IEM_MC_ADVANCE_RIP();
7873
7874 IEM_MC_END();
7875 return VINF_SUCCESS;
7876}
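/* A plain-C sketch of the cmpxchg8b semantics emulated above (illustrative
   only, not built; only the ZF handling is shown): */
#if 0
static void cmpxchg8bSketch(uint64_t *pu64Mem, uint32_t *puEax, uint32_t *puEdx,
                            uint32_t uEbx, uint32_t uEcx, uint32_t *pfEFlags)
{
    uint64_t const uCmp = ((uint64_t)*puEdx << 32) | *puEax;
    if (*pu64Mem == uCmp)
    {
        *pu64Mem   = ((uint64_t)uEcx << 32) | uEbx; /* equal: store ECX:EBX */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *puEdx     = (uint32_t)(*pu64Mem >> 32);    /* not equal: load EDX:EAX from memory */
        *puEax     = (uint32_t)*pu64Mem;
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
#endif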
7877
7878
7879/** Opcode REX.W 0x0f 0xc7 !11/1. */
7880FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7881{
7882 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7883 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7884 {
7885#if 0
7886 RT_NOREF(bRm);
7887 IEMOP_BITCH_ABOUT_STUB();
7888 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7889#else
7890 IEM_MC_BEGIN(4, 3);
7891 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7892 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7893 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7894 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7895 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7896 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7897 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7898
7899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7900 IEMOP_HLP_DONE_DECODING();
7901 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7902 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7903
7904 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7905 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7906 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7907
7908 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7909 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7910 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7911
7912 IEM_MC_FETCH_EFLAGS(EFlags);
7913# ifdef RT_ARCH_AMD64
7914 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7915 {
7916 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7917 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7918 else
7919 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7920 }
7921 else
7922# endif
7923 {
7924 /* Note! The fallback for 32-bit hosts and hosts without CX16 does multiple
7925 accesses which are not all atomic; that works fine in a uni-CPU guest
7926 configuration (ignoring DMA). If guest SMP is active we have no choice
7927 but to use a rendezvous callback here. Sigh. */
7928 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7929 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7930 else
7931 {
7932 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7933 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7934 }
7935 }
7936
7937 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7938 IEM_MC_COMMIT_EFLAGS(EFlags);
7939 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7940 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7941 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7942 IEM_MC_ENDIF();
7943 IEM_MC_ADVANCE_RIP();
7944
7945 IEM_MC_END();
7946 return VINF_SUCCESS;
7947#endif
7948 }
7949 Log(("cmpxchg16b -> #UD\n"));
7950 return IEMOP_RAISE_INVALID_OPCODE();
7951}
7952
7953FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7954{
7955 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7956 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7957 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7958}
7959
7960/** Opcode 0x0f 0xc7 11/6. */
7961FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7962
7963/** Opcode 0x0f 0xc7 !11/6. */
7964FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7965
7966/** Opcode 0x66 0x0f 0xc7 !11/6. */
7967FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7968
7969/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7970FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7971
7972/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7973FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7974
7975/** Opcode 0x0f 0xc7 11/7. */
7976FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7977
7978
7979/**
7980 * Group 9 jump table for register variant.
7981 */
7982IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7983{ /* pfx: none, 066h, 0f3h, 0f2h */
7984 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7985 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7986 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7987 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7988 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7989 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7990 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7991 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7992};
7993AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7994
7995
7996/**
7997 * Group 9 jump table for memory variant.
7998 */
7999IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8000{ /* pfx: none, 066h, 0f3h, 0f2h */
8001 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8002 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8003 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8004 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8005 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8006 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8007 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8008 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8009};
8010AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8011
8012
8013/** Opcode 0x0f 0xc7. */
8014FNIEMOP_DEF(iemOp_Grp9)
8015{
8016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8017 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8018 /* register, register */
8019 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8020 + pVCpu->iem.s.idxPrefix], bRm);
8021 /* memory, register */
8022 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8023 + pVCpu->iem.s.idxPrefix], bRm);
8024}
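/* Illustrative Group 9 table lookup (not built): a 066h-prefixed /6 register
   form, e.g. ModR/M byte 0xf0 (mod=3, reg=6, rm=0), selects row /6, column
   066h, i.e. iemOp_Grp9_rdrand_Rv: */
#if 0
uint8_t    const bRmExample = 0xf0;
unsigned   const idxExample = ((bRmExample >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                            + 1 /* idxPrefix: the 066h column */;
PFNIEMOPRM const pfnExample = g_apfnGroup9RegReg[idxExample];
#endif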
8025
8026
8027/**
8028 * Common 'bswap register' helper.
8029 */
8030FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8031{
8032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8033 switch (pVCpu->iem.s.enmEffOpSize)
8034 {
8035 case IEMMODE_16BIT:
8036 IEM_MC_BEGIN(1, 0);
8037 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8038 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8039 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8040 IEM_MC_ADVANCE_RIP();
8041 IEM_MC_END();
8042 return VINF_SUCCESS;
8043
8044 case IEMMODE_32BIT:
8045 IEM_MC_BEGIN(1, 0);
8046 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8047 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8048 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8049 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8050 IEM_MC_ADVANCE_RIP();
8051 IEM_MC_END();
8052 return VINF_SUCCESS;
8053
8054 case IEMMODE_64BIT:
8055 IEM_MC_BEGIN(1, 0);
8056 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8057 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8058 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8059 IEM_MC_ADVANCE_RIP();
8060 IEM_MC_END();
8061 return VINF_SUCCESS;
8062
8063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8064 }
8065}
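/* A plain-C sketch of the 32-bit byte swap done by the iemAImpl_bswap_u32
   helper invoked above (illustrative only, not built): */
#if 0
static uint32_t bswapU32Sketch(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif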
8066
8067
8068/** Opcode 0x0f 0xc8. */
8069FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8070{
8071 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8072 /* Note! The Intel manual states that R8-R15 can be accessed by using a REX.X
8073 prefix, but it appears REX.B is the correct prefix. For a parallel
8074 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8075 IEMOP_HLP_MIN_486();
8076 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8077}
8078
8079
8080/** Opcode 0x0f 0xc9. */
8081FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8082{
8083 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8084 IEMOP_HLP_MIN_486();
8085 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8086}
8087
8088
8089/** Opcode 0x0f 0xca. */
8090FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8091{
8092 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8093 IEMOP_HLP_MIN_486();
8094 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8095}
8096
8097
8098/** Opcode 0x0f 0xcb. */
8099FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8100{
8101 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8102 IEMOP_HLP_MIN_486();
8103 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8104}
8105
8106
8107/** Opcode 0x0f 0xcc. */
8108FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8109{
8110 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8111 IEMOP_HLP_MIN_486();
8112 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8113}
8114
8115
8116/** Opcode 0x0f 0xcd. */
8117FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8118{
8119 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8120 IEMOP_HLP_MIN_486();
8121 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8122}
8123
8124
8125/** Opcode 0x0f 0xce. */
8126FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8127{
8128 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8129 IEMOP_HLP_MIN_486();
8130 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8131}
8132
8133
8134/** Opcode 0x0f 0xcf. */
8135FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8136{
8137 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8138 IEMOP_HLP_MIN_486();
8139 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8140}
8141
8142
8143/* Opcode 0x0f 0xd0 - invalid */
8144/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8145FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8146/* Opcode 0xf3 0x0f 0xd0 - invalid */
8147/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8148FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8149
8150/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8151FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8152/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8153FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8154/* Opcode 0xf3 0x0f 0xd1 - invalid */
8155/* Opcode 0xf2 0x0f 0xd1 - invalid */
8156
8157/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8158FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8159/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8160FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8161/* Opcode 0xf3 0x0f 0xd2 - invalid */
8162/* Opcode 0xf2 0x0f 0xd2 - invalid */
8163
8164/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8165FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8166/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8167FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8168/* Opcode 0xf3 0x0f 0xd3 - invalid */
8169/* Opcode 0xf2 0x0f 0xd3 - invalid */
8170
8171/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8172FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8173/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8174FNIEMOP_STUB(iemOp_paddq_Vx_W);
8175/* Opcode 0xf3 0x0f 0xd4 - invalid */
8176/* Opcode 0xf2 0x0f 0xd4 - invalid */
8177
8178/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8179FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8180/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8181FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8182/* Opcode 0xf3 0x0f 0xd5 - invalid */
8183/* Opcode 0xf2 0x0f 0xd5 - invalid */
8184
8185/* Opcode 0x0f 0xd6 - invalid */
8186
8187/**
8188 * @opcode 0xd6
8189 * @oppfx 0x66
8190 * @opcpuid sse2
8191 * @opgroup og_sse2_pcksclr_datamove
8192 * @opxcpttype none
8193 * @optest op1=-1 op2=2 -> op1=2
8194 * @optest op1=0 op2=-42 -> op1=-42
8195 */
8196FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8197{
8198 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8200 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8201 {
8202 /*
8203 * Register, register.
8204 */
8205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8206 IEM_MC_BEGIN(0, 2);
8207 IEM_MC_LOCAL(uint64_t, uSrc);
8208
8209 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8210 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8211
8212 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8213 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8214
8215 IEM_MC_ADVANCE_RIP();
8216 IEM_MC_END();
8217 }
8218 else
8219 {
8220 /*
8221 * Memory, register.
8222 */
8223 IEM_MC_BEGIN(0, 2);
8224 IEM_MC_LOCAL(uint64_t, uSrc);
8225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8226
8227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8229 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8231
8232 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8233 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8234
8235 IEM_MC_ADVANCE_RIP();
8236 IEM_MC_END();
8237 }
8238 return VINF_SUCCESS;
8239}
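/* Note: the register form above zero-extends the destination XMM register,
   i.e. dst[63:0] = src[63:0] and dst[127:64] = 0 (hence the WqZxReg operand
   form), while the memory form stores only the low quadword. */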
8240
8241
8242/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
8243FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
8244/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
8245FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
8246#if 0
8247FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
8248{
8249 /* Docs says register only. */
8250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8251
8252 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8253 {
8254 case IEM_OP_PRF_SIZE_OP: /* SSE */
8255 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
8256 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8257 IEM_MC_BEGIN(2, 0);
8258 IEM_MC_ARG(uint64_t *, pDst, 0);
8259 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8260 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8261 IEM_MC_PREPARE_SSE_USAGE();
8262 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8263 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8264 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8265 IEM_MC_ADVANCE_RIP();
8266 IEM_MC_END();
8267 return VINF_SUCCESS;
8268
8269 case 0: /* MMX */
8270 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
8271 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8272 IEM_MC_BEGIN(2, 0);
8273 IEM_MC_ARG(uint64_t *, pDst, 0);
8274 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8275 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8276 IEM_MC_PREPARE_FPU_USAGE();
8277 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8278 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8279 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8280 IEM_MC_ADVANCE_RIP();
8281 IEM_MC_END();
8282 return VINF_SUCCESS;
8283
8284 default:
8285 return IEMOP_RAISE_INVALID_OPCODE();
8286 }
8287}
8288#endif
8289
8290
8291/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8292FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8293{
8294 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8295 /** @todo testcase: Check that the instruction implicitly clears the high
8296 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
8297 * and opcode modifications are made to work with the whole width (not
8298 * just 128). */
8299 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8300 /* Docs says register only. */
8301 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8303 {
8304 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8305 IEM_MC_BEGIN(2, 0);
8306 IEM_MC_ARG(uint64_t *, pDst, 0);
8307 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8308 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8309 IEM_MC_PREPARE_FPU_USAGE();
8310 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8311 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8312 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8313 IEM_MC_ADVANCE_RIP();
8314 IEM_MC_END();
8315 return VINF_SUCCESS;
8316 }
8317 return IEMOP_RAISE_INVALID_OPCODE();
8318}
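/* A plain-C sketch of what the pmovmskb helpers compute (illustrative only,
   not built): bit i of the result is the most significant bit of source
   byte i, and all higher destination bits end up zero. */
#if 0
static uint32_t pmovmskbU64Sketch(uint64_t uSrc)
{
    uint32_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= (uint32_t)((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif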
8319
8320/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8321FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8322{
8323 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8324 /** @todo testcase: Check that the instruction implicitly clears the high
8325 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
8326 * and opcode modifications are made to work with the whole width (not
8327 * just 128). */
8328 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8329 /* Docs says register only. */
8330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8332 {
8333 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8334 IEM_MC_BEGIN(2, 0);
8335 IEM_MC_ARG(uint64_t *, pDst, 0);
8336 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8337 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8338 IEM_MC_PREPARE_SSE_USAGE();
8339 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8340 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8341 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8342 IEM_MC_ADVANCE_RIP();
8343 IEM_MC_END();
8344 return VINF_SUCCESS;
8345 }
8346 return IEMOP_RAISE_INVALID_OPCODE();
8347}
8348
8349/* Opcode 0xf3 0x0f 0xd7 - invalid */
8350/* Opcode 0xf2 0x0f 0xd7 - invalid */
8351
8352
8353/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8354FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8355/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8356FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8357/* Opcode 0xf3 0x0f 0xd8 - invalid */
8358/* Opcode 0xf2 0x0f 0xd8 - invalid */
8359
8360/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8361FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8362/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8363FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8364/* Opcode 0xf3 0x0f 0xd9 - invalid */
8365/* Opcode 0xf2 0x0f 0xd9 - invalid */
8366
8367/** Opcode 0x0f 0xda - pminub Pq, Qq */
8368FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8369/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8370FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8371/* Opcode 0xf3 0x0f 0xda - invalid */
8372/* Opcode 0xf2 0x0f 0xda - invalid */
8373
8374/** Opcode 0x0f 0xdb - pand Pq, Qq */
8375FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8376/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8377FNIEMOP_STUB(iemOp_pand_Vx_W);
8378/* Opcode 0xf3 0x0f 0xdb - invalid */
8379/* Opcode 0xf2 0x0f 0xdb - invalid */
8380
8381/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8382FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8383/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8384FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8385/* Opcode 0xf3 0x0f 0xdc - invalid */
8386/* Opcode 0xf2 0x0f 0xdc - invalid */
8387
8388/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8389FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8390/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8391FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8392/* Opcode 0xf3 0x0f 0xdd - invalid */
8393/* Opcode 0xf2 0x0f 0xdd - invalid */
8394
8395/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8396FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8397/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8398FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8399/* Opcode 0xf3 0x0f 0xde - invalid */
8400/* Opcode 0xf2 0x0f 0xde - invalid */
8401
8402/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8403FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8404/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8405FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8406/* Opcode 0xf3 0x0f 0xdf - invalid */
8407/* Opcode 0xf2 0x0f 0xdf - invalid */
8408
8409/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8410FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8411/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8412FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8413/* Opcode 0xf3 0x0f 0xe0 - invalid */
8414/* Opcode 0xf2 0x0f 0xe0 - invalid */
8415
8416/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8417FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8418/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8419FNIEMOP_STUB(iemOp_psraw_Vx_W);
8420/* Opcode 0xf3 0x0f 0xe1 - invalid */
8421/* Opcode 0xf2 0x0f 0xe1 - invalid */
8422
8423/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8424FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8425/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8426FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8427/* Opcode 0xf3 0x0f 0xe2 - invalid */
8428/* Opcode 0xf2 0x0f 0xe2 - invalid */
8429
8430/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8431FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8432/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8433FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8434/* Opcode 0xf3 0x0f 0xe3 - invalid */
8435/* Opcode 0xf2 0x0f 0xe3 - invalid */
8436
8437/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8438FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8439/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8440FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8441/* Opcode 0xf3 0x0f 0xe4 - invalid */
8442/* Opcode 0xf2 0x0f 0xe4 - invalid */
8443
8444/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8445FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8446/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8447FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8448/* Opcode 0xf3 0x0f 0xe5 - invalid */
8449/* Opcode 0xf2 0x0f 0xe5 - invalid */
8450
8451/* Opcode 0x0f 0xe6 - invalid */
8452/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8453FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8454/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8455FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8456/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8457FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8458
8459
8460/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8461FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8462{
8463 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8465 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8466 {
8467 /* Register, memory. */
8468 IEM_MC_BEGIN(0, 2);
8469 IEM_MC_LOCAL(uint64_t, uSrc);
8470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8471
8472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8474 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8475 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8476
8477 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8478 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8479
8480 IEM_MC_ADVANCE_RIP();
8481 IEM_MC_END();
8482 return VINF_SUCCESS;
8483 }
8484 /* The register, register encoding is invalid. */
8485 return IEMOP_RAISE_INVALID_OPCODE();
8486}
8487
8488/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8489FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8490{
8491 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8492 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8493 {
8494 /* Register, memory. */
8495 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8496 IEM_MC_BEGIN(0, 2);
8497 IEM_MC_LOCAL(RTUINT128U, uSrc);
8498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8499
8500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8502 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8503 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8504
8505 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8506 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8507
8508 IEM_MC_ADVANCE_RIP();
8509 IEM_MC_END();
8510 return VINF_SUCCESS;
8511 }
8512
8513 /* The register, register encoding is invalid. */
8514 return IEMOP_RAISE_INVALID_OPCODE();
8515}
8516
8517/* Opcode 0xf3 0x0f 0xe7 - invalid */
8518/* Opcode 0xf2 0x0f 0xe7 - invalid */
8519
8520
8521/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8522FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8523/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8524FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8525/* Opcode 0xf3 0x0f 0xe8 - invalid */
8526/* Opcode 0xf2 0x0f 0xe8 - invalid */
8527
8528/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8529FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8530/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8531FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8532/* Opcode 0xf3 0x0f 0xe9 - invalid */
8533/* Opcode 0xf2 0x0f 0xe9 - invalid */
8534
8535/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8536FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8537/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8538FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8539/* Opcode 0xf3 0x0f 0xea - invalid */
8540/* Opcode 0xf2 0x0f 0xea - invalid */
8541
8542/** Opcode 0x0f 0xeb - por Pq, Qq */
8543FNIEMOP_STUB(iemOp_por_Pq_Qq);
8544/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8545FNIEMOP_STUB(iemOp_por_Vx_W);
8546/* Opcode 0xf3 0x0f 0xeb - invalid */
8547/* Opcode 0xf2 0x0f 0xeb - invalid */
8548
8549/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8550FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8551/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8552FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8553/* Opcode 0xf3 0x0f 0xec - invalid */
8554/* Opcode 0xf2 0x0f 0xec - invalid */
8555
8556/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8557FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8558/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8559FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8560/* Opcode 0xf3 0x0f 0xed - invalid */
8561/* Opcode 0xf2 0x0f 0xed - invalid */
8562
8563/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8564FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8565/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8566FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8567/* Opcode 0xf3 0x0f 0xee - invalid */
8568/* Opcode 0xf2 0x0f 0xee - invalid */
8569
8570
8571/** Opcode 0x0f 0xef - pxor Pq, Qq */
8572FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8573{
8574 IEMOP_MNEMONIC(pxor, "pxor");
8575 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8576}
8577
8578/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8579FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8580{
8581 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8582 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8583}

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
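    /* Intel CPUs decode a ModR/M byte for UD0 and resolve any memory operand
       it encodes before raising #UD (per the SDM), which is what the extra
       fetch and effective address calculation below mirror; other vendors
       raise #UD without consuming further bytes. */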
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*         no prefix,                    066h prefix,               f3h prefix,              f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps,                iemOp_movupd_Vpd_Wpd,      iemOp_movss_Vss_Wss,     iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps,                iemOp_movupd_Wpd_Vpd,      iemOp_movss_Wss_Vss,     iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps,         iemOp_movlpd_Vq_Mq,        iemOp_movsldup_Vdq_Wdq,  iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq,                  iemOp_movlpd_Mq_Vq,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx,                iemOp_unpcklpd_Vx_Wx,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx,                iemOp_unpckhpd_Vx_Wx,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq,       iemOp_movshdup_Vdq_Wdq,  iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq,                  iemOp_movhpd_Mq_Vq,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd,                     iemOp_mov_Rd_Cd,           iemOp_mov_Rd_Cd,         iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,                     iemOp_mov_Rd_Dd,           iemOp_mov_Rd_Dd,         iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,                     iemOp_mov_Cd_Rd,           iemOp_mov_Cd_Rd,         iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,                     iemOp_mov_Dd_Rd,           iemOp_mov_Dd_Rd,         iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,                     iemOp_mov_Rd_Td,           iemOp_mov_Rd_Td,         iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,                       iemOp_Invalid,             iemOp_Invalid,           iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,                     iemOp_mov_Td_Rd,           iemOp_mov_Td_Rd,         iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,                       iemOp_Invalid,             iemOp_Invalid,           iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps,                iemOp_movapd_Vpd_Wpd,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps,                iemOp_movapd_Wpd_Vpd,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi,              iemOp_cvtpi2pd_Vpd_Qpi,    iemOp_cvtsi2ss_Vss_Ey,   iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps,               iemOp_movntpd_Mpd_Vpd,     iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps,             iemOp_cvttpd2pi_Ppi_Wpd,   iemOp_cvttss2si_Gy_Wss,  iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps,              iemOp_cvtpd2pi_Qpi_Wpd,    iemOp_cvtss2si_Gy_Wss,   iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss,               iemOp_ucomisd_Vsd_Wsd,     iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss,                iemOp_comisd_Vsd_Wsd,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups,               iemOp_movmskpd_Gy_Upd,     iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps,                iemOp_sqrtpd_Vpd_Wpd,      iemOp_sqrtss_Vss_Wss,    iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps,               iemOp_InvalidNeedRM,       iemOp_rsqrtss_Vss_Wss,   iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps,                 iemOp_InvalidNeedRM,       iemOp_rcpss_Vss_Wss,     iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps,                 iemOp_andpd_Vpd_Wpd,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps,                iemOp_andnpd_Vpd_Wpd,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps,                  iemOp_orpd_Vpd_Wpd,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps,                 iemOp_xorpd_Vpd_Wpd,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps,                 iemOp_addpd_Vpd_Wpd,       iemOp_addss_Vss_Wss,     iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps,                 iemOp_mulpd_Vpd_Wpd,       iemOp_mulss_Vss_Wss,     iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps,              iemOp_cvtpd2ps_Vps_Wpd,    iemOp_cvtss2sd_Vsd_Wss,  iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq,              iemOp_cvtps2dq_Vdq_Wps,    iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps,                 iemOp_subpd_Vpd_Wpd,       iemOp_subss_Vss_Wss,     iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps,                 iemOp_minpd_Vpd_Wpd,       iemOp_minss_Vss_Wss,     iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps,                 iemOp_divpd_Vpd_Wpd,       iemOp_divss_Vss_Wss,     iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps,                 iemOp_maxpd_Vpd_Wpd,       iemOp_maxss_Vss_Wss,     iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd,               iemOp_punpcklbw_Vx_Wx,     iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd,               iemOp_punpcklwd_Vx_Wx,     iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd,               iemOp_punpckldq_Vx_Wx,     iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq,                iemOp_packsswb_Vx_Wx,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq,                 iemOp_pcmpgtb_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq,                 iemOp_pcmpgtw_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq,                 iemOp_pcmpgtd_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq,                iemOp_packuswb_Vx_W,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd,               iemOp_punpckhbw_Vx_Wx,     iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd,               iemOp_punpckhwd_Vx_Wx,     iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd,               iemOp_punpckhdq_Vx_W,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd,                iemOp_packssdw_Vx_Wx,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM,                 iemOp_punpcklqdq_Vx_Wx,    iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM,                 iemOp_punpckhqdq_Vx_W,     iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey,                  iemOp_movd_q_Vy_Ey,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq,                    iemOp_movdqa_Vx_Wx,        iemOp_movdqu_Vx_Wx,      iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib,               iemOp_pshufd_Vx_Wx_Ib,     iemOp_pshufhw_Vx_Wx_Ib,  iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq,                 iemOp_pcmpeqb_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq,                 iemOp_pcmpeqw_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq,                 iemOp_pcmpeqd_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms,                          iemOp_InvalidNeedRM,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy,                  iemOp_AmdGrp17,            iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey,                 iemOp_InvalidNeedRM,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM,                 iemOp_InvalidNeedRM,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM,                 iemOp_InvalidNeedRM,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM,                 iemOp_haddpd_Vpd_Wpd,      iemOp_InvalidNeedRM,     iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM,                 iemOp_hsubpd_Vpd_Wpd,      iemOp_InvalidNeedRM,     iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd,                  iemOp_movd_q_Ey_Vy,        iemOp_movq_Vq_Wq,        iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq,                    iemOp_movdqa_Wx_Vx,        iemOp_movdqu_Wx_Vx,      iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe,                          iemOp_InvalidNeedRM,       iemOp_popcnt_Gv_Ev,      iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev,                     iemOp_bsf_Gv_Ev,           iemOp_tzcnt_Gv_Ev,       iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,                     iemOp_bsr_Gv_Ev,           iemOp_lzcnt_Gv_Ev,       iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib,              iemOp_cmppd_Vpd_Wpd_Ib,    iemOp_cmpss_Vss_Wss_Ib,  iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,                  iemOp_InvalidNeedRM,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib,             iemOp_pinsrw_Vdq_RyMw_Ib,  iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib,               iemOp_pextrw_Gd_Udq_Ib,    iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib,             iemOp_shufpd_Vpd_Wpd_Ib,   iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM,                 iemOp_addsubpd_Vpd_Wpd,    iemOp_InvalidNeedRM,     iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq,                   iemOp_psrlw_Vx_W,          iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq,                   iemOp_psrld_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq,                   iemOp_psrlq_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq,                   iemOp_paddq_Vx_W,          iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq,                  iemOp_pmullw_Vx_Wx,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM,                 iemOp_movq_Wq_Vq,          iemOp_movq2dq_Vdq_Nq,    iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq,                iemOp_pmovmskb_Gd_Ux,      iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq,                 iemOp_psubusb_Vx_W,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq,                 iemOp_psubusw_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq,                  iemOp_pminub_Vx_Wx,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq,                    iemOp_pand_Vx_W,           iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq,                 iemOp_paddusb_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq,                 iemOp_paddusw_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq,                  iemOp_pmaxub_Vx_W,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq,                   iemOp_pandn_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq,                   iemOp_pavgb_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq,                   iemOp_psraw_Vx_W,          iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq,                   iemOp_psrad_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq,                   iemOp_pavgw_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq,                 iemOp_pmulhuw_Vx_W,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq,                  iemOp_pmulhw_Vx_Wx,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM,                 iemOp_cvttpd2dq_Vx_Wpd,    iemOp_cvtdq2pd_Vx_Wpd,   iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq,                  iemOp_movntdq_Mx_Vx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq,                  iemOp_psubsb_Vx_W,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq,                  iemOp_psubsw_Vx_Wx,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq,                  iemOp_pminsw_Vx_Wx,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq,                     iemOp_por_Vx_W,            iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq,                  iemOp_paddsb_Vx_Wx,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq,                  iemOp_paddsw_Vx_Wx,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq,                  iemOp_pmaxsw_Vx_W,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq,                    iemOp_pxor_Vx_Wx,          iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM,                 iemOp_InvalidNeedRM,       iemOp_InvalidNeedRM,     iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq,                   iemOp_psllw_Vx_W,          iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq,                   iemOp_pslld_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq,                   iemOp_psllq_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq,                 iemOp_pmuludq_Vx_W,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq,                 iemOp_pmaddwd_Vx_Wx,       iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq,                  iemOp_psadbw_Vx_Wx,        iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq,                iemOp_maskmovdqu_Vdq_Udq,  iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq,                   iemOp_psubb_Vx_W,          iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq,                   iemOp_psubw_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq,                   iemOp_psubd_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq,                   iemOp_psubq_Vx_W,          iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq,                   iemOp_paddb_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq,                   iemOp_paddw_Vx_Wx,         iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq,                   iemOp_paddd_Vx_W,          iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
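
/*
 * For orientation (a sketch, not part of this file's interface): the 0x0f
 * escape decoder is expected to index this map along the lines of
 *
 *     g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]
 *
 * with idxPrefix selecting the column -- 0 for no prefix, 1 for 0x66, 2 for
 * 0xf3 and 3 for 0xf2 -- which is why the table must hold exactly 256 * 4
 * = 1024 entries, as asserted above.  IEMOP_X4 simply repeats one handler
 * across all four columns.
 */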

/** @} */
