VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 66792

Last change on this file since 66792 was 66792, checked in by vboxsync, 8 years ago

IEM: Implemented movhpd Mq,Vq (66 0f 17).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 319.8 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66792 2017-05-04 12:32:54Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.215389.xyz. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
38 switch (pVCpu->iem.s.enmEffOpSize)
39 {
40 case IEMMODE_16BIT:
41 IEM_MC_BEGIN(0, 1);
42 IEM_MC_LOCAL(uint16_t, u16Ldtr);
43 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
44 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
45 IEM_MC_ADVANCE_RIP();
46 IEM_MC_END();
47 break;
48
49 case IEMMODE_32BIT:
50 IEM_MC_BEGIN(0, 1);
51 IEM_MC_LOCAL(uint32_t, u32Ldtr);
52 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
53 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
54 IEM_MC_ADVANCE_RIP();
55 IEM_MC_END();
56 break;
57
58 case IEMMODE_64BIT:
59 IEM_MC_BEGIN(0, 1);
60 IEM_MC_LOCAL(uint64_t, u64Ldtr);
61 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
62 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
63 IEM_MC_ADVANCE_RIP();
64 IEM_MC_END();
65 break;
66
67 IEM_NOT_REACHED_DEFAULT_CASE_RET();
68 }
69 }
70 else
71 {
72 IEM_MC_BEGIN(0, 2);
73 IEM_MC_LOCAL(uint16_t, u16Ldtr);
74 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
75 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
76 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
77 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
78 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
79 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
85
86
87/** Opcode 0x0f 0x00 /1. */
88FNIEMOPRM_DEF(iemOp_Grp6_str)
89{
90 IEMOP_MNEMONIC(str, "str Rv/Mw");
91 IEMOP_HLP_MIN_286();
92 IEMOP_HLP_NO_REAL_OR_V86_MODE();
93
94 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
95 {
96 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
97 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
98 switch (pVCpu->iem.s.enmEffOpSize)
99 {
100 case IEMMODE_16BIT:
101 IEM_MC_BEGIN(0, 1);
102 IEM_MC_LOCAL(uint16_t, u16Tr);
103 IEM_MC_FETCH_TR_U16(u16Tr);
104 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
105 IEM_MC_ADVANCE_RIP();
106 IEM_MC_END();
107 break;
108
109 case IEMMODE_32BIT:
110 IEM_MC_BEGIN(0, 1);
111 IEM_MC_LOCAL(uint32_t, u32Tr);
112 IEM_MC_FETCH_TR_U32(u32Tr);
113 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
114 IEM_MC_ADVANCE_RIP();
115 IEM_MC_END();
116 break;
117
118 case IEMMODE_64BIT:
119 IEM_MC_BEGIN(0, 1);
120 IEM_MC_LOCAL(uint64_t, u64Tr);
121 IEM_MC_FETCH_TR_U64(u64Tr);
122 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
123 IEM_MC_ADVANCE_RIP();
124 IEM_MC_END();
125 break;
126
127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
128 }
129 }
130 else
131 {
132 IEM_MC_BEGIN(0, 2);
133 IEM_MC_LOCAL(uint16_t, u16Tr);
134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
136 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
137 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
138 IEM_MC_FETCH_TR_U16(u16Tr);
139 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
140 IEM_MC_ADVANCE_RIP();
141 IEM_MC_END();
142 }
143 return VINF_SUCCESS;
144}
145
146
147/** Opcode 0x0f 0x00 /2. */
148FNIEMOPRM_DEF(iemOp_Grp6_lldt)
149{
150 IEMOP_MNEMONIC(lldt, "lldt Ew");
151 IEMOP_HLP_MIN_286();
152 IEMOP_HLP_NO_REAL_OR_V86_MODE();
153
154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
155 {
156 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
157 IEM_MC_BEGIN(1, 0);
158 IEM_MC_ARG(uint16_t, u16Sel, 0);
159 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
160 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
161 IEM_MC_END();
162 }
163 else
164 {
165 IEM_MC_BEGIN(1, 1);
166 IEM_MC_ARG(uint16_t, u16Sel, 0);
167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
169 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
170 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
171 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
172 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
173 IEM_MC_END();
174 }
175 return VINF_SUCCESS;
176}
177
178
179/** Opcode 0x0f 0x00 /3. */
180FNIEMOPRM_DEF(iemOp_Grp6_ltr)
181{
182 IEMOP_MNEMONIC(ltr, "ltr Ew");
183 IEMOP_HLP_MIN_286();
184 IEMOP_HLP_NO_REAL_OR_V86_MODE();
185
186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
187 {
188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
189 IEM_MC_BEGIN(1, 0);
190 IEM_MC_ARG(uint16_t, u16Sel, 0);
191 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
192 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
193 IEM_MC_END();
194 }
195 else
196 {
197 IEM_MC_BEGIN(1, 1);
198 IEM_MC_ARG(uint16_t, u16Sel, 0);
199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
202 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test ordre */
203 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
204 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
205 IEM_MC_END();
206 }
207 return VINF_SUCCESS;
208}
209
210
211/** Opcode 0x0f 0x00 /3. */
212FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
213{
214 IEMOP_HLP_MIN_286();
215 IEMOP_HLP_NO_REAL_OR_V86_MODE();
216
217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
218 {
219 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
220 IEM_MC_BEGIN(2, 0);
221 IEM_MC_ARG(uint16_t, u16Sel, 0);
222 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
223 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
224 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
225 IEM_MC_END();
226 }
227 else
228 {
229 IEM_MC_BEGIN(2, 1);
230 IEM_MC_ARG(uint16_t, u16Sel, 0);
231 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
234 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
235 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
236 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
237 IEM_MC_END();
238 }
239 return VINF_SUCCESS;
240}
241
242
243/** Opcode 0x0f 0x00 /4. */
244FNIEMOPRM_DEF(iemOp_Grp6_verr)
245{
246 IEMOP_MNEMONIC(verr, "verr Ew");
247 IEMOP_HLP_MIN_286();
248 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
249}
250
251
252/** Opcode 0x0f 0x00 /5. */
253FNIEMOPRM_DEF(iemOp_Grp6_verw)
254{
255 IEMOP_MNEMONIC(verw, "verw Ew");
256 IEMOP_HLP_MIN_286();
257 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
258}
259
260
261/**
262 * Group 6 jump table.
263 */
264IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
265{
266 iemOp_Grp6_sldt,
267 iemOp_Grp6_str,
268 iemOp_Grp6_lldt,
269 iemOp_Grp6_ltr,
270 iemOp_Grp6_verr,
271 iemOp_Grp6_verw,
272 iemOp_InvalidWithRM,
273 iemOp_InvalidWithRM
274};
275
276/** Opcode 0x0f 0x00. */
277FNIEMOP_DEF(iemOp_Grp6)
278{
279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
280 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
281}
282
283
284/** Opcode 0x0f 0x01 /0. */
285FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
286{
287 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
288 IEMOP_HLP_MIN_286();
289 IEMOP_HLP_64BIT_OP_SIZE();
290 IEM_MC_BEGIN(2, 1);
291 IEM_MC_ARG(uint8_t, iEffSeg, 0);
292 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
295 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
296 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
297 IEM_MC_END();
298 return VINF_SUCCESS;
299}
300
301
302/** Opcode 0x0f 0x01 /0. */
303FNIEMOP_DEF(iemOp_Grp7_vmcall)
304{
305 IEMOP_BITCH_ABOUT_STUB();
306 return IEMOP_RAISE_INVALID_OPCODE();
307}
308
309
310/** Opcode 0x0f 0x01 /0. */
311FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
312{
313 IEMOP_BITCH_ABOUT_STUB();
314 return IEMOP_RAISE_INVALID_OPCODE();
315}
316
317
318/** Opcode 0x0f 0x01 /0. */
319FNIEMOP_DEF(iemOp_Grp7_vmresume)
320{
321 IEMOP_BITCH_ABOUT_STUB();
322 return IEMOP_RAISE_INVALID_OPCODE();
323}
324
325
326/** Opcode 0x0f 0x01 /0. */
327FNIEMOP_DEF(iemOp_Grp7_vmxoff)
328{
329 IEMOP_BITCH_ABOUT_STUB();
330 return IEMOP_RAISE_INVALID_OPCODE();
331}
332
333
334/** Opcode 0x0f 0x01 /1. */
335FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
336{
337 IEMOP_MNEMONIC(sidt, "sidt Ms");
338 IEMOP_HLP_MIN_286();
339 IEMOP_HLP_64BIT_OP_SIZE();
340 IEM_MC_BEGIN(2, 1);
341 IEM_MC_ARG(uint8_t, iEffSeg, 0);
342 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
345 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
346 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
347 IEM_MC_END();
348 return VINF_SUCCESS;
349}
350
351
352/** Opcode 0x0f 0x01 /1. */
353FNIEMOP_DEF(iemOp_Grp7_monitor)
354{
355 IEMOP_MNEMONIC(monitor, "monitor");
356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
357 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
358}
359
360
361/** Opcode 0x0f 0x01 /1. */
362FNIEMOP_DEF(iemOp_Grp7_mwait)
363{
364 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
366 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
367}
368
369
370/** Opcode 0x0f 0x01 /2. */
371FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
372{
373 IEMOP_MNEMONIC(lgdt, "lgdt");
374 IEMOP_HLP_64BIT_OP_SIZE();
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint8_t, iEffSeg, 0);
377 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
378 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
381 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
382 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
383 IEM_MC_END();
384 return VINF_SUCCESS;
385}
386
387
388/** Opcode 0x0f 0x01 0xd0. */
389FNIEMOP_DEF(iemOp_Grp7_xgetbv)
390{
391 IEMOP_MNEMONIC(xgetbv, "xgetbv");
392 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
393 {
394 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
395 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
396 }
397 return IEMOP_RAISE_INVALID_OPCODE();
398}
399
400
401/** Opcode 0x0f 0x01 0xd1. */
402FNIEMOP_DEF(iemOp_Grp7_xsetbv)
403{
404 IEMOP_MNEMONIC(xsetbv, "xsetbv");
405 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
406 {
407 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
408 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
409 }
410 return IEMOP_RAISE_INVALID_OPCODE();
411}
412
413
414/** Opcode 0x0f 0x01 /3. */
415FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
416{
417 IEMOP_MNEMONIC(lidt, "lidt");
418 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
419 ? IEMMODE_64BIT
420 : pVCpu->iem.s.enmEffOpSize;
421 IEM_MC_BEGIN(3, 1);
422 IEM_MC_ARG(uint8_t, iEffSeg, 0);
423 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
424 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
427 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
428 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
429 IEM_MC_END();
430 return VINF_SUCCESS;
431}
432
433
434#ifdef VBOX_WITH_NESTED_HWVIRT
435/** Opcode 0x0f 0x01 0xd8. */
436FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
437{
438 IEMOP_MNEMONIC(vmrun, "vmrun");
439 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
440}
441
442/** Opcode 0x0f 0x01 0xd9. */
443FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
444{
445 IEMOP_MNEMONIC(vmmcall, "vmmcall");
446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
447}
448
449
450/** Opcode 0x0f 0x01 0xda. */
451FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
452{
453 IEMOP_MNEMONIC(vmload, "vmload");
454 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
455}
456
457
458/** Opcode 0x0f 0x01 0xdb. */
459FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
460{
461 IEMOP_MNEMONIC(vmsave, "vmsave");
462 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
463}
464
465
466/** Opcode 0x0f 0x01 0xdc. */
467FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
468{
469 IEMOP_MNEMONIC(stgi, "stgi");
470 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
471}
472
473
474/** Opcode 0x0f 0x01 0xdd. */
475FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
476{
477 IEMOP_MNEMONIC(clgi, "clgi");
478 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
479}
480
481
482/** Opcode 0x0f 0x01 0xdf. */
483FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
484{
485 IEMOP_MNEMONIC(invlpga, "invlpga");
486 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
487}
488
489
490/** Opcode 0x0f 0x01 0xde. */
491FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
492{
493 IEMOP_MNEMONIC(skinit, "skinit");
494 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
495}
496#else
497/** Opcode 0x0f 0x01 0xd8. */
498FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
499
500/** Opcode 0x0f 0x01 0xd9. */
501FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
502
503/** Opcode 0x0f 0x01 0xda. */
504FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
505
506/** Opcode 0x0f 0x01 0xdb. */
507FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
508
509/** Opcode 0x0f 0x01 0xdc. */
510FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
511
512/** Opcode 0x0f 0x01 0xdd. */
513FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
514
515/** Opcode 0x0f 0x01 0xdf. */
516FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
517
518/** Opcode 0x0f 0x01 0xde. */
519FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
520#endif /* VBOX_WITH_NESTED_HWVIRT */
521
522/** Opcode 0x0f 0x01 /4. */
523FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
524{
525 IEMOP_MNEMONIC(smsw, "smsw");
526 IEMOP_HLP_MIN_286();
527 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
528 {
529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
530 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
531 switch (pVCpu->iem.s.enmEffOpSize)
532 {
533 case IEMMODE_16BIT:
534 IEM_MC_BEGIN(0, 1);
535 IEM_MC_LOCAL(uint16_t, u16Tmp);
536 IEM_MC_FETCH_CR0_U16(u16Tmp);
537 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
538 { /* likely */ }
539 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
540 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
541 else
542 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
543 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
544 IEM_MC_ADVANCE_RIP();
545 IEM_MC_END();
546 return VINF_SUCCESS;
547
548 case IEMMODE_32BIT:
549 IEM_MC_BEGIN(0, 1);
550 IEM_MC_LOCAL(uint32_t, u32Tmp);
551 IEM_MC_FETCH_CR0_U32(u32Tmp);
552 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
553 IEM_MC_ADVANCE_RIP();
554 IEM_MC_END();
555 return VINF_SUCCESS;
556
557 case IEMMODE_64BIT:
558 IEM_MC_BEGIN(0, 1);
559 IEM_MC_LOCAL(uint64_t, u64Tmp);
560 IEM_MC_FETCH_CR0_U64(u64Tmp);
561 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
562 IEM_MC_ADVANCE_RIP();
563 IEM_MC_END();
564 return VINF_SUCCESS;
565
566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
567 }
568 }
569 else
570 {
571 /* Ignore operand size here, memory refs are always 16-bit. */
572 IEM_MC_BEGIN(0, 2);
573 IEM_MC_LOCAL(uint16_t, u16Tmp);
574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
577 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
578 IEM_MC_FETCH_CR0_U16(u16Tmp);
579 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
580 { /* likely */ }
581 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
582 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
583 else
584 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
585 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
586 IEM_MC_ADVANCE_RIP();
587 IEM_MC_END();
588 return VINF_SUCCESS;
589 }
590}
591
592
593/** Opcode 0x0f 0x01 /6. */
594FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
595{
596 /* The operand size is effectively ignored, all is 16-bit and only the
597 lower 3-bits are used. */
598 IEMOP_MNEMONIC(lmsw, "lmsw");
599 IEMOP_HLP_MIN_286();
600 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
601 {
602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
603 IEM_MC_BEGIN(1, 0);
604 IEM_MC_ARG(uint16_t, u16Tmp, 0);
605 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
606 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
607 IEM_MC_END();
608 }
609 else
610 {
611 IEM_MC_BEGIN(1, 1);
612 IEM_MC_ARG(uint16_t, u16Tmp, 0);
613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
616 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
617 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
618 IEM_MC_END();
619 }
620 return VINF_SUCCESS;
621}
622
623
624/** Opcode 0x0f 0x01 /7. */
625FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
626{
627 IEMOP_MNEMONIC(invlpg, "invlpg");
628 IEMOP_HLP_MIN_486();
629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
630 IEM_MC_BEGIN(1, 1);
631 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
633 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
634 IEM_MC_END();
635 return VINF_SUCCESS;
636}
637
638
639/** Opcode 0x0f 0x01 /7. */
640FNIEMOP_DEF(iemOp_Grp7_swapgs)
641{
642 IEMOP_MNEMONIC(swapgs, "swapgs");
643 IEMOP_HLP_ONLY_64BIT();
644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
646}
647
648
649/** Opcode 0x0f 0x01 /7. */
650FNIEMOP_DEF(iemOp_Grp7_rdtscp)
651{
652 IEMOP_MNEMONIC(rdtscp, "rdtscp");
653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
654 /** @todo SVM intercept removal from here. */
655 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
656 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
657}
658
659
660/**
661 * Group 7 jump table, memory variant.
662 */
663IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
664{
665 iemOp_Grp7_sgdt,
666 iemOp_Grp7_sidt,
667 iemOp_Grp7_lgdt,
668 iemOp_Grp7_lidt,
669 iemOp_Grp7_smsw,
670 iemOp_InvalidWithRM,
671 iemOp_Grp7_lmsw,
672 iemOp_Grp7_invlpg
673};
674
675
676/** Opcode 0x0f 0x01. */
677FNIEMOP_DEF(iemOp_Grp7)
678{
679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
680 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
681 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
682
683 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
684 {
685 case 0:
686 switch (bRm & X86_MODRM_RM_MASK)
687 {
688 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
689 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
690 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
691 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
692 }
693 return IEMOP_RAISE_INVALID_OPCODE();
694
695 case 1:
696 switch (bRm & X86_MODRM_RM_MASK)
697 {
698 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
699 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
700 }
701 return IEMOP_RAISE_INVALID_OPCODE();
702
703 case 2:
704 switch (bRm & X86_MODRM_RM_MASK)
705 {
706 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
707 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
708 }
709 return IEMOP_RAISE_INVALID_OPCODE();
710
711 case 3:
712 switch (bRm & X86_MODRM_RM_MASK)
713 {
714 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
715 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
716 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
717 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
718 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
719 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
720 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
721 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
723 }
724
725 case 4:
726 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
727
728 case 5:
729 return IEMOP_RAISE_INVALID_OPCODE();
730
731 case 6:
732 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
733
734 case 7:
735 switch (bRm & X86_MODRM_RM_MASK)
736 {
737 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
738 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
739 }
740 return IEMOP_RAISE_INVALID_OPCODE();
741
742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
743 }
744}
745
746/** Opcode 0x0f 0x00 /3. */
747FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
748{
749 IEMOP_HLP_NO_REAL_OR_V86_MODE();
750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
751
752 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
753 {
754 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
755 switch (pVCpu->iem.s.enmEffOpSize)
756 {
757 case IEMMODE_16BIT:
758 {
759 IEM_MC_BEGIN(3, 0);
760 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
761 IEM_MC_ARG(uint16_t, u16Sel, 1);
762 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
763
764 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
765 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
766 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
767
768 IEM_MC_END();
769 return VINF_SUCCESS;
770 }
771
772 case IEMMODE_32BIT:
773 case IEMMODE_64BIT:
774 {
775 IEM_MC_BEGIN(3, 0);
776 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
777 IEM_MC_ARG(uint16_t, u16Sel, 1);
778 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
779
780 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
781 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
782 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
783
784 IEM_MC_END();
785 return VINF_SUCCESS;
786 }
787
788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
789 }
790 }
791 else
792 {
793 switch (pVCpu->iem.s.enmEffOpSize)
794 {
795 case IEMMODE_16BIT:
796 {
797 IEM_MC_BEGIN(3, 1);
798 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
799 IEM_MC_ARG(uint16_t, u16Sel, 1);
800 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
802
803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
804 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
805
806 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
807 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
808 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
809
810 IEM_MC_END();
811 return VINF_SUCCESS;
812 }
813
814 case IEMMODE_32BIT:
815 case IEMMODE_64BIT:
816 {
817 IEM_MC_BEGIN(3, 1);
818 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
819 IEM_MC_ARG(uint16_t, u16Sel, 1);
820 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
822
823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
824 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
825/** @todo testcase: make sure it's a 16-bit read. */
826
827 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
828 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
829 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
830
831 IEM_MC_END();
832 return VINF_SUCCESS;
833 }
834
835 IEM_NOT_REACHED_DEFAULT_CASE_RET();
836 }
837 }
838}
839
840
841
842/** Opcode 0x0f 0x02. */
843FNIEMOP_DEF(iemOp_lar_Gv_Ew)
844{
845 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
846 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
847}
848
849
850/** Opcode 0x0f 0x03. */
851FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
852{
853 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
854 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
855}
856
857
858/** Opcode 0x0f 0x05. */
859FNIEMOP_DEF(iemOp_syscall)
860{
861 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
863 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
864}
865
866
867/** Opcode 0x0f 0x06. */
868FNIEMOP_DEF(iemOp_clts)
869{
870 IEMOP_MNEMONIC(clts, "clts");
871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
872 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
873}
874
875
876/** Opcode 0x0f 0x07. */
877FNIEMOP_DEF(iemOp_sysret)
878{
879 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
881 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
882}
883
884
885/** Opcode 0x0f 0x08. */
886FNIEMOP_DEF(iemOp_invd)
887{
888 IEMOP_MNEMONIC(invd, "invd");
889#ifdef VBOX_WITH_NESTED_HWVIRT
890 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
891 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
892#else
893 RT_NOREF_PV(pVCpu);
894#endif
895 /** @todo implement invd for the regular case (above only handles nested SVM
896 * exits). */
897 IEMOP_BITCH_ABOUT_STUB();
898 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
899}
900
901// IEMOP_HLP_MIN_486();
902
903
904/** Opcode 0x0f 0x09. */
905FNIEMOP_DEF(iemOp_wbinvd)
906{
907 IEMOP_MNEMONIC(wbinvd, "wbinvd");
908 IEMOP_HLP_MIN_486();
909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
910 IEM_MC_BEGIN(0, 0);
911 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
912 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
913 IEM_MC_ADVANCE_RIP();
914 IEM_MC_END();
915 return VINF_SUCCESS; /* ignore for now */
916}
917
918
919/** Opcode 0x0f 0x0b. */
920FNIEMOP_DEF(iemOp_ud2)
921{
922 IEMOP_MNEMONIC(ud2, "ud2");
923 return IEMOP_RAISE_INVALID_OPCODE();
924}
925
926/** Opcode 0x0f 0x0d. */
927FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
928{
929 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
930 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
931 {
932 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
933 return IEMOP_RAISE_INVALID_OPCODE();
934 }
935
936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
938 {
939 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
940 return IEMOP_RAISE_INVALID_OPCODE();
941 }
942
943 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
944 {
945 case 2: /* Aliased to /0 for the time being. */
946 case 4: /* Aliased to /0 for the time being. */
947 case 5: /* Aliased to /0 for the time being. */
948 case 6: /* Aliased to /0 for the time being. */
949 case 7: /* Aliased to /0 for the time being. */
950 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
951 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
952 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
954 }
955
956 IEM_MC_BEGIN(0, 1);
957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
960 /* Currently a NOP. */
961 NOREF(GCPtrEffSrc);
962 IEM_MC_ADVANCE_RIP();
963 IEM_MC_END();
964 return VINF_SUCCESS;
965}
966
967
968/** Opcode 0x0f 0x0e. */
969FNIEMOP_STUB(iemOp_femms);
970
971
972/** Opcode 0x0f 0x0f. */
973FNIEMOP_DEF(iemOp_3Dnow)
974{
975 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
976 {
977 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
978 return IEMOP_RAISE_INVALID_OPCODE();
979 }
980
981#ifdef IEM_WITH_3DNOW
982 /* This is pretty sparse, use switch instead of table. */
983 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
984 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
985#else
986 IEMOP_BITCH_ABOUT_STUB();
987 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
988#endif
989}
990
991
992/**
993 * @opcode 0x10
994 * @oppfx none
995 * @opcpuid sse
996 * @opgroup og_sse_simdfp_datamove
997 * @opxcpttype 4UA
998 * @optest op1=1 op2=2 -> op1=2
999 * @optest op1=0 op2=-22 -> op1=-22
1000 */
1001FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1002{
1003 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1004 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1006 {
1007 /*
1008 * Register, register.
1009 */
1010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1011 IEM_MC_BEGIN(0, 0);
1012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1013 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1014 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1015 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1016 IEM_MC_ADVANCE_RIP();
1017 IEM_MC_END();
1018 }
1019 else
1020 {
1021 /*
1022 * Memory, register.
1023 */
1024 IEM_MC_BEGIN(0, 2);
1025 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1027
1028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1030 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1032
1033 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1034 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1035
1036 IEM_MC_ADVANCE_RIP();
1037 IEM_MC_END();
1038 }
1039 return VINF_SUCCESS;
1040
1041}
1042
1043
1044/**
1045 * @opcode 0x10
1046 * @oppfx 0x66
1047 * @opcpuid sse2
1048 * @opgroup og_sse2_pcksclr_datamove
1049 * @opxcpttype 4UA
1050 * @optest op1=1 op2=2 -> op1=2
1051 * @optest op1=0 op2=-42 -> op1=-42
1052 */
1053FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1054{
1055 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1058 {
1059 /*
1060 * Register, register.
1061 */
1062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1063 IEM_MC_BEGIN(0, 0);
1064 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1066 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1067 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1068 IEM_MC_ADVANCE_RIP();
1069 IEM_MC_END();
1070 }
1071 else
1072 {
1073 /*
1074 * Memory, register.
1075 */
1076 IEM_MC_BEGIN(0, 2);
1077 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1079
1080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1082 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1083 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1084
1085 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1086 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1087
1088 IEM_MC_ADVANCE_RIP();
1089 IEM_MC_END();
1090 }
1091 return VINF_SUCCESS;
1092}
1093
1094
1095/**
1096 * @opcode 0x10
1097 * @oppfx 0xf3
1098 * @opcpuid sse
1099 * @opgroup og_sse_simdfp_datamove
1100 * @opxcpttype 5
1101 * @optest op1=1 op2=2 -> op1=2
1102 * @optest op1=0 op2=-22 -> op1=-22
1103 */
1104FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1105{
1106 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1108 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1109 {
1110 /*
1111 * Register, register.
1112 */
1113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1114 IEM_MC_BEGIN(0, 1);
1115 IEM_MC_LOCAL(uint32_t, uSrc);
1116
1117 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1118 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1119 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1120 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1121
1122 IEM_MC_ADVANCE_RIP();
1123 IEM_MC_END();
1124 }
1125 else
1126 {
1127 /*
1128 * Memory, register.
1129 */
1130 IEM_MC_BEGIN(0, 2);
1131 IEM_MC_LOCAL(uint32_t, uSrc);
1132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1133
1134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1136 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1137 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1138
1139 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1140 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1141
1142 IEM_MC_ADVANCE_RIP();
1143 IEM_MC_END();
1144 }
1145 return VINF_SUCCESS;
1146}
1147
1148
1149/**
1150 * @opcode 0x10
1151 * @oppfx 0xf2
1152 * @opcpuid sse2
1153 * @opgroup og_sse2_pcksclr_datamove
1154 * @opxcpttype 5
1155 * @optest op1=1 op2=2 -> op1=2
1156 * @optest op1=0 op2=-42 -> op1=-42
1157 */
1158FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1159{
1160 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZxReg, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1162 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1163 {
1164 /*
1165 * Register, register.
1166 */
1167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1168 IEM_MC_BEGIN(0, 1);
1169 IEM_MC_LOCAL(uint64_t, uSrc);
1170
1171 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1172 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1173 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1174 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1175
1176 IEM_MC_ADVANCE_RIP();
1177 IEM_MC_END();
1178 }
1179 else
1180 {
1181 /*
1182 * Memory, register.
1183 */
1184 IEM_MC_BEGIN(0, 2);
1185 IEM_MC_LOCAL(uint64_t, uSrc);
1186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1187
1188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1190 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1191 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1192
1193 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1194 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1195
1196 IEM_MC_ADVANCE_RIP();
1197 IEM_MC_END();
1198 }
1199 return VINF_SUCCESS;
1200}
1201
1202
1203/**
1204 * @opcode 0x11
1205 * @oppfx none
1206 * @opcpuid sse
1207 * @opgroup og_sse_simdfp_datamove
1208 * @opxcpttype 4UA
1209 * @optest op1=1 op2=2 -> op1=2
1210 * @optest op1=0 op2=-42 -> op1=-42
1211 */
1212FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1213{
1214 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1216 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1217 {
1218 /*
1219 * Register, register.
1220 */
1221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1222 IEM_MC_BEGIN(0, 0);
1223 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1224 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1225 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1226 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1227 IEM_MC_ADVANCE_RIP();
1228 IEM_MC_END();
1229 }
1230 else
1231 {
1232 /*
1233 * Memory, register.
1234 */
1235 IEM_MC_BEGIN(0, 2);
1236 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1238
1239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1243
1244 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1245 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1246
1247 IEM_MC_ADVANCE_RIP();
1248 IEM_MC_END();
1249 }
1250 return VINF_SUCCESS;
1251}
1252
1253
1254/**
1255 * @opcode 0x11
1256 * @oppfx 0x66
1257 * @opcpuid sse2
1258 * @opgroup og_sse2_pcksclr_datamove
1259 * @opxcpttype 4UA
1260 * @optest op1=1 op2=2 -> op1=2
1261 * @optest op1=0 op2=-42 -> op1=-42
1262 */
1263FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1264{
1265 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1267 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1268 {
1269 /*
1270 * Register, register.
1271 */
1272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1273 IEM_MC_BEGIN(0, 0);
1274 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1275 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1276 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1277 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1278 IEM_MC_ADVANCE_RIP();
1279 IEM_MC_END();
1280 }
1281 else
1282 {
1283 /*
1284 * Memory, register.
1285 */
1286 IEM_MC_BEGIN(0, 2);
1287 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1289
1290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1292 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1294
1295 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1296 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1297
1298 IEM_MC_ADVANCE_RIP();
1299 IEM_MC_END();
1300 }
1301 return VINF_SUCCESS;
1302}
1303
1304
1305/**
1306 * @opcode 0x11
1307 * @oppfx 0xf3
1308 * @opcpuid sse
1309 * @opgroup og_sse_simdfp_datamove
1310 * @opxcpttype 5
1311 * @optest op1=1 op2=2 -> op1=2
1312 * @optest op1=0 op2=-22 -> op1=-22
1313 */
1314FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1315{
1316 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1319 {
1320 /*
1321 * Register, register.
1322 */
1323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1324 IEM_MC_BEGIN(0, 1);
1325 IEM_MC_LOCAL(uint32_t, uSrc);
1326
1327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1328 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1329 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1330 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1331
1332 IEM_MC_ADVANCE_RIP();
1333 IEM_MC_END();
1334 }
1335 else
1336 {
1337 /*
1338 * Memory, register.
1339 */
1340 IEM_MC_BEGIN(0, 2);
1341 IEM_MC_LOCAL(uint32_t, uSrc);
1342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1343
1344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1346 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1348
1349 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1350 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1351
1352 IEM_MC_ADVANCE_RIP();
1353 IEM_MC_END();
1354 }
1355 return VINF_SUCCESS;
1356}
1357
1358
1359/**
1360 * @opcode 0x11
1361 * @oppfx 0xf2
1362 * @opcpuid sse2
1363 * @opgroup og_sse2_pcksclr_datamove
1364 * @opxcpttype 5
1365 * @optest op1=1 op2=2 -> op1=2
1366 * @optest op1=0 op2=-42 -> op1=-42
1367 */
1368FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1369{
1370 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1373 {
1374 /*
1375 * Register, register.
1376 */
1377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1378 IEM_MC_BEGIN(0, 1);
1379 IEM_MC_LOCAL(uint64_t, uSrc);
1380
1381 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1382 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1383 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1384 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1385
1386 IEM_MC_ADVANCE_RIP();
1387 IEM_MC_END();
1388 }
1389 else
1390 {
1391 /*
1392 * Memory, register.
1393 */
1394 IEM_MC_BEGIN(0, 2);
1395 IEM_MC_LOCAL(uint64_t, uSrc);
1396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1397
1398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1400 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1402
1403 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1404 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1405
1406 IEM_MC_ADVANCE_RIP();
1407 IEM_MC_END();
1408 }
1409 return VINF_SUCCESS;
1410}
1411
1412
1413FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1414{
1415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1416 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1417 {
1418 /**
1419 * @opcode 0x12
1420 * @opcodesub 11 mr/reg
1421 * @oppfx none
1422 * @opcpuid sse
1423 * @opgroup og_sse_simdfp_datamove
1424 * @opxcpttype 5
1425 * @optest op1=1 op2=2 -> op1=2
1426 * @optest op1=0 op2=-42 -> op1=-42
1427 */
1428 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1429
1430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1431 IEM_MC_BEGIN(0, 1);
1432 IEM_MC_LOCAL(uint64_t, uSrc);
1433
1434 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1436 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1437 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1438
1439 IEM_MC_ADVANCE_RIP();
1440 IEM_MC_END();
1441 }
1442 else
1443 {
1444 /**
1445 * @opdone
1446 * @opcode 0x12
1447 * @opcodesub !11 mr/reg
1448 * @oppfx none
1449 * @opcpuid sse
1450 * @opgroup og_sse_simdfp_datamove
1451 * @opxcpttype 5
1452 * @optest op1=1 op2=2 -> op1=2
1453 * @optest op1=0 op2=-42 -> op1=-42
1454 * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
1455 */
1456 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1457
1458 IEM_MC_BEGIN(0, 2);
1459 IEM_MC_LOCAL(uint64_t, uSrc);
1460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1461
1462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1466
1467 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1468 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1469
1470 IEM_MC_ADVANCE_RIP();
1471 IEM_MC_END();
1472 }
1473 return VINF_SUCCESS;
1474}
1475
1476
1477/**
1478 * @opcode 0x12
1479 * @opcodesub !11 mr/reg
1480 * @oppfx 0x66
1481 * @opcpuid sse2
1482 * @opgroup og_sse2_pcksclr_datamove
1483 * @opxcpttype 5
1484 * @optest op1=1 op2=2 -> op1=2
1485 * @optest op1=0 op2=-42 -> op1=-42
1486 */
1487FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1488{
1489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1490 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1491 {
1492 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1493
1494 IEM_MC_BEGIN(0, 2);
1495 IEM_MC_LOCAL(uint64_t, uSrc);
1496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1497
1498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1500 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1502
1503 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1504 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1505
1506 IEM_MC_ADVANCE_RIP();
1507 IEM_MC_END();
1508 return VINF_SUCCESS;
1509 }
1510
1511 /**
1512 * @opdone
1513 * @opmnemonic ud660f12m3
1514 * @opcode 0x12
1515 * @opcodesub 11 mr/reg
1516 * @oppfx 0x66
1517 * @opunused immediate
1518 * @opcpuid sse
1519 * @optest ->
1520 */
1521 return IEMOP_RAISE_INVALID_OPCODE();
1522}
1523
1524
1525/**
1526 * @opcode 0x12
1527 * @oppfx 0xf3
1528 * @opcpuid sse3
1529 * @opgroup og_sse3_pcksclr_datamove
1530 * @opxcpttype 4
1531 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1532 * op1=0x00000002000000020000000100000001
1533 */
1534FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1535{
1536 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1538 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1539 {
1540 /*
1541 * Register, register.
1542 */
1543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1544 IEM_MC_BEGIN(2, 0);
1545 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1546 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1547
1548 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1549 IEM_MC_PREPARE_SSE_USAGE();
1550
1551 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1552 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1553 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1554
1555 IEM_MC_ADVANCE_RIP();
1556 IEM_MC_END();
1557 }
1558 else
1559 {
1560 /*
1561 * Register, memory.
1562 */
1563 IEM_MC_BEGIN(2, 2);
1564 IEM_MC_LOCAL(RTUINT128U, uSrc);
1565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1566 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1567 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1568
1569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1571 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1572 IEM_MC_PREPARE_SSE_USAGE();
1573
1574 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1575 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1576 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1577
1578 IEM_MC_ADVANCE_RIP();
1579 IEM_MC_END();
1580 }
1581 return VINF_SUCCESS;
1582}
1583
1584
1585/**
1586 * @opcode 0x12
1587 * @oppfx 0xf2
1588 * @opcpuid sse3
1589 * @opgroup og_sse3_pcksclr_datamove
1590 * @opxcpttype 5
1591 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1592 * op1=0x22222222111111112222222211111111
1593 */
1594FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1595{
1596 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1599 {
1600 /*
1601 * Register, register.
1602 */
1603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1604 IEM_MC_BEGIN(2, 0);
1605 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1606 IEM_MC_ARG(uint64_t, uSrc, 1);
1607
1608 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1609 IEM_MC_PREPARE_SSE_USAGE();
1610
1611 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1612 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1613 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1614
1615 IEM_MC_ADVANCE_RIP();
1616 IEM_MC_END();
1617 }
1618 else
1619 {
1620 /*
1621 * Register, memory.
1622 */
1623 IEM_MC_BEGIN(2, 2);
1624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1625 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1626 IEM_MC_ARG(uint64_t, uSrc, 1);
1627
1628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1630 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1631 IEM_MC_PREPARE_SSE_USAGE();
1632
1633 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1634 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1635 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1636
1637 IEM_MC_ADVANCE_RIP();
1638 IEM_MC_END();
1639 }
1640 return VINF_SUCCESS;
1641}
1642
1643
1644/**
1645 * @opcode 0x13
1646 * @opcodesub !11 mr/reg
1647 * @oppfx none
1648 * @opcpuid sse
1649 * @opgroup og_sse_simdfp_datamove
1650 * @opxcpttype 5
1651 * @optest op1=1 op2=2 -> op1=2
1652 * @optest op1=0 op2=-42 -> op1=-42
1653 */
1654FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1655{
1656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1657 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1658 {
1659 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1660
1661 IEM_MC_BEGIN(0, 2);
1662 IEM_MC_LOCAL(uint64_t, uSrc);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664
1665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1669
1670 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1671 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1672
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 return VINF_SUCCESS;
1676 }
1677
1678 /**
1679 * @opdone
1680 * @opmnemonic ud0f13m3
1681 * @opcode 0x13
1682 * @opcodesub 11 mr/reg
1683 * @oppfx none
1684 * @opunused immediate
1685 * @opcpuid sse
1686 * @optest ->
1687 */
1688 return IEMOP_RAISE_INVALID_OPCODE();
1689}
1690
1691
1692/**
1693 * @opcode 0x13
1694 * @opcodesub !11 mr/reg
1695 * @oppfx 0x66
1696 * @opcpuid sse2
1697 * @opgroup og_sse2_pcksclr_datamove
1698 * @opxcpttype 5
1699 * @optest op1=1 op2=2 -> op1=2
1700 * @optest op1=0 op2=-42 -> op1=-42
1701 */
1702FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1703{
1704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1705 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1706 {
1707 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1708 IEM_MC_BEGIN(0, 2);
1709 IEM_MC_LOCAL(uint64_t, uSrc);
1710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1711
1712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1716
1717 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1718 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1719
1720 IEM_MC_ADVANCE_RIP();
1721 IEM_MC_END();
1722 return VINF_SUCCESS;
1723 }
1724
1725 /**
1726 * @opdone
1727 * @opmnemonic ud660f13m3
1728 * @opcode 0x13
1729 * @opcodesub 11 mr/reg
1730 * @oppfx 0x66
1731 * @opunused immediate
1732 * @opcpuid sse
1733 * @optest ->
1734 */
1735 return IEMOP_RAISE_INVALID_OPCODE();
1736}
1737
1738
1739/**
1740 * @opmnemonic udf30f13
1741 * @opcode 0x13
1742 * @oppfx 0xf3
1743 * @opunused intel-modrm
1744 * @opcpuid sse
1745 * @optest ->
1746 * @opdone
1747 */
1748
1749/**
1750 * @opmnemonic udf20f13
1751 * @opcode 0x13
1752 * @oppfx 0xf2
1753 * @opunused intel-modrm
1754 * @opcpuid sse
1755 * @optest ->
1756 * @opdone
1757 */
1758
1759/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1760FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1761/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1762FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1763
1764/**
1765 * @opdone
1766 * @opmnemonic udf30f14
1767 * @opcode 0x14
1768 * @oppfx 0xf3
1769 * @opunused intel-modrm
1770 * @opcpuid sse
1771 * @optest ->
1772 * @opdone
1773 */
1774
1775/**
1776 * @opmnemonic udf20f14
1777 * @opcode 0x14
1778 * @oppfx 0xf2
1779 * @opunused intel-modrm
1780 * @opcpuid sse
1781 * @optest ->
1782 * @opdone
1783 */
1784
1785/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1786FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1787/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1788FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1789/* Opcode 0xf3 0x0f 0x15 - invalid */
1790/* Opcode 0xf2 0x0f 0x15 - invalid */
1791
1792/**
1793 * @opdone
1794 * @opmnemonic udf30f15
1795 * @opcode 0x15
1796 * @oppfx 0xf3
1797 * @opunused intel-modrm
1798 * @opcpuid sse
1799 * @optest ->
1800 * @opdone
1801 */
1802
1803/**
1804 * @opmnemonic udf20f15
1805 * @opcode 0x15
1806 * @oppfx 0xf2
1807 * @opunused intel-modrm
1808 * @opcpuid sse
1809 * @optest ->
1810 * @opdone
1811 */
1812
1813FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1814{
1815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1817 {
1818 /**
1819 * @opcode 0x16
1820 * @opcodesub 11 mr/reg
1821 * @oppfx none
1822 * @opcpuid sse
1823 * @opgroup og_sse_simdfp_datamove
1824 * @opxcpttype 5
1825 * @optest op1=1 op2=2 -> op1=2
1826 * @optest op1=0 op2=-42 -> op1=-42
1827 */
1828 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1829
1830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1831 IEM_MC_BEGIN(0, 1);
1832 IEM_MC_LOCAL(uint64_t, uSrc);
1833
1834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1836 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1837 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 /**
1845 * @opdone
1846 * @opcode 0x16
1847 * @opcodesub !11 mr/reg
1848 * @oppfx none
1849 * @opcpuid sse
1850 * @opgroup og_sse_simdfp_datamove
1851 * @opxcpttype 5
1852 * @optest op1=1 op2=2 -> op1=2
1853 * @optest op1=0 op2=-42 -> op1=-42
1854 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1855 */
1856 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1857
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(uint64_t, uSrc);
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}
1875
1876
1877/**
1878 * @opcode 0x16
1879 * @opcodesub !11 mr/reg
1880 * @oppfx 0x66
1881 * @opcpuid sse2
1882 * @opgroup og_sse2_pcksclr_datamove
1883 * @opxcpttype 5
1884 * @optest op1=1 op2=2 -> op1=2
1885 * @optest op1=0 op2=-42 -> op1=-42
1886 */
1887FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1888{
1889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1890 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1891 {
1892 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1893 IEM_MC_BEGIN(0, 2);
1894 IEM_MC_LOCAL(uint64_t, uSrc);
1895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1896
1897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1899 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1900 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1901
1902 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1903 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1904
1905 IEM_MC_ADVANCE_RIP();
1906 IEM_MC_END();
1907 return VINF_SUCCESS;
1908 }
1909
1910 /**
1911 * @opdone
1912 * @opmnemonic ud660f16m3
1913 * @opcode 0x16
1914 * @opcodesub 11 mr/reg
1915 * @oppfx 0x66
1916 * @opunused immediate
1917 * @opcpuid sse
1918 * @optest ->
1919 */
1920 return IEMOP_RAISE_INVALID_OPCODE();
1921}
1922
1923
1924/**
1925 * @opcode 0x16
1926 * @oppfx 0xf3
1927 * @opcpuid sse3
1928 * @opgroup og_sse3_pcksclr_datamove
1929 * @opxcpttype 4
1930 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1931 * op1=0x00000002000000020000000100000001
1932 * @oponly
1933 */
1934FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1935{
1936 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1937 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1938 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1939 {
1940 /*
1941 * Register, register.
1942 */
1943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1944 IEM_MC_BEGIN(2, 0);
1945 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1946 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1947
1948 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1949 IEM_MC_PREPARE_SSE_USAGE();
1950
1951 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1952 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1953 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1954
1955 IEM_MC_ADVANCE_RIP();
1956 IEM_MC_END();
1957 }
1958 else
1959 {
1960 /*
1961 * Register, memory.
1962 */
1963 IEM_MC_BEGIN(2, 2);
1964 IEM_MC_LOCAL(RTUINT128U, uSrc);
1965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1966 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1967 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1968
1969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1971 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1972 IEM_MC_PREPARE_SSE_USAGE();
1973
1974 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1975 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1976 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1977
1978 IEM_MC_ADVANCE_RIP();
1979 IEM_MC_END();
1980 }
1981 return VINF_SUCCESS;
1982}
1983
1984/**
1985 * @opdone
1986 * @opmnemonic udf30f16
1987 * @opcode 0x16
1988 * @oppfx 0xf2
1989 * @opunused intel-modrm
1990 * @opcpuid sse
1991 * @optest ->
1992 * @opdone
1993 */
1994
1995
1996/**
1997 * @opcode 0x17
1998 * @opcodesub !11 mr/reg
1999 * @oppfx none
2000 * @opcpuid sse
2001 * @opgroup og_sse_simdfp_datamove
2002 * @opxcpttype 5
2003 * @optest op1=1 op2=2 -> op1=2
2004 * @optest op1=0 op2=-42 -> op1=-42
2005 */
2006FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2007{
2008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2009 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2010 {
2011 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, MqWO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2012
2013 IEM_MC_BEGIN(0, 2);
2014 IEM_MC_LOCAL(uint64_t, uSrc);
2015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2016
2017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2019 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2020 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2021
2022 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2023 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2024
2025 IEM_MC_ADVANCE_RIP();
2026 IEM_MC_END();
2027 return VINF_SUCCESS;
2028 }
2029
2030 /**
2031 * @opdone
2032 * @opmnemonic ud0f17m3
2033 * @opcode 0x17
2034 * @opcodesub 11 mr/reg
2035 * @oppfx none
2036 * @opunused immediate
2037 * @opcpuid sse
2038 * @optest ->
2039 */
2040 return IEMOP_RAISE_INVALID_OPCODE();
2041}
2042
2043
2044/**
2045 * @opcode 0x17
2046 * @opcodesub !11 mr/reg
2047 * @oppfx 0x66
2048 * @opcpuid sse2
2049 * @opgroup og_sse2_pcksclr_datamove
2050 * @opxcpttype 5
2051 * @optest op1=1 op2=2 -> op1=2
2052 * @optest op1=0 op2=-42 -> op1=-42
2053 */
2054FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2055{
2056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2057 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2058 {
2059 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, MqWO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2060
2061 IEM_MC_BEGIN(0, 2);
2062 IEM_MC_LOCAL(uint64_t, uSrc);
2063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2064
2065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2067 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2068 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2069
2070 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2071 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2072
2073 IEM_MC_ADVANCE_RIP();
2074 IEM_MC_END();
2075 return VINF_SUCCESS;
2076 }
2077
2078 /**
2079 * @opdone
2080 * @opmnemonic ud660f17m3
2081 * @opcode 0x17
2082 * @opcodesub 11 mr/reg
2083 * @oppfx 0x66
2084 * @opunused immediate
2085 * @opcpuid sse
2086 * @optest ->
2087 */
2088 return IEMOP_RAISE_INVALID_OPCODE();
2089}
2090
2091
2092/**
2093 * @opdone
2094 * @opmnemonic udf30f17
2095 * @opcode 0x17
2096 * @oppfx 0xf3
2097 * @opunused intel-modrm
2098 * @opcpuid sse
2099 * @optest ->
2100 * @opdone
2101 */
2102
2103/**
2104 * @opmnemonic udf20f17
2105 * @opcode 0x17
2106 * @oppfx 0xf2
2107 * @opunused intel-modrm
2108 * @opcpuid sse
2109 * @optest ->
2110 * @opdone
2111 */
2112
2113
2114/** Opcode 0x0f 0x18. */
2115FNIEMOP_DEF(iemOp_prefetch_Grp16)
2116{
2117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2118 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2119 {
2120 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2121 {
2122 case 4: /* Aliased to /0 for the time being according to AMD. */
2123 case 5: /* Aliased to /0 for the time being according to AMD. */
2124 case 6: /* Aliased to /0 for the time being according to AMD. */
2125 case 7: /* Aliased to /0 for the time being according to AMD. */
2126 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2127 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2128 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2129 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2131 }
2132
2133 IEM_MC_BEGIN(0, 1);
2134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2137 /* Currently a NOP. */
2138 NOREF(GCPtrEffSrc);
2139 IEM_MC_ADVANCE_RIP();
2140 IEM_MC_END();
2141 return VINF_SUCCESS;
2142 }
2143
2144 return IEMOP_RAISE_INVALID_OPCODE();
2145}
2146
2147
2148/** Opcode 0x0f 0x19..0x1f. */
2149FNIEMOP_DEF(iemOp_nop_Ev)
2150{
2151 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2153 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2154 {
2155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2156 IEM_MC_BEGIN(0, 0);
2157 IEM_MC_ADVANCE_RIP();
2158 IEM_MC_END();
2159 }
2160 else
2161 {
2162 IEM_MC_BEGIN(0, 1);
2163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2166 /* Currently a NOP. */
2167 NOREF(GCPtrEffSrc);
2168 IEM_MC_ADVANCE_RIP();
2169 IEM_MC_END();
2170 }
2171 return VINF_SUCCESS;
2172}
2173
2174
2175/** Opcode 0x0f 0x20. */
2176FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2177{
2178 /* mod is ignored, as is operand size overrides. */
2179 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2180 IEMOP_HLP_MIN_386();
2181 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2182 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2183 else
2184 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2185
2186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2187 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2188 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2189 {
2190 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2191 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2192 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2193 iCrReg |= 8;
2194 }
2195 switch (iCrReg)
2196 {
2197 case 0: case 2: case 3: case 4: case 8:
2198 break;
2199 default:
2200 return IEMOP_RAISE_INVALID_OPCODE();
2201 }
2202 IEMOP_HLP_DONE_DECODING();
2203
2204 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2205}
2206
2207
2208/** Opcode 0x0f 0x21. */
2209FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2210{
2211 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2212 IEMOP_HLP_MIN_386();
2213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2215 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2216 return IEMOP_RAISE_INVALID_OPCODE();
2217 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2218 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2219 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2220}
2221
2222
2223/** Opcode 0x0f 0x22. */
2224FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2225{
2226 /* mod is ignored, as is operand size overrides. */
2227 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2228 IEMOP_HLP_MIN_386();
2229 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2230 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2231 else
2232 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2233
2234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2235 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2236 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2237 {
2238 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2239 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2240 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2241 iCrReg |= 8;
2242 }
2243 switch (iCrReg)
2244 {
2245 case 0: case 2: case 3: case 4: case 8:
2246 break;
2247 default:
2248 return IEMOP_RAISE_INVALID_OPCODE();
2249 }
2250 IEMOP_HLP_DONE_DECODING();
2251
2252 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2253}
2254
2255
2256/** Opcode 0x0f 0x23. */
2257FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2258{
2259 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2260 IEMOP_HLP_MIN_386();
2261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2263 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2264 return IEMOP_RAISE_INVALID_OPCODE();
2265 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2266 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2267 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2268}
2269
2270
2271/** Opcode 0x0f 0x24. */
2272FNIEMOP_DEF(iemOp_mov_Rd_Td)
2273{
2274 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2275 /** @todo works on 386 and 486. */
2276 /* The RM byte is not considered, see testcase. */
2277 return IEMOP_RAISE_INVALID_OPCODE();
2278}
2279
2280
2281/** Opcode 0x0f 0x26. */
2282FNIEMOP_DEF(iemOp_mov_Td_Rd)
2283{
2284 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2285 /** @todo works on 386 and 486. */
2286 /* The RM byte is not considered, see testcase. */
2287 return IEMOP_RAISE_INVALID_OPCODE();
2288}
2289
2290
2291/** Opcode 0x0f 0x28 - movaps Vps, Wps */
2292FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2293{
2294 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
2295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2296 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2297 {
2298 /*
2299 * Register, register.
2300 */
2301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2302 IEM_MC_BEGIN(0, 0);
2303 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2304 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2305 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2306 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2307 IEM_MC_ADVANCE_RIP();
2308 IEM_MC_END();
2309 }
2310 else
2311 {
2312 /*
2313 * Register, memory.
2314 */
2315 IEM_MC_BEGIN(0, 2);
2316 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2318
2319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2321 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2322 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2323
2324 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2325 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2326
2327 IEM_MC_ADVANCE_RIP();
2328 IEM_MC_END();
2329 }
2330 return VINF_SUCCESS;
2331}
2332
2333/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
2334FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2335{
2336 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
2337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2338 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2339 {
2340 /*
2341 * Register, register.
2342 */
2343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2344 IEM_MC_BEGIN(0, 0);
2345 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2346 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2347 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2348 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2349 IEM_MC_ADVANCE_RIP();
2350 IEM_MC_END();
2351 }
2352 else
2353 {
2354 /*
2355 * Register, memory.
2356 */
2357 IEM_MC_BEGIN(0, 2);
2358 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2360
2361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2363 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2364 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2365
2366 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2367 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2368
2369 IEM_MC_ADVANCE_RIP();
2370 IEM_MC_END();
2371 }
2372 return VINF_SUCCESS;
2373}
2374
2375/* Opcode 0xf3 0x0f 0x28 - invalid */
2376/* Opcode 0xf2 0x0f 0x28 - invalid */
2377
2378/** Opcode 0x0f 0x29 - movaps Wps, Vps */
2379FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2380{
2381 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2383 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2384 {
2385 /*
2386 * Register, register.
2387 */
2388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2389 IEM_MC_BEGIN(0, 0);
2390 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2391 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2392 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2393 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2394 IEM_MC_ADVANCE_RIP();
2395 IEM_MC_END();
2396 }
2397 else
2398 {
2399 /*
2400 * Memory, register.
2401 */
2402 IEM_MC_BEGIN(0, 2);
2403 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2405
2406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2408 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2409 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2410
2411 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2412 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2413
2414 IEM_MC_ADVANCE_RIP();
2415 IEM_MC_END();
2416 }
2417 return VINF_SUCCESS;
2418}
2419
2420/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
2421FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2422{
2423 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2425 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2426 {
2427 /*
2428 * Register, register.
2429 */
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2431 IEM_MC_BEGIN(0, 0);
2432 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2433 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2434 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2435 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2436 IEM_MC_ADVANCE_RIP();
2437 IEM_MC_END();
2438 }
2439 else
2440 {
2441 /*
2442 * Memory, register.
2443 */
2444 IEM_MC_BEGIN(0, 2);
2445 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2447
2448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2450 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2451 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2452
2453 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2454 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2455
2456 IEM_MC_ADVANCE_RIP();
2457 IEM_MC_END();
2458 }
2459 return VINF_SUCCESS;
2460}
2461
2462/* Opcode 0xf3 0x0f 0x29 - invalid */
2463/* Opcode 0xf2 0x0f 0x29 - invalid */
2464
2465
2466/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2467FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2468/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2469FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2470/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2471FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2472/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2473FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2474
2475
2476/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
2477FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2478{
2479 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2481 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2482 {
2483 /*
2484 * memory, register.
2485 */
2486 IEM_MC_BEGIN(0, 2);
2487 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2489
2490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2492 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2493 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2494
2495 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2496 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2497
2498 IEM_MC_ADVANCE_RIP();
2499 IEM_MC_END();
2500 }
2501 /* The register, register encoding is invalid. */
2502 else
2503 return IEMOP_RAISE_INVALID_OPCODE();
2504 return VINF_SUCCESS;
2505}
2506
2507/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2508FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2509{
2510 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
2511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2512 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2513 {
2514 /*
2515 * memory, register.
2516 */
2517 IEM_MC_BEGIN(0, 2);
2518 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2520
2521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2523 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2524 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2525
2526 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2527 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2528
2529 IEM_MC_ADVANCE_RIP();
2530 IEM_MC_END();
2531 }
2532 /* The register, register encoding is invalid. */
2533 else
2534 return IEMOP_RAISE_INVALID_OPCODE();
2535 return VINF_SUCCESS;
2536}
2537/* Opcode 0xf3 0x0f 0x2b - invalid */
2538/* Opcode 0xf2 0x0f 0x2b - invalid */
2539
2540
2541/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2542FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2543/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2544FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2545/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2546FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2547/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2548FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2549
2550/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2551FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2552/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2553FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2554/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2555FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2556/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2557FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2558
2559/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2560FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2561/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2562FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2563/* Opcode 0xf3 0x0f 0x2e - invalid */
2564/* Opcode 0xf2 0x0f 0x2e - invalid */
2565
2566/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2567FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2568/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2569FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2570/* Opcode 0xf3 0x0f 0x2f - invalid */
2571/* Opcode 0xf2 0x0f 0x2f - invalid */
2572
2573/** Opcode 0x0f 0x30. */
2574FNIEMOP_DEF(iemOp_wrmsr)
2575{
2576 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2578 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2579}
2580
2581
2582/** Opcode 0x0f 0x31. */
2583FNIEMOP_DEF(iemOp_rdtsc)
2584{
2585 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2587 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2588}
2589
2590
2591/** Opcode 0x0f 0x33. */
2592FNIEMOP_DEF(iemOp_rdmsr)
2593{
2594 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2596 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2597}
2598
2599
2600/** Opcode 0x0f 0x34. */
2601FNIEMOP_DEF(iemOp_rdpmc)
2602{
2603 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2605 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2606}
2607
2608
2609/** Opcode 0x0f 0x34. */
2610FNIEMOP_STUB(iemOp_sysenter);
2611/** Opcode 0x0f 0x35. */
2612FNIEMOP_STUB(iemOp_sysexit);
2613/** Opcode 0x0f 0x37. */
2614FNIEMOP_STUB(iemOp_getsec);
2615
2616
2617/** Opcode 0x0f 0x38. */
2618FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2619{
2620#ifdef IEM_WITH_THREE_0F_38
2621 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2622 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2623#else
2624 IEMOP_BITCH_ABOUT_STUB();
2625 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2626#endif
2627}
2628
2629
2630/** Opcode 0x0f 0x3a. */
2631FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2632{
2633#ifdef IEM_WITH_THREE_0F_3A
2634 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2635 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2636#else
2637 IEMOP_BITCH_ABOUT_STUB();
2638 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2639#endif
2640}
2641
2642
2643/**
2644 * Implements a conditional move.
2645 *
2646 * Wish there was an obvious way to do this where we could share and reduce
2647 * code bloat.
2648 *
2649 * @param a_Cnd The conditional "microcode" operation.
2650 */
2651#define CMOV_X(a_Cnd) \
2652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2653 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2654 { \
2655 switch (pVCpu->iem.s.enmEffOpSize) \
2656 { \
2657 case IEMMODE_16BIT: \
2658 IEM_MC_BEGIN(0, 1); \
2659 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2660 a_Cnd { \
2661 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2662 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2663 } IEM_MC_ENDIF(); \
2664 IEM_MC_ADVANCE_RIP(); \
2665 IEM_MC_END(); \
2666 return VINF_SUCCESS; \
2667 \
2668 case IEMMODE_32BIT: \
2669 IEM_MC_BEGIN(0, 1); \
2670 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2671 a_Cnd { \
2672 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2673 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2674 } IEM_MC_ELSE() { \
2675 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2676 } IEM_MC_ENDIF(); \
2677 IEM_MC_ADVANCE_RIP(); \
2678 IEM_MC_END(); \
2679 return VINF_SUCCESS; \
2680 \
2681 case IEMMODE_64BIT: \
2682 IEM_MC_BEGIN(0, 1); \
2683 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2684 a_Cnd { \
2685 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2686 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2687 } IEM_MC_ENDIF(); \
2688 IEM_MC_ADVANCE_RIP(); \
2689 IEM_MC_END(); \
2690 return VINF_SUCCESS; \
2691 \
2692 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2693 } \
2694 } \
2695 else \
2696 { \
2697 switch (pVCpu->iem.s.enmEffOpSize) \
2698 { \
2699 case IEMMODE_16BIT: \
2700 IEM_MC_BEGIN(0, 2); \
2701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2702 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2704 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2705 a_Cnd { \
2706 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2707 } IEM_MC_ENDIF(); \
2708 IEM_MC_ADVANCE_RIP(); \
2709 IEM_MC_END(); \
2710 return VINF_SUCCESS; \
2711 \
2712 case IEMMODE_32BIT: \
2713 IEM_MC_BEGIN(0, 2); \
2714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2715 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2717 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2718 a_Cnd { \
2719 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2720 } IEM_MC_ELSE() { \
2721 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2722 } IEM_MC_ENDIF(); \
2723 IEM_MC_ADVANCE_RIP(); \
2724 IEM_MC_END(); \
2725 return VINF_SUCCESS; \
2726 \
2727 case IEMMODE_64BIT: \
2728 IEM_MC_BEGIN(0, 2); \
2729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2730 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2732 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2733 a_Cnd { \
2734 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2735 } IEM_MC_ENDIF(); \
2736 IEM_MC_ADVANCE_RIP(); \
2737 IEM_MC_END(); \
2738 return VINF_SUCCESS; \
2739 \
2740 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2741 } \
2742 } do {} while (0)
2743
2744
2745
2746/** Opcode 0x0f 0x40. */
2747FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2748{
2749 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2750 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2751}
2752
2753
2754/** Opcode 0x0f 0x41. */
2755FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2756{
2757 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2758 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2759}
2760
2761
2762/** Opcode 0x0f 0x42. */
2763FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2764{
2765 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2766 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2767}
2768
2769
2770/** Opcode 0x0f 0x43. */
2771FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2772{
2773 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2774 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2775}
2776
2777
2778/** Opcode 0x0f 0x44. */
2779FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2780{
2781 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2782 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2783}
2784
2785
2786/** Opcode 0x0f 0x45. */
2787FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2788{
2789 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2790 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2791}
2792
2793
2794/** Opcode 0x0f 0x46. */
2795FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2796{
2797 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2798 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2799}
2800
2801
2802/** Opcode 0x0f 0x47. */
2803FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2804{
2805 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2806 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2807}
2808
2809
2810/** Opcode 0x0f 0x48. */
2811FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2812{
2813 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2814 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2815}
2816
2817
2818/** Opcode 0x0f 0x49. */
2819FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2820{
2821 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2822 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2823}
2824
2825
2826/** Opcode 0x0f 0x4a. */
2827FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2828{
2829 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2830 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2831}
2832
2833
2834/** Opcode 0x0f 0x4b. */
2835FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2836{
2837 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2838 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2839}
2840
2841
2842/** Opcode 0x0f 0x4c. */
2843FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2844{
2845 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2846 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2847}
2848
2849
2850/** Opcode 0x0f 0x4d. */
2851FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2852{
2853 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2854 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2855}
2856
2857
2858/** Opcode 0x0f 0x4e. */
2859FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2860{
2861 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2862 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2863}
2864
2865
2866/** Opcode 0x0f 0x4f. */
2867FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2868{
2869 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2870 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2871}
2872
2873#undef CMOV_X
2874
2875/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2876FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2877/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2878FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2879/* Opcode 0xf3 0x0f 0x50 - invalid */
2880/* Opcode 0xf2 0x0f 0x50 - invalid */
2881
2882/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2883FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2884/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2885FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2886/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2887FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2888/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2889FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2890
2891/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2892FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2893/* Opcode 0x66 0x0f 0x52 - invalid */
2894/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2895FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2896/* Opcode 0xf2 0x0f 0x52 - invalid */
2897
2898/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2899FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2900/* Opcode 0x66 0x0f 0x53 - invalid */
2901/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2902FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2903/* Opcode 0xf2 0x0f 0x53 - invalid */
2904
2905/** Opcode 0x0f 0x54 - andps Vps, Wps */
2906FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2907/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2908FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2909/* Opcode 0xf3 0x0f 0x54 - invalid */
2910/* Opcode 0xf2 0x0f 0x54 - invalid */
2911
2912/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2913FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2914/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2915FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2916/* Opcode 0xf3 0x0f 0x55 - invalid */
2917/* Opcode 0xf2 0x0f 0x55 - invalid */
2918
2919/** Opcode 0x0f 0x56 - orps Vps, Wps */
2920FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2921/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2922FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2923/* Opcode 0xf3 0x0f 0x56 - invalid */
2924/* Opcode 0xf2 0x0f 0x56 - invalid */
2925
2926/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2927FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2928/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2929FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2930/* Opcode 0xf3 0x0f 0x57 - invalid */
2931/* Opcode 0xf2 0x0f 0x57 - invalid */
2932
2933/** Opcode 0x0f 0x58 - addps Vps, Wps */
2934FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2935/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2936FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2937/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2938FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2939/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2940FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2941
2942/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2943FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2944/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2945FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2946/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2947FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2948/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2949FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2950
2951/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2952FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2953/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2954FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2955/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2956FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2957/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2958FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2959
2960/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2961FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2962/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2963FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2964/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2965FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2966/* Opcode 0xf2 0x0f 0x5b - invalid */
2967
2968/** Opcode 0x0f 0x5c - subps Vps, Wps */
2969FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2970/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2971FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2972/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2973FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2974/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2975FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2976
2977/** Opcode 0x0f 0x5d - minps Vps, Wps */
2978FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2979/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2980FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2981/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2982FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2983/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2984FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2985
2986/** Opcode 0x0f 0x5e - divps Vps, Wps */
2987FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2988/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2989FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2990/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2991FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2992/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2993FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2994
2995/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2996FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2997/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2998FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2999/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3000FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3001/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3002FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3003
3004/**
3005 * Common worker for MMX instructions on the forms:
3006 * pxxxx mm1, mm2/mem32
3007 *
3008 * The 2nd operand is the first half of a register, which in the memory case
3009 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3010 * memory accessed for MMX.
3011 *
3012 * Exceptions type 4.
3013 */
3014FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3015{
3016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3017 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3018 {
3019 /*
3020 * Register, register.
3021 */
3022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3023 IEM_MC_BEGIN(2, 0);
3024 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3025 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3026 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3027 IEM_MC_PREPARE_SSE_USAGE();
3028 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3029 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3030 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3031 IEM_MC_ADVANCE_RIP();
3032 IEM_MC_END();
3033 }
3034 else
3035 {
3036 /*
3037 * Register, memory.
3038 */
3039 IEM_MC_BEGIN(2, 2);
3040 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3041 IEM_MC_LOCAL(uint64_t, uSrc);
3042 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3044
3045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3047 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3048 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3049
3050 IEM_MC_PREPARE_SSE_USAGE();
3051 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3052 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3053
3054 IEM_MC_ADVANCE_RIP();
3055 IEM_MC_END();
3056 }
3057 return VINF_SUCCESS;
3058}
3059
3060
3061/**
3062 * Common worker for SSE2 instructions on the forms:
3063 * pxxxx xmm1, xmm2/mem128
3064 *
3065 * The 2nd operand is the first half of a register, which in the memory case
3066 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3067 * memory accessed for MMX.
3068 *
3069 * Exceptions type 4.
3070 */
3071FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3072{
3073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3074 if (!pImpl->pfnU64)
3075 return IEMOP_RAISE_INVALID_OPCODE();
3076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3077 {
3078 /*
3079 * Register, register.
3080 */
3081 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3082 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3084 IEM_MC_BEGIN(2, 0);
3085 IEM_MC_ARG(uint64_t *, pDst, 0);
3086 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3087 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3088 IEM_MC_PREPARE_FPU_USAGE();
3089 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3090 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3091 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3092 IEM_MC_ADVANCE_RIP();
3093 IEM_MC_END();
3094 }
3095 else
3096 {
3097 /*
3098 * Register, memory.
3099 */
3100 IEM_MC_BEGIN(2, 2);
3101 IEM_MC_ARG(uint64_t *, pDst, 0);
3102 IEM_MC_LOCAL(uint32_t, uSrc);
3103 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3105
3106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3108 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3109 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3110
3111 IEM_MC_PREPARE_FPU_USAGE();
3112 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3113 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3114
3115 IEM_MC_ADVANCE_RIP();
3116 IEM_MC_END();
3117 }
3118 return VINF_SUCCESS;
3119}
3120
3121
3122/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3123FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3124{
3125 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3126 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3127}
3128
3129/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
3130FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3131{
3132 IEMOP_MNEMONIC(vpunpcklbw_Vx_Wx, "vpunpcklbw Vx, Wx");
3133 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3134}
3135
3136/* Opcode 0xf3 0x0f 0x60 - invalid */
3137
3138
3139/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3140FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3141{
3142 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
3143 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3144}
3145
3146/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3147FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3148{
3149 IEMOP_MNEMONIC(vpunpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3150 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3151}
3152
3153/* Opcode 0xf3 0x0f 0x61 - invalid */
3154
3155
3156/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3157FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3158{
3159 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3160 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3161}
3162
3163/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3164FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3165{
3166 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3167 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3168}
3169
3170/* Opcode 0xf3 0x0f 0x62 - invalid */
3171
3172
3173
3174/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3175FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3176/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3177FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3178/* Opcode 0xf3 0x0f 0x63 - invalid */
3179
3180/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3181FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3182/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3183FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3184/* Opcode 0xf3 0x0f 0x64 - invalid */
3185
3186/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3187FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3188/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3189FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3190/* Opcode 0xf3 0x0f 0x65 - invalid */
3191
3192/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3193FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3194/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3195FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3196/* Opcode 0xf3 0x0f 0x66 - invalid */
3197
3198/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3199FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3200/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3201FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3202/* Opcode 0xf3 0x0f 0x67 - invalid */
3203
3204
3205/**
3206 * Common worker for MMX instructions on the form:
3207 * pxxxx mm1, mm2/mem64
3208 *
3209 * The 2nd operand is the second half of a register, which in the memory case
3210 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3211 * where it may read the full 128 bits or only the upper 64 bits.
3212 *
3213 * Exceptions type 4.
3214 */
3215FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3216{
3217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3218 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3219 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3220 {
3221 /*
3222 * Register, register.
3223 */
3224 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3225 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3227 IEM_MC_BEGIN(2, 0);
3228 IEM_MC_ARG(uint64_t *, pDst, 0);
3229 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3230 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3231 IEM_MC_PREPARE_FPU_USAGE();
3232 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3233 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3234 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3235 IEM_MC_ADVANCE_RIP();
3236 IEM_MC_END();
3237 }
3238 else
3239 {
3240 /*
3241 * Register, memory.
3242 */
3243 IEM_MC_BEGIN(2, 2);
3244 IEM_MC_ARG(uint64_t *, pDst, 0);
3245 IEM_MC_LOCAL(uint64_t, uSrc);
3246 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3248
3249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3251 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3252 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3253
3254 IEM_MC_PREPARE_FPU_USAGE();
3255 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3256 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3257
3258 IEM_MC_ADVANCE_RIP();
3259 IEM_MC_END();
3260 }
3261 return VINF_SUCCESS;
3262}
3263
3264
3265/**
3266 * Common worker for SSE2 instructions on the form:
3267 * pxxxx xmm1, xmm2/mem128
3268 *
3269 * The 2nd operand is the second half of a register, which in the memory case
3270 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3271 * where it may read the full 128 bits or only the upper 64 bits.
3272 *
3273 * Exceptions type 4.
3274 */
3275FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3276{
3277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3279 {
3280 /*
3281 * Register, register.
3282 */
3283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3284 IEM_MC_BEGIN(2, 0);
3285 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3286 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3287 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3288 IEM_MC_PREPARE_SSE_USAGE();
3289 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3290 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3291 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3292 IEM_MC_ADVANCE_RIP();
3293 IEM_MC_END();
3294 }
3295 else
3296 {
3297 /*
3298 * Register, memory.
3299 */
3300 IEM_MC_BEGIN(2, 2);
3301 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3302 IEM_MC_LOCAL(RTUINT128U, uSrc);
3303 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3305
3306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3308 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3309 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
3310
3311 IEM_MC_PREPARE_SSE_USAGE();
3312 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3313 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3314
3315 IEM_MC_ADVANCE_RIP();
3316 IEM_MC_END();
3317 }
3318 return VINF_SUCCESS;
3319}
3320
3321
3322/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3323FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3324{
3325 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3326 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3327}
3328
3329/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3330FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3331{
3332 IEMOP_MNEMONIC(vpunpckhbw_Vx_Wx, "vpunpckhbw Vx, Wx");
3333 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3334}
3335/* Opcode 0xf3 0x0f 0x68 - invalid */
3336
3337
3338/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3339FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3340{
3341 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3342 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3343}
3344
3345/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Hx, Wx */
3346FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3347{
3348 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3349 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3350
3351}
3352/* Opcode 0xf3 0x0f 0x69 - invalid */
3353
3354
3355/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3356FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3357{
3358 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3359 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3360}
3361
3362/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3363FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3364{
3365 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3366 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3367}
3368/* Opcode 0xf3 0x0f 0x6a - invalid */
3369
3370
3371/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3372FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3373/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3374FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3375/* Opcode 0xf3 0x0f 0x6b - invalid */
3376
3377
3378/* Opcode 0x0f 0x6c - invalid */
3379
3380/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3381FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3382{
3383 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3384 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3385}
3386
3387/* Opcode 0xf3 0x0f 0x6c - invalid */
3388/* Opcode 0xf2 0x0f 0x6c - invalid */
3389
3390
3391/* Opcode 0x0f 0x6d - invalid */
3392
3393/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3394FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3395{
3396 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3397 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3398}
3399
3400/* Opcode 0xf3 0x0f 0x6d - invalid */
3401
3402
3403/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3404FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3405{
3406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3407 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3408 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3409 else
3410 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3411 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3412 {
3413 /* MMX, greg */
3414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3415 IEM_MC_BEGIN(0, 1);
3416 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3417 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3418 IEM_MC_LOCAL(uint64_t, u64Tmp);
3419 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3420 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3421 else
3422 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3423 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3424 IEM_MC_ADVANCE_RIP();
3425 IEM_MC_END();
3426 }
3427 else
3428 {
3429 /* MMX, [mem] */
3430 IEM_MC_BEGIN(0, 2);
3431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3432 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3435 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3436 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3437 {
3438 IEM_MC_LOCAL(uint64_t, u64Tmp);
3439 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3440 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3441 }
3442 else
3443 {
3444 IEM_MC_LOCAL(uint32_t, u32Tmp);
3445 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3446 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3447 }
3448 IEM_MC_ADVANCE_RIP();
3449 IEM_MC_END();
3450 }
3451 return VINF_SUCCESS;
3452}
3453
3454/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3455FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3456{
3457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3458 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3459 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
3460 else
3461 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
3462 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3463 {
3464 /* XMM, greg*/
3465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3466 IEM_MC_BEGIN(0, 1);
3467 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3468 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3469 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3470 {
3471 IEM_MC_LOCAL(uint64_t, u64Tmp);
3472 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3473 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3474 }
3475 else
3476 {
3477 IEM_MC_LOCAL(uint32_t, u32Tmp);
3478 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3479 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3480 }
3481 IEM_MC_ADVANCE_RIP();
3482 IEM_MC_END();
3483 }
3484 else
3485 {
3486 /* XMM, [mem] */
3487 IEM_MC_BEGIN(0, 2);
3488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3489 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3492 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3493 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3494 {
3495 IEM_MC_LOCAL(uint64_t, u64Tmp);
3496 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3497 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3498 }
3499 else
3500 {
3501 IEM_MC_LOCAL(uint32_t, u32Tmp);
3502 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3503 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3504 }
3505 IEM_MC_ADVANCE_RIP();
3506 IEM_MC_END();
3507 }
3508 return VINF_SUCCESS;
3509}
3510
3511/* Opcode 0xf3 0x0f 0x6e - invalid */
3512
3513
3514/** Opcode 0x0f 0x6f - movq Pq, Qq */
3515FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3516{
3517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3518 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3519 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3520 {
3521 /*
3522 * Register, register.
3523 */
3524 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3525 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3527 IEM_MC_BEGIN(0, 1);
3528 IEM_MC_LOCAL(uint64_t, u64Tmp);
3529 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3530 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3531 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3532 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3533 IEM_MC_ADVANCE_RIP();
3534 IEM_MC_END();
3535 }
3536 else
3537 {
3538 /*
3539 * Register, memory.
3540 */
3541 IEM_MC_BEGIN(0, 2);
3542 IEM_MC_LOCAL(uint64_t, u64Tmp);
3543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3544
3545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3547 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3548 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3549 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3550 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3551
3552 IEM_MC_ADVANCE_RIP();
3553 IEM_MC_END();
3554 }
3555 return VINF_SUCCESS;
3556}
3557
3558/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3559FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3560{
3561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3562 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3563 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3564 {
3565 /*
3566 * Register, register.
3567 */
3568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3569 IEM_MC_BEGIN(0, 0);
3570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3572 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3573 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3574 IEM_MC_ADVANCE_RIP();
3575 IEM_MC_END();
3576 }
3577 else
3578 {
3579 /*
3580 * Register, memory.
3581 */
3582 IEM_MC_BEGIN(0, 2);
3583 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3585
3586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3588 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3589 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3590 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3591 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3592
3593 IEM_MC_ADVANCE_RIP();
3594 IEM_MC_END();
3595 }
3596 return VINF_SUCCESS;
3597}
3598
3599/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3600FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3601{
3602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3603 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3605 {
3606 /*
3607 * Register, register.
3608 */
3609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3610 IEM_MC_BEGIN(0, 0);
3611 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3612 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3613 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3614 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3615 IEM_MC_ADVANCE_RIP();
3616 IEM_MC_END();
3617 }
3618 else
3619 {
3620 /*
3621 * Register, memory.
3622 */
3623 IEM_MC_BEGIN(0, 2);
3624 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3626
3627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3630 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3631 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3632 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3633
3634 IEM_MC_ADVANCE_RIP();
3635 IEM_MC_END();
3636 }
3637 return VINF_SUCCESS;
3638}
3639
3640
3641/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3642FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3643{
3644 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3646 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3647 {
3648 /*
3649 * Register, register.
3650 */
3651 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3653
3654 IEM_MC_BEGIN(3, 0);
3655 IEM_MC_ARG(uint64_t *, pDst, 0);
3656 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3657 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3658 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3659 IEM_MC_PREPARE_FPU_USAGE();
3660 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3661 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3662 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3663 IEM_MC_ADVANCE_RIP();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 /*
3669 * Register, memory.
3670 */
3671 IEM_MC_BEGIN(3, 2);
3672 IEM_MC_ARG(uint64_t *, pDst, 0);
3673 IEM_MC_LOCAL(uint64_t, uSrc);
3674 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3676
3677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3678 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3679 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3681 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3682
3683 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3684 IEM_MC_PREPARE_FPU_USAGE();
3685 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3686 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3687
3688 IEM_MC_ADVANCE_RIP();
3689 IEM_MC_END();
3690 }
3691 return VINF_SUCCESS;
3692}
3693
3694/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3695FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3696{
3697 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3699 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3700 {
3701 /*
3702 * Register, register.
3703 */
3704 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3706
3707 IEM_MC_BEGIN(3, 0);
3708 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3709 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3710 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3711 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3712 IEM_MC_PREPARE_SSE_USAGE();
3713 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3714 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3715 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3716 IEM_MC_ADVANCE_RIP();
3717 IEM_MC_END();
3718 }
3719 else
3720 {
3721 /*
3722 * Register, memory.
3723 */
3724 IEM_MC_BEGIN(3, 2);
3725 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3726 IEM_MC_LOCAL(RTUINT128U, uSrc);
3727 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3729
3730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3731 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3732 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3734 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3735
3736 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3737 IEM_MC_PREPARE_SSE_USAGE();
3738 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3739 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3740
3741 IEM_MC_ADVANCE_RIP();
3742 IEM_MC_END();
3743 }
3744 return VINF_SUCCESS;
3745}
3746
3747/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3748FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3749{
3750 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3752 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3753 {
3754 /*
3755 * Register, register.
3756 */
3757 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3759
3760 IEM_MC_BEGIN(3, 0);
3761 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3762 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3763 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3764 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3765 IEM_MC_PREPARE_SSE_USAGE();
3766 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3767 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3768 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3769 IEM_MC_ADVANCE_RIP();
3770 IEM_MC_END();
3771 }
3772 else
3773 {
3774 /*
3775 * Register, memory.
3776 */
3777 IEM_MC_BEGIN(3, 2);
3778 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3779 IEM_MC_LOCAL(RTUINT128U, uSrc);
3780 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3782
3783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3784 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3785 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3787 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3788
3789 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3790 IEM_MC_PREPARE_SSE_USAGE();
3791 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3792 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3793
3794 IEM_MC_ADVANCE_RIP();
3795 IEM_MC_END();
3796 }
3797 return VINF_SUCCESS;
3798}
3799
3800/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3801FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3802{
3803 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3806 {
3807 /*
3808 * Register, register.
3809 */
3810 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3812
3813 IEM_MC_BEGIN(3, 0);
3814 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3815 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3816 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3817 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3818 IEM_MC_PREPARE_SSE_USAGE();
3819 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3820 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3821 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3822 IEM_MC_ADVANCE_RIP();
3823 IEM_MC_END();
3824 }
3825 else
3826 {
3827 /*
3828 * Register, memory.
3829 */
3830 IEM_MC_BEGIN(3, 2);
3831 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3832 IEM_MC_LOCAL(RTUINT128U, uSrc);
3833 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3835
3836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3837 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3838 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3841
3842 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3843 IEM_MC_PREPARE_SSE_USAGE();
3844 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3845 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3846
3847 IEM_MC_ADVANCE_RIP();
3848 IEM_MC_END();
3849 }
3850 return VINF_SUCCESS;
3851}
3852
3853
3854/** Opcode 0x0f 0x71 11/2. */
3855FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3856
3857/** Opcode 0x66 0x0f 0x71 11/2. */
3858FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3859
3860/** Opcode 0x0f 0x71 11/4. */
3861FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3862
3863/** Opcode 0x66 0x0f 0x71 11/4. */
3864FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3865
3866/** Opcode 0x0f 0x71 11/6. */
3867FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3868
3869/** Opcode 0x66 0x0f 0x71 11/6. */
3870FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3871
3872
3873/**
3874 * Group 12 jump table for register variant.
3875 */
3876IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3877{
3878 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3879 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3880 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3881 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3882 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3883 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3884 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3885 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3886};
3887AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3888
3889
3890/** Opcode 0x0f 0x71. */
3891FNIEMOP_DEF(iemOp_Grp12)
3892{
3893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3895 /* register, register */
3896 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3897 + pVCpu->iem.s.idxPrefix], bRm);
3898 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3899}
3900
3901
3902/** Opcode 0x0f 0x72 11/2. */
3903FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3904
3905/** Opcode 0x66 0x0f 0x72 11/2. */
3906FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3907
3908/** Opcode 0x0f 0x72 11/4. */
3909FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3910
3911/** Opcode 0x66 0x0f 0x72 11/4. */
3912FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3913
3914/** Opcode 0x0f 0x72 11/6. */
3915FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3916
3917/** Opcode 0x66 0x0f 0x72 11/6. */
3918FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3919
3920
3921/**
3922 * Group 13 jump table for register variant.
3923 */
3924IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3925{
3926 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3927 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3928 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3929 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3930 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3931 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3932 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3933 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3934};
3935AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3936
3937/** Opcode 0x0f 0x72. */
3938FNIEMOP_DEF(iemOp_Grp13)
3939{
3940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3941 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3942 /* register, register */
3943 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3944 + pVCpu->iem.s.idxPrefix], bRm);
3945 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3946}
3947
3948
3949/** Opcode 0x0f 0x73 11/2. */
3950FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3951
3952/** Opcode 0x66 0x0f 0x73 11/2. */
3953FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3954
3955/** Opcode 0x66 0x0f 0x73 11/3. */
3956FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3957
3958/** Opcode 0x0f 0x73 11/6. */
3959FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3960
3961/** Opcode 0x66 0x0f 0x73 11/6. */
3962FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3963
3964/** Opcode 0x66 0x0f 0x73 11/7. */
3965FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3966
3967/**
3968 * Group 14 jump table for register variant.
3969 */
3970IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3971{
3972 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3973 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3974 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3975 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3976 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3977 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3978 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3979 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3980};
3981AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3982
3983
3984/** Opcode 0x0f 0x73. */
3985FNIEMOP_DEF(iemOp_Grp14)
3986{
3987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3988 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3989 /* register, register */
3990 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3991 + pVCpu->iem.s.idxPrefix], bRm);
3992 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3993}
3994
3995
3996/**
3997 * Common worker for MMX instructions on the form:
3998 * pxxx mm1, mm2/mem64
3999 */
4000FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4001{
4002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4003 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4004 {
4005 /*
4006 * Register, register.
4007 */
4008 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4009 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4011 IEM_MC_BEGIN(2, 0);
4012 IEM_MC_ARG(uint64_t *, pDst, 0);
4013 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4014 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4015 IEM_MC_PREPARE_FPU_USAGE();
4016 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4017 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4018 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4019 IEM_MC_ADVANCE_RIP();
4020 IEM_MC_END();
4021 }
4022 else
4023 {
4024 /*
4025 * Register, memory.
4026 */
4027 IEM_MC_BEGIN(2, 2);
4028 IEM_MC_ARG(uint64_t *, pDst, 0);
4029 IEM_MC_LOCAL(uint64_t, uSrc);
4030 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4032
4033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4035 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4036 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4037
4038 IEM_MC_PREPARE_FPU_USAGE();
4039 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4040 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4041
4042 IEM_MC_ADVANCE_RIP();
4043 IEM_MC_END();
4044 }
4045 return VINF_SUCCESS;
4046}
4047
4048
4049/**
4050 * Common worker for SSE2 instructions on the forms:
4051 * pxxx xmm1, xmm2/mem128
4052 *
4053 * Proper alignment of the 128-bit operand is enforced.
4054 * Exceptions type 4. SSE2 cpuid checks.
4055 */
4056FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4057{
4058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4059 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4060 {
4061 /*
4062 * Register, register.
4063 */
4064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4065 IEM_MC_BEGIN(2, 0);
4066 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4067 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4068 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4069 IEM_MC_PREPARE_SSE_USAGE();
4070 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4071 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4072 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4073 IEM_MC_ADVANCE_RIP();
4074 IEM_MC_END();
4075 }
4076 else
4077 {
4078 /*
4079 * Register, memory.
4080 */
4081 IEM_MC_BEGIN(2, 2);
4082 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4083 IEM_MC_LOCAL(RTUINT128U, uSrc);
4084 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4086
4087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4089 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4090 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4091
4092 IEM_MC_PREPARE_SSE_USAGE();
4093 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4094 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4095
4096 IEM_MC_ADVANCE_RIP();
4097 IEM_MC_END();
4098 }
4099 return VINF_SUCCESS;
4100}
4101
4102
4103/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4104FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4105{
4106 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4107 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4108}
4109
4110/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4111FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4112{
4113 IEMOP_MNEMONIC(vpcmpeqb_Vx_Wx, "pcmpeqb");
4114 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4115}
4116
4117/* Opcode 0xf3 0x0f 0x74 - invalid */
4118/* Opcode 0xf2 0x0f 0x74 - invalid */
4119
4120
4121/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4122FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4123{
4124 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4125 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4126}
4127
4128/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4129FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4130{
4131 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4132 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4133}
4134
4135/* Opcode 0xf3 0x0f 0x75 - invalid */
4136/* Opcode 0xf2 0x0f 0x75 - invalid */
4137
4138
4139/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4140FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4141{
4142 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4143 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4144}
4145
4146/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4147FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4148{
4149 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "vpcmpeqd");
4150 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4151}
4152
4153/* Opcode 0xf3 0x0f 0x76 - invalid */
4154/* Opcode 0xf2 0x0f 0x76 - invalid */
4155
4156
4157/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4158FNIEMOP_STUB(iemOp_emms);
4159/* Opcode 0x66 0x0f 0x77 - invalid */
4160/* Opcode 0xf3 0x0f 0x77 - invalid */
4161/* Opcode 0xf2 0x0f 0x77 - invalid */
4162
4163/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4164FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4165/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4166FNIEMOP_STUB(iemOp_AmdGrp17);
4167/* Opcode 0xf3 0x0f 0x78 - invalid */
4168/* Opcode 0xf2 0x0f 0x78 - invalid */
4169
4170/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4171FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4172/* Opcode 0x66 0x0f 0x79 - invalid */
4173/* Opcode 0xf3 0x0f 0x79 - invalid */
4174/* Opcode 0xf2 0x0f 0x79 - invalid */
4175
4176/* Opcode 0x0f 0x7a - invalid */
4177/* Opcode 0x66 0x0f 0x7a - invalid */
4178/* Opcode 0xf3 0x0f 0x7a - invalid */
4179/* Opcode 0xf2 0x0f 0x7a - invalid */
4180
4181/* Opcode 0x0f 0x7b - invalid */
4182/* Opcode 0x66 0x0f 0x7b - invalid */
4183/* Opcode 0xf3 0x0f 0x7b - invalid */
4184/* Opcode 0xf2 0x0f 0x7b - invalid */
4185
4186/* Opcode 0x0f 0x7c - invalid */
4187/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4188FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4189/* Opcode 0xf3 0x0f 0x7c - invalid */
4190/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4191FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4192
4193/* Opcode 0x0f 0x7d - invalid */
4194/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4195FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4196/* Opcode 0xf3 0x0f 0x7d - invalid */
4197/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4198FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4199
4200
4201/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4202FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4203{
4204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4205 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4206 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4207 else
4208 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4209 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4210 {
4211 /* greg, MMX */
4212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4213 IEM_MC_BEGIN(0, 1);
4214 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4215 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4216 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4217 {
4218 IEM_MC_LOCAL(uint64_t, u64Tmp);
4219 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4220 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4221 }
4222 else
4223 {
4224 IEM_MC_LOCAL(uint32_t, u32Tmp);
4225 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4226 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4227 }
4228 IEM_MC_ADVANCE_RIP();
4229 IEM_MC_END();
4230 }
4231 else
4232 {
4233 /* [mem], MMX */
4234 IEM_MC_BEGIN(0, 2);
4235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4236 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4239 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4240 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4241 {
4242 IEM_MC_LOCAL(uint64_t, u64Tmp);
4243 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4244 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4245 }
4246 else
4247 {
4248 IEM_MC_LOCAL(uint32_t, u32Tmp);
4249 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4250 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4251 }
4252 IEM_MC_ADVANCE_RIP();
4253 IEM_MC_END();
4254 }
4255 return VINF_SUCCESS;
4256}
4257
4258/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4259FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4260{
4261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4262 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4263 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4264 else
4265 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4266 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4267 {
4268 /* greg, XMM */
4269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4270 IEM_MC_BEGIN(0, 1);
4271 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4272 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4273 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4274 {
4275 IEM_MC_LOCAL(uint64_t, u64Tmp);
4276 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4277 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4278 }
4279 else
4280 {
4281 IEM_MC_LOCAL(uint32_t, u32Tmp);
4282 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4283 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4284 }
4285 IEM_MC_ADVANCE_RIP();
4286 IEM_MC_END();
4287 }
4288 else
4289 {
4290 /* [mem], XMM */
4291 IEM_MC_BEGIN(0, 2);
4292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4293 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4296 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4297 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4298 {
4299 IEM_MC_LOCAL(uint64_t, u64Tmp);
4300 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4301 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4302 }
4303 else
4304 {
4305 IEM_MC_LOCAL(uint32_t, u32Tmp);
4306 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4307 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4308 }
4309 IEM_MC_ADVANCE_RIP();
4310 IEM_MC_END();
4311 }
4312 return VINF_SUCCESS;
4313}
4314
4315/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
4316FNIEMOP_STUB(iemOp_movq_Vq_Wq);
4317/* Opcode 0xf2 0x0f 0x7e - invalid */
4318
4319
4320/** Opcode 0x0f 0x7f - movq Qq, Pq */
4321FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4322{
4323 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4325 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4326 {
4327 /*
4328 * Register, register.
4329 */
4330 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4331 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4333 IEM_MC_BEGIN(0, 1);
4334 IEM_MC_LOCAL(uint64_t, u64Tmp);
4335 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4336 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4337 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4338 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4339 IEM_MC_ADVANCE_RIP();
4340 IEM_MC_END();
4341 }
4342 else
4343 {
4344 /*
4345 * Register, memory.
4346 */
4347 IEM_MC_BEGIN(0, 2);
4348 IEM_MC_LOCAL(uint64_t, u64Tmp);
4349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4350
4351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4353 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4354 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4355
4356 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4357 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4358
4359 IEM_MC_ADVANCE_RIP();
4360 IEM_MC_END();
4361 }
4362 return VINF_SUCCESS;
4363}
4364
4365/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4366FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4367{
4368 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4370 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4371 {
4372 /*
4373 * Register, register.
4374 */
4375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4376 IEM_MC_BEGIN(0, 0);
4377 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4378 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4379 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4380 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4381 IEM_MC_ADVANCE_RIP();
4382 IEM_MC_END();
4383 }
4384 else
4385 {
4386 /*
4387 * Register, memory.
4388 */
4389 IEM_MC_BEGIN(0, 2);
4390 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4392
4393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4395 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4396 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4397
4398 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4399 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4400
4401 IEM_MC_ADVANCE_RIP();
4402 IEM_MC_END();
4403 }
4404 return VINF_SUCCESS;
4405}
4406
4407/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4408FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4409{
4410 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4411 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4412 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4413 {
4414 /*
4415 * Register, register.
4416 */
4417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4418 IEM_MC_BEGIN(0, 0);
4419 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4420 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4421 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4422 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4423 IEM_MC_ADVANCE_RIP();
4424 IEM_MC_END();
4425 }
4426 else
4427 {
4428 /*
4429 * Register, memory.
4430 */
4431 IEM_MC_BEGIN(0, 2);
4432 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4434
4435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4437 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4439
4440 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4441 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4442
4443 IEM_MC_ADVANCE_RIP();
4444 IEM_MC_END();
4445 }
4446 return VINF_SUCCESS;
4447}
4448
4449/* Opcode 0xf2 0x0f 0x7f - invalid */
4450
4451
4452
4453/** Opcode 0x0f 0x80. */
4454FNIEMOP_DEF(iemOp_jo_Jv)
4455{
4456 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4457 IEMOP_HLP_MIN_386();
4458 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4459 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4460 {
4461 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4463
4464 IEM_MC_BEGIN(0, 0);
4465 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4466 IEM_MC_REL_JMP_S16(i16Imm);
4467 } IEM_MC_ELSE() {
4468 IEM_MC_ADVANCE_RIP();
4469 } IEM_MC_ENDIF();
4470 IEM_MC_END();
4471 }
4472 else
4473 {
4474 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4476
4477 IEM_MC_BEGIN(0, 0);
4478 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4479 IEM_MC_REL_JMP_S32(i32Imm);
4480 } IEM_MC_ELSE() {
4481 IEM_MC_ADVANCE_RIP();
4482 } IEM_MC_ENDIF();
4483 IEM_MC_END();
4484 }
4485 return VINF_SUCCESS;
4486}
4487
4488
4489/** Opcode 0x0f 0x81. */
4490FNIEMOP_DEF(iemOp_jno_Jv)
4491{
4492 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4493 IEMOP_HLP_MIN_386();
4494 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4495 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4496 {
4497 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4499
4500 IEM_MC_BEGIN(0, 0);
4501 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4502 IEM_MC_ADVANCE_RIP();
4503 } IEM_MC_ELSE() {
4504 IEM_MC_REL_JMP_S16(i16Imm);
4505 } IEM_MC_ENDIF();
4506 IEM_MC_END();
4507 }
4508 else
4509 {
4510 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4512
4513 IEM_MC_BEGIN(0, 0);
4514 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4515 IEM_MC_ADVANCE_RIP();
4516 } IEM_MC_ELSE() {
4517 IEM_MC_REL_JMP_S32(i32Imm);
4518 } IEM_MC_ENDIF();
4519 IEM_MC_END();
4520 }
4521 return VINF_SUCCESS;
4522}
4523
4524
4525/** Opcode 0x0f 0x82. */
4526FNIEMOP_DEF(iemOp_jc_Jv)
4527{
4528 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4529 IEMOP_HLP_MIN_386();
4530 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4531 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4532 {
4533 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4535
4536 IEM_MC_BEGIN(0, 0);
4537 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4538 IEM_MC_REL_JMP_S16(i16Imm);
4539 } IEM_MC_ELSE() {
4540 IEM_MC_ADVANCE_RIP();
4541 } IEM_MC_ENDIF();
4542 IEM_MC_END();
4543 }
4544 else
4545 {
4546 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4548
4549 IEM_MC_BEGIN(0, 0);
4550 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4551 IEM_MC_REL_JMP_S32(i32Imm);
4552 } IEM_MC_ELSE() {
4553 IEM_MC_ADVANCE_RIP();
4554 } IEM_MC_ENDIF();
4555 IEM_MC_END();
4556 }
4557 return VINF_SUCCESS;
4558}
4559
4560
4561/** Opcode 0x0f 0x83. */
4562FNIEMOP_DEF(iemOp_jnc_Jv)
4563{
4564 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4565 IEMOP_HLP_MIN_386();
4566 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4567 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4568 {
4569 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4571
4572 IEM_MC_BEGIN(0, 0);
4573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4574 IEM_MC_ADVANCE_RIP();
4575 } IEM_MC_ELSE() {
4576 IEM_MC_REL_JMP_S16(i16Imm);
4577 } IEM_MC_ENDIF();
4578 IEM_MC_END();
4579 }
4580 else
4581 {
4582 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4584
4585 IEM_MC_BEGIN(0, 0);
4586 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4587 IEM_MC_ADVANCE_RIP();
4588 } IEM_MC_ELSE() {
4589 IEM_MC_REL_JMP_S32(i32Imm);
4590 } IEM_MC_ENDIF();
4591 IEM_MC_END();
4592 }
4593 return VINF_SUCCESS;
4594}
4595
4596
4597/** Opcode 0x0f 0x84. */
4598FNIEMOP_DEF(iemOp_je_Jv)
4599{
4600 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4601 IEMOP_HLP_MIN_386();
4602 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4603 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4604 {
4605 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4607
4608 IEM_MC_BEGIN(0, 0);
4609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4610 IEM_MC_REL_JMP_S16(i16Imm);
4611 } IEM_MC_ELSE() {
4612 IEM_MC_ADVANCE_RIP();
4613 } IEM_MC_ENDIF();
4614 IEM_MC_END();
4615 }
4616 else
4617 {
4618 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4620
4621 IEM_MC_BEGIN(0, 0);
4622 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4623 IEM_MC_REL_JMP_S32(i32Imm);
4624 } IEM_MC_ELSE() {
4625 IEM_MC_ADVANCE_RIP();
4626 } IEM_MC_ENDIF();
4627 IEM_MC_END();
4628 }
4629 return VINF_SUCCESS;
4630}
4631
4632
4633/** Opcode 0x0f 0x85. */
4634FNIEMOP_DEF(iemOp_jne_Jv)
4635{
4636 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4637 IEMOP_HLP_MIN_386();
4638 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4639 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4640 {
4641 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4643
4644 IEM_MC_BEGIN(0, 0);
4645 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4646 IEM_MC_ADVANCE_RIP();
4647 } IEM_MC_ELSE() {
4648 IEM_MC_REL_JMP_S16(i16Imm);
4649 } IEM_MC_ENDIF();
4650 IEM_MC_END();
4651 }
4652 else
4653 {
4654 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4656
4657 IEM_MC_BEGIN(0, 0);
4658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4659 IEM_MC_ADVANCE_RIP();
4660 } IEM_MC_ELSE() {
4661 IEM_MC_REL_JMP_S32(i32Imm);
4662 } IEM_MC_ENDIF();
4663 IEM_MC_END();
4664 }
4665 return VINF_SUCCESS;
4666}
4667
4668
4669/** Opcode 0x0f 0x86. */
4670FNIEMOP_DEF(iemOp_jbe_Jv)
4671{
4672 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4673 IEMOP_HLP_MIN_386();
4674 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4675 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4676 {
4677 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4679
4680 IEM_MC_BEGIN(0, 0);
4681 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4682 IEM_MC_REL_JMP_S16(i16Imm);
4683 } IEM_MC_ELSE() {
4684 IEM_MC_ADVANCE_RIP();
4685 } IEM_MC_ENDIF();
4686 IEM_MC_END();
4687 }
4688 else
4689 {
4690 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692
4693 IEM_MC_BEGIN(0, 0);
4694 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4695 IEM_MC_REL_JMP_S32(i32Imm);
4696 } IEM_MC_ELSE() {
4697 IEM_MC_ADVANCE_RIP();
4698 } IEM_MC_ENDIF();
4699 IEM_MC_END();
4700 }
4701 return VINF_SUCCESS;
4702}
4703
4704
4705/** Opcode 0x0f 0x87. */
4706FNIEMOP_DEF(iemOp_jnbe_Jv)
4707{
4708 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4709 IEMOP_HLP_MIN_386();
4710 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4711 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4712 {
4713 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4715
4716 IEM_MC_BEGIN(0, 0);
4717 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4718 IEM_MC_ADVANCE_RIP();
4719 } IEM_MC_ELSE() {
4720 IEM_MC_REL_JMP_S16(i16Imm);
4721 } IEM_MC_ENDIF();
4722 IEM_MC_END();
4723 }
4724 else
4725 {
4726 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4728
4729 IEM_MC_BEGIN(0, 0);
4730 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4731 IEM_MC_ADVANCE_RIP();
4732 } IEM_MC_ELSE() {
4733 IEM_MC_REL_JMP_S32(i32Imm);
4734 } IEM_MC_ENDIF();
4735 IEM_MC_END();
4736 }
4737 return VINF_SUCCESS;
4738}
4739
4740
4741/** Opcode 0x0f 0x88. */
4742FNIEMOP_DEF(iemOp_js_Jv)
4743{
4744 IEMOP_MNEMONIC(js_Jv, "js Jv");
4745 IEMOP_HLP_MIN_386();
4746 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4747 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4748 {
4749 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4751
4752 IEM_MC_BEGIN(0, 0);
4753 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4754 IEM_MC_REL_JMP_S16(i16Imm);
4755 } IEM_MC_ELSE() {
4756 IEM_MC_ADVANCE_RIP();
4757 } IEM_MC_ENDIF();
4758 IEM_MC_END();
4759 }
4760 else
4761 {
4762 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4764
4765 IEM_MC_BEGIN(0, 0);
4766 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4767 IEM_MC_REL_JMP_S32(i32Imm);
4768 } IEM_MC_ELSE() {
4769 IEM_MC_ADVANCE_RIP();
4770 } IEM_MC_ENDIF();
4771 IEM_MC_END();
4772 }
4773 return VINF_SUCCESS;
4774}
4775
4776
4777/** Opcode 0x0f 0x89. */
4778FNIEMOP_DEF(iemOp_jns_Jv)
4779{
4780 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4781 IEMOP_HLP_MIN_386();
4782 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4783 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4784 {
4785 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4787
4788 IEM_MC_BEGIN(0, 0);
4789 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4790 IEM_MC_ADVANCE_RIP();
4791 } IEM_MC_ELSE() {
4792 IEM_MC_REL_JMP_S16(i16Imm);
4793 } IEM_MC_ENDIF();
4794 IEM_MC_END();
4795 }
4796 else
4797 {
4798 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4800
4801 IEM_MC_BEGIN(0, 0);
4802 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4803 IEM_MC_ADVANCE_RIP();
4804 } IEM_MC_ELSE() {
4805 IEM_MC_REL_JMP_S32(i32Imm);
4806 } IEM_MC_ENDIF();
4807 IEM_MC_END();
4808 }
4809 return VINF_SUCCESS;
4810}
4811
4812
4813/** Opcode 0x0f 0x8a. */
4814FNIEMOP_DEF(iemOp_jp_Jv)
4815{
4816 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4817 IEMOP_HLP_MIN_386();
4818 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4819 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4820 {
4821 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4823
4824 IEM_MC_BEGIN(0, 0);
4825 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4826 IEM_MC_REL_JMP_S16(i16Imm);
4827 } IEM_MC_ELSE() {
4828 IEM_MC_ADVANCE_RIP();
4829 } IEM_MC_ENDIF();
4830 IEM_MC_END();
4831 }
4832 else
4833 {
4834 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4836
4837 IEM_MC_BEGIN(0, 0);
4838 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4839 IEM_MC_REL_JMP_S32(i32Imm);
4840 } IEM_MC_ELSE() {
4841 IEM_MC_ADVANCE_RIP();
4842 } IEM_MC_ENDIF();
4843 IEM_MC_END();
4844 }
4845 return VINF_SUCCESS;
4846}
4847
4848
4849/** Opcode 0x0f 0x8b. */
4850FNIEMOP_DEF(iemOp_jnp_Jv)
4851{
4852 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4853 IEMOP_HLP_MIN_386();
4854 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4855 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4856 {
4857 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4859
4860 IEM_MC_BEGIN(0, 0);
4861 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4862 IEM_MC_ADVANCE_RIP();
4863 } IEM_MC_ELSE() {
4864 IEM_MC_REL_JMP_S16(i16Imm);
4865 } IEM_MC_ENDIF();
4866 IEM_MC_END();
4867 }
4868 else
4869 {
4870 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4872
4873 IEM_MC_BEGIN(0, 0);
4874 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4875 IEM_MC_ADVANCE_RIP();
4876 } IEM_MC_ELSE() {
4877 IEM_MC_REL_JMP_S32(i32Imm);
4878 } IEM_MC_ENDIF();
4879 IEM_MC_END();
4880 }
4881 return VINF_SUCCESS;
4882}
4883
4884
4885/** Opcode 0x0f 0x8c. */
4886FNIEMOP_DEF(iemOp_jl_Jv)
4887{
4888 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4889 IEMOP_HLP_MIN_386();
4890 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4891 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4892 {
4893 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4895
4896 IEM_MC_BEGIN(0, 0);
4897 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4898 IEM_MC_REL_JMP_S16(i16Imm);
4899 } IEM_MC_ELSE() {
4900 IEM_MC_ADVANCE_RIP();
4901 } IEM_MC_ENDIF();
4902 IEM_MC_END();
4903 }
4904 else
4905 {
4906 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4908
4909 IEM_MC_BEGIN(0, 0);
4910 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4911 IEM_MC_REL_JMP_S32(i32Imm);
4912 } IEM_MC_ELSE() {
4913 IEM_MC_ADVANCE_RIP();
4914 } IEM_MC_ENDIF();
4915 IEM_MC_END();
4916 }
4917 return VINF_SUCCESS;
4918}
4919
4920
4921/** Opcode 0x0f 0x8d. */
4922FNIEMOP_DEF(iemOp_jnl_Jv)
4923{
4924 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4925 IEMOP_HLP_MIN_386();
4926 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4927 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4928 {
4929 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4931
4932 IEM_MC_BEGIN(0, 0);
4933 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4934 IEM_MC_ADVANCE_RIP();
4935 } IEM_MC_ELSE() {
4936 IEM_MC_REL_JMP_S16(i16Imm);
4937 } IEM_MC_ENDIF();
4938 IEM_MC_END();
4939 }
4940 else
4941 {
4942 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4944
4945 IEM_MC_BEGIN(0, 0);
4946 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4947 IEM_MC_ADVANCE_RIP();
4948 } IEM_MC_ELSE() {
4949 IEM_MC_REL_JMP_S32(i32Imm);
4950 } IEM_MC_ENDIF();
4951 IEM_MC_END();
4952 }
4953 return VINF_SUCCESS;
4954}
4955
4956
4957/** Opcode 0x0f 0x8e. */
4958FNIEMOP_DEF(iemOp_jle_Jv)
4959{
4960 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4961 IEMOP_HLP_MIN_386();
4962 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4963 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4964 {
4965 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4967
4968 IEM_MC_BEGIN(0, 0);
4969 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4970 IEM_MC_REL_JMP_S16(i16Imm);
4971 } IEM_MC_ELSE() {
4972 IEM_MC_ADVANCE_RIP();
4973 } IEM_MC_ENDIF();
4974 IEM_MC_END();
4975 }
4976 else
4977 {
4978 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4980
4981 IEM_MC_BEGIN(0, 0);
4982 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4983 IEM_MC_REL_JMP_S32(i32Imm);
4984 } IEM_MC_ELSE() {
4985 IEM_MC_ADVANCE_RIP();
4986 } IEM_MC_ENDIF();
4987 IEM_MC_END();
4988 }
4989 return VINF_SUCCESS;
4990}
4991
4992
4993/** Opcode 0x0f 0x8f. */
4994FNIEMOP_DEF(iemOp_jnle_Jv)
4995{
4996 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4997 IEMOP_HLP_MIN_386();
4998 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4999 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5000 {
5001 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5003
5004 IEM_MC_BEGIN(0, 0);
5005 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5006 IEM_MC_ADVANCE_RIP();
5007 } IEM_MC_ELSE() {
5008 IEM_MC_REL_JMP_S16(i16Imm);
5009 } IEM_MC_ENDIF();
5010 IEM_MC_END();
5011 }
5012 else
5013 {
5014 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5016
5017 IEM_MC_BEGIN(0, 0);
5018 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5019 IEM_MC_ADVANCE_RIP();
5020 } IEM_MC_ELSE() {
5021 IEM_MC_REL_JMP_S32(i32Imm);
5022 } IEM_MC_ENDIF();
5023 IEM_MC_END();
5024 }
5025 return VINF_SUCCESS;
5026}
5027
5028
5029/** Opcode 0x0f 0x90. */
5030FNIEMOP_DEF(iemOp_seto_Eb)
5031{
5032 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5033 IEMOP_HLP_MIN_386();
5034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5035
5036 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5037 * any way. AMD says it's "unused", whatever that means. We're
5038 * ignoring for now. */
5039 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5040 {
5041 /* register target */
5042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5043 IEM_MC_BEGIN(0, 0);
5044 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5045 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5046 } IEM_MC_ELSE() {
5047 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5048 } IEM_MC_ENDIF();
5049 IEM_MC_ADVANCE_RIP();
5050 IEM_MC_END();
5051 }
5052 else
5053 {
5054 /* memory target */
5055 IEM_MC_BEGIN(0, 1);
5056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5059 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5060 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5061 } IEM_MC_ELSE() {
5062 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5063 } IEM_MC_ENDIF();
5064 IEM_MC_ADVANCE_RIP();
5065 IEM_MC_END();
5066 }
5067 return VINF_SUCCESS;
5068}
5069
5070
5071/** Opcode 0x0f 0x91. */
5072FNIEMOP_DEF(iemOp_setno_Eb)
5073{
5074 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5075 IEMOP_HLP_MIN_386();
5076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5077
5078 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5079 * any way. AMD says it's "unused", whatever that means. We're
5080 * ignoring for now. */
5081 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5082 {
5083 /* register target */
5084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5085 IEM_MC_BEGIN(0, 0);
5086 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5087 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5088 } IEM_MC_ELSE() {
5089 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5090 } IEM_MC_ENDIF();
5091 IEM_MC_ADVANCE_RIP();
5092 IEM_MC_END();
5093 }
5094 else
5095 {
5096 /* memory target */
5097 IEM_MC_BEGIN(0, 1);
5098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5101 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5102 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5103 } IEM_MC_ELSE() {
5104 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5105 } IEM_MC_ENDIF();
5106 IEM_MC_ADVANCE_RIP();
5107 IEM_MC_END();
5108 }
5109 return VINF_SUCCESS;
5110}
5111
5112
5113/** Opcode 0x0f 0x92. */
5114FNIEMOP_DEF(iemOp_setc_Eb)
5115{
5116 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5117 IEMOP_HLP_MIN_386();
5118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5119
5120 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5121 * any way. AMD says it's "unused", whatever that means. We're
5122 * ignoring for now. */
5123 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5124 {
5125 /* register target */
5126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5127 IEM_MC_BEGIN(0, 0);
5128 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5129 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5130 } IEM_MC_ELSE() {
5131 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5132 } IEM_MC_ENDIF();
5133 IEM_MC_ADVANCE_RIP();
5134 IEM_MC_END();
5135 }
5136 else
5137 {
5138 /* memory target */
5139 IEM_MC_BEGIN(0, 1);
5140 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5143 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5144 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5145 } IEM_MC_ELSE() {
5146 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5147 } IEM_MC_ENDIF();
5148 IEM_MC_ADVANCE_RIP();
5149 IEM_MC_END();
5150 }
5151 return VINF_SUCCESS;
5152}
5153
5154
5155/** Opcode 0x0f 0x93. */
5156FNIEMOP_DEF(iemOp_setnc_Eb)
5157{
5158 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5159 IEMOP_HLP_MIN_386();
5160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5161
5162 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5163 * any way. AMD says it's "unused", whatever that means. We're
5164 * ignoring for now. */
5165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5166 {
5167 /* register target */
5168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5169 IEM_MC_BEGIN(0, 0);
5170 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5171 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5172 } IEM_MC_ELSE() {
5173 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5174 } IEM_MC_ENDIF();
5175 IEM_MC_ADVANCE_RIP();
5176 IEM_MC_END();
5177 }
5178 else
5179 {
5180 /* memory target */
5181 IEM_MC_BEGIN(0, 1);
5182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5185 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5186 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5187 } IEM_MC_ELSE() {
5188 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5189 } IEM_MC_ENDIF();
5190 IEM_MC_ADVANCE_RIP();
5191 IEM_MC_END();
5192 }
5193 return VINF_SUCCESS;
5194}
5195
5196
5197/** Opcode 0x0f 0x94. */
5198FNIEMOP_DEF(iemOp_sete_Eb)
5199{
5200 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5201 IEMOP_HLP_MIN_386();
5202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5203
5204 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5205 * any way. AMD says it's "unused", whatever that means. We're
5206 * ignoring for now. */
5207 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5208 {
5209 /* register target */
5210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5211 IEM_MC_BEGIN(0, 0);
5212 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5213 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5214 } IEM_MC_ELSE() {
5215 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5216 } IEM_MC_ENDIF();
5217 IEM_MC_ADVANCE_RIP();
5218 IEM_MC_END();
5219 }
5220 else
5221 {
5222 /* memory target */
5223 IEM_MC_BEGIN(0, 1);
5224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5227 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5228 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5229 } IEM_MC_ELSE() {
5230 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5231 } IEM_MC_ENDIF();
5232 IEM_MC_ADVANCE_RIP();
5233 IEM_MC_END();
5234 }
5235 return VINF_SUCCESS;
5236}
5237
5238
5239/** Opcode 0x0f 0x95. */
5240FNIEMOP_DEF(iemOp_setne_Eb)
5241{
5242 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5243 IEMOP_HLP_MIN_386();
5244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5245
5246 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5247 * any way. AMD says it's "unused", whatever that means. We're
5248 * ignoring for now. */
5249 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5250 {
5251 /* register target */
5252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5253 IEM_MC_BEGIN(0, 0);
5254 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5255 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5256 } IEM_MC_ELSE() {
5257 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5258 } IEM_MC_ENDIF();
5259 IEM_MC_ADVANCE_RIP();
5260 IEM_MC_END();
5261 }
5262 else
5263 {
5264 /* memory target */
5265 IEM_MC_BEGIN(0, 1);
5266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5269 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5270 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5271 } IEM_MC_ELSE() {
5272 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5273 } IEM_MC_ENDIF();
5274 IEM_MC_ADVANCE_RIP();
5275 IEM_MC_END();
5276 }
5277 return VINF_SUCCESS;
5278}
5279
5280
5281/** Opcode 0x0f 0x96. */
5282FNIEMOP_DEF(iemOp_setbe_Eb)
5283{
5284 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5285 IEMOP_HLP_MIN_386();
5286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5287
5288 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5289 * any way. AMD says it's "unused", whatever that means. We're
5290 * ignoring for now. */
5291 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5292 {
5293 /* register target */
5294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5295 IEM_MC_BEGIN(0, 0);
5296 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5297 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5298 } IEM_MC_ELSE() {
5299 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5300 } IEM_MC_ENDIF();
5301 IEM_MC_ADVANCE_RIP();
5302 IEM_MC_END();
5303 }
5304 else
5305 {
5306 /* memory target */
5307 IEM_MC_BEGIN(0, 1);
5308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5311 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5312 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5313 } IEM_MC_ELSE() {
5314 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5315 } IEM_MC_ENDIF();
5316 IEM_MC_ADVANCE_RIP();
5317 IEM_MC_END();
5318 }
5319 return VINF_SUCCESS;
5320}
5321
5322
5323/** Opcode 0x0f 0x97. */
5324FNIEMOP_DEF(iemOp_setnbe_Eb)
5325{
5326 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5327 IEMOP_HLP_MIN_386();
5328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5329
5330 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5331 * any way. AMD says it's "unused", whatever that means. We're
5332 * ignoring for now. */
5333 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5334 {
5335 /* register target */
5336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5337 IEM_MC_BEGIN(0, 0);
5338 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5339 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5340 } IEM_MC_ELSE() {
5341 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5342 } IEM_MC_ENDIF();
5343 IEM_MC_ADVANCE_RIP();
5344 IEM_MC_END();
5345 }
5346 else
5347 {
5348 /* memory target */
5349 IEM_MC_BEGIN(0, 1);
5350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5353 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5354 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5355 } IEM_MC_ELSE() {
5356 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5357 } IEM_MC_ENDIF();
5358 IEM_MC_ADVANCE_RIP();
5359 IEM_MC_END();
5360 }
5361 return VINF_SUCCESS;
5362}
5363
5364
5365/** Opcode 0x0f 0x98. */
5366FNIEMOP_DEF(iemOp_sets_Eb)
5367{
5368 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5369 IEMOP_HLP_MIN_386();
5370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5371
5372 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5373 * any way. AMD says it's "unused", whatever that means. We're
5374 * ignoring for now. */
5375 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5376 {
5377 /* register target */
5378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5379 IEM_MC_BEGIN(0, 0);
5380 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5381 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5382 } IEM_MC_ELSE() {
5383 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5384 } IEM_MC_ENDIF();
5385 IEM_MC_ADVANCE_RIP();
5386 IEM_MC_END();
5387 }
5388 else
5389 {
5390 /* memory target */
5391 IEM_MC_BEGIN(0, 1);
5392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5395 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5396 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5397 } IEM_MC_ELSE() {
5398 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5399 } IEM_MC_ENDIF();
5400 IEM_MC_ADVANCE_RIP();
5401 IEM_MC_END();
5402 }
5403 return VINF_SUCCESS;
5404}
5405
5406
5407/** Opcode 0x0f 0x99. */
5408FNIEMOP_DEF(iemOp_setns_Eb)
5409{
5410 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5411 IEMOP_HLP_MIN_386();
5412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5413
5414 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5415 * any way. AMD says it's "unused", whatever that means. We're
5416 * ignoring for now. */
5417 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5418 {
5419 /* register target */
5420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5421 IEM_MC_BEGIN(0, 0);
5422 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5423 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5424 } IEM_MC_ELSE() {
5425 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5426 } IEM_MC_ENDIF();
5427 IEM_MC_ADVANCE_RIP();
5428 IEM_MC_END();
5429 }
5430 else
5431 {
5432 /* memory target */
5433 IEM_MC_BEGIN(0, 1);
5434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5437 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5438 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5439 } IEM_MC_ELSE() {
5440 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5441 } IEM_MC_ENDIF();
5442 IEM_MC_ADVANCE_RIP();
5443 IEM_MC_END();
5444 }
5445 return VINF_SUCCESS;
5446}
5447
5448
5449/** Opcode 0x0f 0x9a. */
5450FNIEMOP_DEF(iemOp_setp_Eb)
5451{
5452 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5453 IEMOP_HLP_MIN_386();
5454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5455
5456 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5457 * any way. AMD says it's "unused", whatever that means. We're
5458 * ignoring for now. */
5459 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5460 {
5461 /* register target */
5462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5463 IEM_MC_BEGIN(0, 0);
5464 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5465 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5466 } IEM_MC_ELSE() {
5467 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5468 } IEM_MC_ENDIF();
5469 IEM_MC_ADVANCE_RIP();
5470 IEM_MC_END();
5471 }
5472 else
5473 {
5474 /* memory target */
5475 IEM_MC_BEGIN(0, 1);
5476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5479 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5480 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5481 } IEM_MC_ELSE() {
5482 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5483 } IEM_MC_ENDIF();
5484 IEM_MC_ADVANCE_RIP();
5485 IEM_MC_END();
5486 }
5487 return VINF_SUCCESS;
5488}
5489
5490
5491/** Opcode 0x0f 0x9b. */
5492FNIEMOP_DEF(iemOp_setnp_Eb)
5493{
5494 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5495 IEMOP_HLP_MIN_386();
5496 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5497
5498 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5499 * any way. AMD says it's "unused", whatever that means. We're
5500 * ignoring for now. */
5501 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5502 {
5503 /* register target */
5504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5505 IEM_MC_BEGIN(0, 0);
5506 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5507 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5508 } IEM_MC_ELSE() {
5509 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5510 } IEM_MC_ENDIF();
5511 IEM_MC_ADVANCE_RIP();
5512 IEM_MC_END();
5513 }
5514 else
5515 {
5516 /* memory target */
5517 IEM_MC_BEGIN(0, 1);
5518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5521 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5522 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5523 } IEM_MC_ELSE() {
5524 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5525 } IEM_MC_ENDIF();
5526 IEM_MC_ADVANCE_RIP();
5527 IEM_MC_END();
5528 }
5529 return VINF_SUCCESS;
5530}
5531
5532
5533/** Opcode 0x0f 0x9c. */
5534FNIEMOP_DEF(iemOp_setl_Eb)
5535{
5536 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5537 IEMOP_HLP_MIN_386();
5538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5539
5540 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5541 * any way. AMD says it's "unused", whatever that means. We're
5542 * ignoring for now. */
5543 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5544 {
5545 /* register target */
5546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5547 IEM_MC_BEGIN(0, 0);
5548 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5549 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5550 } IEM_MC_ELSE() {
5551 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5552 } IEM_MC_ENDIF();
5553 IEM_MC_ADVANCE_RIP();
5554 IEM_MC_END();
5555 }
5556 else
5557 {
5558 /* memory target */
5559 IEM_MC_BEGIN(0, 1);
5560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5563 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5564 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5565 } IEM_MC_ELSE() {
5566 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5567 } IEM_MC_ENDIF();
5568 IEM_MC_ADVANCE_RIP();
5569 IEM_MC_END();
5570 }
5571 return VINF_SUCCESS;
5572}
5573
5574
5575/** Opcode 0x0f 0x9d. */
5576FNIEMOP_DEF(iemOp_setnl_Eb)
5577{
5578 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5579 IEMOP_HLP_MIN_386();
5580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5581
5582 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5583 * any way. AMD says it's "unused", whatever that means. We're
5584 * ignoring for now. */
5585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5586 {
5587 /* register target */
5588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5589 IEM_MC_BEGIN(0, 0);
5590 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5591 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5592 } IEM_MC_ELSE() {
5593 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5594 } IEM_MC_ENDIF();
5595 IEM_MC_ADVANCE_RIP();
5596 IEM_MC_END();
5597 }
5598 else
5599 {
5600 /* memory target */
5601 IEM_MC_BEGIN(0, 1);
5602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5605 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5606 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5607 } IEM_MC_ELSE() {
5608 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5609 } IEM_MC_ENDIF();
5610 IEM_MC_ADVANCE_RIP();
5611 IEM_MC_END();
5612 }
5613 return VINF_SUCCESS;
5614}
5615
5616
5617/** Opcode 0x0f 0x9e. */
5618FNIEMOP_DEF(iemOp_setle_Eb)
5619{
5620 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5621 IEMOP_HLP_MIN_386();
5622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5623
5624 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5625 * any way. AMD says it's "unused", whatever that means. We're
5626 * ignoring for now. */
5627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5628 {
5629 /* register target */
5630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5631 IEM_MC_BEGIN(0, 0);
5632 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5633 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5634 } IEM_MC_ELSE() {
5635 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5636 } IEM_MC_ENDIF();
5637 IEM_MC_ADVANCE_RIP();
5638 IEM_MC_END();
5639 }
5640 else
5641 {
5642 /* memory target */
5643 IEM_MC_BEGIN(0, 1);
5644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5647 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5648 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5649 } IEM_MC_ELSE() {
5650 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5651 } IEM_MC_ENDIF();
5652 IEM_MC_ADVANCE_RIP();
5653 IEM_MC_END();
5654 }
5655 return VINF_SUCCESS;
5656}
5657
5658
5659/** Opcode 0x0f 0x9f. */
5660FNIEMOP_DEF(iemOp_setnle_Eb)
5661{
5662 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5663 IEMOP_HLP_MIN_386();
5664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5665
5666 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5667 * any way. AMD says it's "unused", whatever that means. We're
5668 * ignoring for now. */
5669 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5670 {
5671 /* register target */
5672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5673 IEM_MC_BEGIN(0, 0);
5674 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5675 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5676 } IEM_MC_ELSE() {
5677 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5678 } IEM_MC_ENDIF();
5679 IEM_MC_ADVANCE_RIP();
5680 IEM_MC_END();
5681 }
5682 else
5683 {
5684 /* memory target */
5685 IEM_MC_BEGIN(0, 1);
5686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5689 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5690 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5691 } IEM_MC_ELSE() {
5692 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5693 } IEM_MC_ENDIF();
5694 IEM_MC_ADVANCE_RIP();
5695 IEM_MC_END();
5696 }
5697 return VINF_SUCCESS;
5698}
5699
5700
5701/**
5702 * Common 'push segment-register' helper.
5703 */
5704FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5705{
5706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5707 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5708 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5709
5710 switch (pVCpu->iem.s.enmEffOpSize)
5711 {
5712 case IEMMODE_16BIT:
5713 IEM_MC_BEGIN(0, 1);
5714 IEM_MC_LOCAL(uint16_t, u16Value);
5715 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5716 IEM_MC_PUSH_U16(u16Value);
5717 IEM_MC_ADVANCE_RIP();
5718 IEM_MC_END();
5719 break;
5720
5721 case IEMMODE_32BIT:
5722 IEM_MC_BEGIN(0, 1);
5723 IEM_MC_LOCAL(uint32_t, u32Value);
5724 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5725 IEM_MC_PUSH_U32_SREG(u32Value);
5726 IEM_MC_ADVANCE_RIP();
5727 IEM_MC_END();
5728 break;
5729
5730 case IEMMODE_64BIT:
5731 IEM_MC_BEGIN(0, 1);
5732 IEM_MC_LOCAL(uint64_t, u64Value);
5733 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5734 IEM_MC_PUSH_U64(u64Value);
5735 IEM_MC_ADVANCE_RIP();
5736 IEM_MC_END();
5737 break;
5738 }
5739
5740 return VINF_SUCCESS;
5741}
5742
5743
5744/** Opcode 0x0f 0xa0. */
5745FNIEMOP_DEF(iemOp_push_fs)
5746{
5747 IEMOP_MNEMONIC(push_fs, "push fs");
5748 IEMOP_HLP_MIN_386();
5749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5750 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5751}
5752
5753
5754/** Opcode 0x0f 0xa1. */
5755FNIEMOP_DEF(iemOp_pop_fs)
5756{
5757 IEMOP_MNEMONIC(pop_fs, "pop fs");
5758 IEMOP_HLP_MIN_386();
5759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5760 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5761}
5762
5763
5764/** Opcode 0x0f 0xa2. */
5765FNIEMOP_DEF(iemOp_cpuid)
5766{
5767 IEMOP_MNEMONIC(cpuid, "cpuid");
5768 IEMOP_HLP_MIN_486(); /* not all 486es. */
5769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5770 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5771}
5772
5773
5774/**
5775 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5776 * iemOp_bts_Ev_Gv.
5777 */
5778FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5779{
5780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5781 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5782
5783 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5784 {
5785 /* register destination. */
5786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5787 switch (pVCpu->iem.s.enmEffOpSize)
5788 {
5789 case IEMMODE_16BIT:
5790 IEM_MC_BEGIN(3, 0);
5791 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5792 IEM_MC_ARG(uint16_t, u16Src, 1);
5793 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5794
5795 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5796 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5797 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5798 IEM_MC_REF_EFLAGS(pEFlags);
5799 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5800
5801 IEM_MC_ADVANCE_RIP();
5802 IEM_MC_END();
5803 return VINF_SUCCESS;
5804
5805 case IEMMODE_32BIT:
5806 IEM_MC_BEGIN(3, 0);
5807 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5808 IEM_MC_ARG(uint32_t, u32Src, 1);
5809 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5810
5811 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5812 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5813 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5814 IEM_MC_REF_EFLAGS(pEFlags);
5815 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5816
5817 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5818 IEM_MC_ADVANCE_RIP();
5819 IEM_MC_END();
5820 return VINF_SUCCESS;
5821
5822 case IEMMODE_64BIT:
5823 IEM_MC_BEGIN(3, 0);
5824 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5825 IEM_MC_ARG(uint64_t, u64Src, 1);
5826 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5827
5828 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5829 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5830 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5831 IEM_MC_REF_EFLAGS(pEFlags);
5832 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5833
5834 IEM_MC_ADVANCE_RIP();
5835 IEM_MC_END();
5836 return VINF_SUCCESS;
5837
5838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5839 }
5840 }
5841 else
5842 {
5843 /* memory destination. */
5844
5845 uint32_t fAccess;
5846 if (pImpl->pfnLockedU16)
5847 fAccess = IEM_ACCESS_DATA_RW;
5848 else /* BT */
5849 fAccess = IEM_ACCESS_DATA_R;
5850
5851 /** @todo test negative bit offsets! */
5852 switch (pVCpu->iem.s.enmEffOpSize)
5853 {
5854 case IEMMODE_16BIT:
5855 IEM_MC_BEGIN(3, 2);
5856 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5857 IEM_MC_ARG(uint16_t, u16Src, 1);
5858 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5860 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5861
5862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5863 if (pImpl->pfnLockedU16)
5864 IEMOP_HLP_DONE_DECODING();
5865 else
5866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5867 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5868 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5869 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5870 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5871 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5872 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5873 IEM_MC_FETCH_EFLAGS(EFlags);
5874
5875 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5876 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5878 else
5879 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5880 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5881
5882 IEM_MC_COMMIT_EFLAGS(EFlags);
5883 IEM_MC_ADVANCE_RIP();
5884 IEM_MC_END();
5885 return VINF_SUCCESS;
5886
5887 case IEMMODE_32BIT:
5888 IEM_MC_BEGIN(3, 2);
5889 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5890 IEM_MC_ARG(uint32_t, u32Src, 1);
5891 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5893 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5894
5895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5896 if (pImpl->pfnLockedU16)
5897 IEMOP_HLP_DONE_DECODING();
5898 else
5899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5900 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5901 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5902 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5903 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5904 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5905 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5906 IEM_MC_FETCH_EFLAGS(EFlags);
5907
5908 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5909 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5910 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5911 else
5912 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5913 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5914
5915 IEM_MC_COMMIT_EFLAGS(EFlags);
5916 IEM_MC_ADVANCE_RIP();
5917 IEM_MC_END();
5918 return VINF_SUCCESS;
5919
5920 case IEMMODE_64BIT:
5921 IEM_MC_BEGIN(3, 2);
5922 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5923 IEM_MC_ARG(uint64_t, u64Src, 1);
5924 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5926 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5927
5928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5929 if (pImpl->pfnLockedU16)
5930 IEMOP_HLP_DONE_DECODING();
5931 else
5932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5933 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5934 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5935 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5936 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5937 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5938 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5939 IEM_MC_FETCH_EFLAGS(EFlags);
5940
5941 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5942 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5943 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5944 else
5945 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5946 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5947
5948 IEM_MC_COMMIT_EFLAGS(EFlags);
5949 IEM_MC_ADVANCE_RIP();
5950 IEM_MC_END();
5951 return VINF_SUCCESS;
5952
5953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5954 }
5955 }
5956}
5957
5958
5959/** Opcode 0x0f 0xa3. */
5960FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5961{
5962 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5963 IEMOP_HLP_MIN_386();
5964 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5965}
5966
5967
5968/**
5969 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5970 */
5971FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5972{
5973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5974 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5975
5976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5977 {
5978 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5980
5981 switch (pVCpu->iem.s.enmEffOpSize)
5982 {
5983 case IEMMODE_16BIT:
5984 IEM_MC_BEGIN(4, 0);
5985 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5986 IEM_MC_ARG(uint16_t, u16Src, 1);
5987 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5988 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5989
5990 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5991 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5992 IEM_MC_REF_EFLAGS(pEFlags);
5993 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5994
5995 IEM_MC_ADVANCE_RIP();
5996 IEM_MC_END();
5997 return VINF_SUCCESS;
5998
5999 case IEMMODE_32BIT:
6000 IEM_MC_BEGIN(4, 0);
6001 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6002 IEM_MC_ARG(uint32_t, u32Src, 1);
6003 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6004 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6005
6006 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6007 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6008 IEM_MC_REF_EFLAGS(pEFlags);
6009 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6010
6011 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6012 IEM_MC_ADVANCE_RIP();
6013 IEM_MC_END();
6014 return VINF_SUCCESS;
6015
6016 case IEMMODE_64BIT:
6017 IEM_MC_BEGIN(4, 0);
6018 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6019 IEM_MC_ARG(uint64_t, u64Src, 1);
6020 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6021 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6022
6023 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6024 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6025 IEM_MC_REF_EFLAGS(pEFlags);
6026 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6027
6028 IEM_MC_ADVANCE_RIP();
6029 IEM_MC_END();
6030 return VINF_SUCCESS;
6031
6032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6033 }
6034 }
6035 else
6036 {
6037 switch (pVCpu->iem.s.enmEffOpSize)
6038 {
6039 case IEMMODE_16BIT:
6040 IEM_MC_BEGIN(4, 2);
6041 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6042 IEM_MC_ARG(uint16_t, u16Src, 1);
6043 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6044 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6046
6047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6048 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6049 IEM_MC_ASSIGN(cShiftArg, cShift);
6050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6051 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6052 IEM_MC_FETCH_EFLAGS(EFlags);
6053 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6054 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6055
6056 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6057 IEM_MC_COMMIT_EFLAGS(EFlags);
6058 IEM_MC_ADVANCE_RIP();
6059 IEM_MC_END();
6060 return VINF_SUCCESS;
6061
6062 case IEMMODE_32BIT:
6063 IEM_MC_BEGIN(4, 2);
6064 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6065 IEM_MC_ARG(uint32_t, u32Src, 1);
6066 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6067 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6069
6070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6071 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6072 IEM_MC_ASSIGN(cShiftArg, cShift);
6073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6074 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6075 IEM_MC_FETCH_EFLAGS(EFlags);
6076 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6077 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6078
6079 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6080 IEM_MC_COMMIT_EFLAGS(EFlags);
6081 IEM_MC_ADVANCE_RIP();
6082 IEM_MC_END();
6083 return VINF_SUCCESS;
6084
6085 case IEMMODE_64BIT:
6086 IEM_MC_BEGIN(4, 2);
6087 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6088 IEM_MC_ARG(uint64_t, u64Src, 1);
6089 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6090 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6092
6093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6094 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6095 IEM_MC_ASSIGN(cShiftArg, cShift);
6096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6097 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6098 IEM_MC_FETCH_EFLAGS(EFlags);
6099 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6100 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6101
6102 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6103 IEM_MC_COMMIT_EFLAGS(EFlags);
6104 IEM_MC_ADVANCE_RIP();
6105 IEM_MC_END();
6106 return VINF_SUCCESS;
6107
6108 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6109 }
6110 }
6111}
6112
6113
6114/**
6115 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6116 */
6117FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6118{
6119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6120 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6121
6122 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6123 {
6124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6125
6126 switch (pVCpu->iem.s.enmEffOpSize)
6127 {
6128 case IEMMODE_16BIT:
6129 IEM_MC_BEGIN(4, 0);
6130 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6131 IEM_MC_ARG(uint16_t, u16Src, 1);
6132 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6133 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6134
6135 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6136 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6137 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6138 IEM_MC_REF_EFLAGS(pEFlags);
6139 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6140
6141 IEM_MC_ADVANCE_RIP();
6142 IEM_MC_END();
6143 return VINF_SUCCESS;
6144
6145 case IEMMODE_32BIT:
6146 IEM_MC_BEGIN(4, 0);
6147 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6148 IEM_MC_ARG(uint32_t, u32Src, 1);
6149 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6150 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6151
6152 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6153 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6154 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6155 IEM_MC_REF_EFLAGS(pEFlags);
6156 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6157
6158 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6159 IEM_MC_ADVANCE_RIP();
6160 IEM_MC_END();
6161 return VINF_SUCCESS;
6162
6163 case IEMMODE_64BIT:
6164 IEM_MC_BEGIN(4, 0);
6165 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6166 IEM_MC_ARG(uint64_t, u64Src, 1);
6167 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6168 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6169
6170 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6171 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6172 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6173 IEM_MC_REF_EFLAGS(pEFlags);
6174 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6175
6176 IEM_MC_ADVANCE_RIP();
6177 IEM_MC_END();
6178 return VINF_SUCCESS;
6179
6180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6181 }
6182 }
6183 else
6184 {
6185 switch (pVCpu->iem.s.enmEffOpSize)
6186 {
6187 case IEMMODE_16BIT:
6188 IEM_MC_BEGIN(4, 2);
6189 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6190 IEM_MC_ARG(uint16_t, u16Src, 1);
6191 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6192 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6194
6195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6197 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6198 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6199 IEM_MC_FETCH_EFLAGS(EFlags);
6200 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6201 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6202
6203 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6204 IEM_MC_COMMIT_EFLAGS(EFlags);
6205 IEM_MC_ADVANCE_RIP();
6206 IEM_MC_END();
6207 return VINF_SUCCESS;
6208
6209 case IEMMODE_32BIT:
6210 IEM_MC_BEGIN(4, 2);
6211 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6212 IEM_MC_ARG(uint32_t, u32Src, 1);
6213 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6214 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6216
6217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6219 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6220 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6221 IEM_MC_FETCH_EFLAGS(EFlags);
6222 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6223 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6224
6225 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6226 IEM_MC_COMMIT_EFLAGS(EFlags);
6227 IEM_MC_ADVANCE_RIP();
6228 IEM_MC_END();
6229 return VINF_SUCCESS;
6230
6231 case IEMMODE_64BIT:
6232 IEM_MC_BEGIN(4, 2);
6233 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6234 IEM_MC_ARG(uint64_t, u64Src, 1);
6235 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6236 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6238
6239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6241 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6242 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6243 IEM_MC_FETCH_EFLAGS(EFlags);
6244 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6245 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6246
6247 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6248 IEM_MC_COMMIT_EFLAGS(EFlags);
6249 IEM_MC_ADVANCE_RIP();
6250 IEM_MC_END();
6251 return VINF_SUCCESS;
6252
6253 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6254 }
6255 }
6256}
6257
6258
6259
6260/** Opcode 0x0f 0xa4. */
6261FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6262{
6263 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6264 IEMOP_HLP_MIN_386();
6265 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6266}
6267
6268
6269/** Opcode 0x0f 0xa5. */
6270FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6271{
6272 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6273 IEMOP_HLP_MIN_386();
6274 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6275}
6276
6277
6278/** Opcode 0x0f 0xa8. */
6279FNIEMOP_DEF(iemOp_push_gs)
6280{
6281 IEMOP_MNEMONIC(push_gs, "push gs");
6282 IEMOP_HLP_MIN_386();
6283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6284 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6285}
6286
6287
6288/** Opcode 0x0f 0xa9. */
6289FNIEMOP_DEF(iemOp_pop_gs)
6290{
6291 IEMOP_MNEMONIC(pop_gs, "pop gs");
6292 IEMOP_HLP_MIN_386();
6293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6294 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6295}
6296
6297
6298/** Opcode 0x0f 0xaa. */
6299FNIEMOP_DEF(iemOp_rsm)
6300{
6301 IEMOP_MNEMONIC(rsm, "rsm");
6302 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6303 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6304 * intercept). */
6305 IEMOP_BITCH_ABOUT_STUB();
6306 return IEMOP_RAISE_INVALID_OPCODE();
6307}
6308
6309//IEMOP_HLP_MIN_386();
6310
6311
6312/** Opcode 0x0f 0xab. */
6313FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6314{
6315 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6316 IEMOP_HLP_MIN_386();
6317 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6318}
6319
6320
6321/** Opcode 0x0f 0xac. */
6322FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6323{
6324 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6325 IEMOP_HLP_MIN_386();
6326 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6327}
6328
6329
6330/** Opcode 0x0f 0xad. */
6331FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6332{
6333 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6334 IEMOP_HLP_MIN_386();
6335 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6336}
6337
6338
6339/** Opcode 0x0f 0xae mem/0. */
6340FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6341{
6342 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6343 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6344 return IEMOP_RAISE_INVALID_OPCODE();
6345
6346 IEM_MC_BEGIN(3, 1);
6347 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6348 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6349 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6352 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6353 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6354 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6355 IEM_MC_END();
6356 return VINF_SUCCESS;
6357}
6358
6359
6360/** Opcode 0x0f 0xae mem/1. */
6361FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6362{
6363 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6364 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6365 return IEMOP_RAISE_INVALID_OPCODE();
6366
6367 IEM_MC_BEGIN(3, 1);
6368 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6369 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6370 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6373 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6374 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6375 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6376 IEM_MC_END();
6377 return VINF_SUCCESS;
6378}
6379
6380
6381/**
6382 * @opmaps grp15
6383 * @opcode !11/2
6384 * @oppfx none
6385 * @opcpuid sse
6386 * @opgroup og_sse_mxcsrsm
6387 * @opxcpttype 5
6388 * @optest op1=0 -> mxcsr=0
6389 * @optest op1=0x2083 -> mxcsr=0x2083
6390 * @optest op1=0xfffffffe -> value.xcpt=0xd
6391 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6392 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6393 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6394 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6395 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6396 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6397 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6398 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6399 */
6400FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6401{
6402 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6403 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6404 return IEMOP_RAISE_INVALID_OPCODE();
6405
6406 IEM_MC_BEGIN(2, 0);
6407 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6408 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6411 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6412 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6413 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6414 IEM_MC_END();
6415 return VINF_SUCCESS;
6416}
6417
6418
6419/**
6420 * @opmaps grp15
6421 * @opcode !11/3
6422 * @oppfx none
6423 * @opcpuid sse
6424 * @opgroup og_sse_mxcsrsm
6425 * @opxcpttype 5
6426 * @optest mxcsr=0 -> op1=0
6427 * @optest mxcsr=0x2083 -> op1=0x2083
6428 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6429 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6430 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6431 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6432 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6433 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6434 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6435 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6436 */
6437FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6438{
6439 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6440 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6441 return IEMOP_RAISE_INVALID_OPCODE();
6442
6443 IEM_MC_BEGIN(2, 0);
6444 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6445 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6449 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6450 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6451 IEM_MC_END();
6452 return VINF_SUCCESS;
6453}
6454
6455
6456/**
6457 * @opmaps grp15
6458 * @opcode !11/4
6459 * @oppfx none
6460 * @opcpuid xsave
6461 * @opgroup og_system
6462 * @opxcpttype none
6463 */
6464FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6465{
6466 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
6467 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6468 return IEMOP_RAISE_INVALID_OPCODE();
6469
6470 IEM_MC_BEGIN(3, 0);
6471 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6472 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6473 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6476 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6477 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6478 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6479 IEM_MC_END();
6480 return VINF_SUCCESS;
6481}
6482
6483
6484/**
6485 * @opmaps grp15
6486 * @opcode !11/5
6487 * @oppfx none
6488 * @opcpuid xsave
6489 * @opgroup og_system
6490 * @opxcpttype none
6491 */
6492FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6493{
6494 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6495 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6496 return IEMOP_RAISE_INVALID_OPCODE();
6497
6498 IEM_MC_BEGIN(3, 0);
6499 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6500 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6501 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6504 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6505 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6506 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6507 IEM_MC_END();
6508 return VINF_SUCCESS;
6509}
6510
6511/** Opcode 0x0f 0xae mem/6. */
6512FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6513
6514/**
6515 * @opmaps grp15
6516 * @opcode !11/7
6517 * @oppfx none
6518 * @opcpuid clfsh
6519 * @opgroup og_cachectl
6520 * @optest op1=1 ->
6521 */
6522FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6523{
6524 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6525 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6526 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6527
6528 IEM_MC_BEGIN(2, 0);
6529 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6530 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6533 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6534 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6535 IEM_MC_END();
6536 return VINF_SUCCESS;
6537}
6538
6539/**
6540 * @opmaps grp15
6541 * @opcode !11/7
6542 * @oppfx 0x66
6543 * @opcpuid clflushopt
6544 * @opgroup og_cachectl
6545 * @optest op1=1 ->
6546 */
6547FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6548{
6549 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6550 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6551 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6552
6553 IEM_MC_BEGIN(2, 0);
6554 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6555 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6558 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6559 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6560 IEM_MC_END();
6561 return VINF_SUCCESS;
6562}
6563
6564
6565/** Opcode 0x0f 0xae 11b/5. */
6566FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6567{
6568 RT_NOREF_PV(bRm);
6569 IEMOP_MNEMONIC(lfence, "lfence");
6570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6571 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6572 return IEMOP_RAISE_INVALID_OPCODE();
6573
6574 IEM_MC_BEGIN(0, 0);
6575 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6576 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6577 else
6578 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6579 IEM_MC_ADVANCE_RIP();
6580 IEM_MC_END();
6581 return VINF_SUCCESS;
6582}
6583
6584
6585/** Opcode 0x0f 0xae 11b/6. */
6586FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6587{
6588 RT_NOREF_PV(bRm);
6589 IEMOP_MNEMONIC(mfence, "mfence");
6590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6591 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6592 return IEMOP_RAISE_INVALID_OPCODE();
6593
6594 IEM_MC_BEGIN(0, 0);
6595 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6596 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6597 else
6598 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6599 IEM_MC_ADVANCE_RIP();
6600 IEM_MC_END();
6601 return VINF_SUCCESS;
6602}
6603
6604
6605/** Opcode 0x0f 0xae 11b/7. */
6606FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6607{
6608 RT_NOREF_PV(bRm);
6609 IEMOP_MNEMONIC(sfence, "sfence");
6610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6611 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6612 return IEMOP_RAISE_INVALID_OPCODE();
6613
6614 IEM_MC_BEGIN(0, 0);
6615 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6616 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6617 else
6618 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6619 IEM_MC_ADVANCE_RIP();
6620 IEM_MC_END();
6621 return VINF_SUCCESS;
6622}
6623
6624
6625/** Opcode 0xf3 0x0f 0xae 11b/0. */
6626FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6627
6628/** Opcode 0xf3 0x0f 0xae 11b/1. */
6629FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6630
6631/** Opcode 0xf3 0x0f 0xae 11b/2. */
6632FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6633
6634/** Opcode 0xf3 0x0f 0xae 11b/3. */
6635FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6636
6637
6638/**
6639 * Group 15 jump table for register variant.
6640 */
6641IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6642{ /* pfx: none, 066h, 0f3h, 0f2h */
6643 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6644 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6645 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6646 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6647 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6648 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6649 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6650 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6651};
6652AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6653
6654
6655/**
6656 * Group 15 jump table for memory variant.
6657 */
6658IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6659{ /* pfx: none, 066h, 0f3h, 0f2h */
6660 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6661 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6662 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6663 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6664 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6665 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6666 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6667 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6668};
6669AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6670
6671
6672/** Opcode 0x0f 0xae. */
6673FNIEMOP_DEF(iemOp_Grp15)
6674{
6675 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6678 /* register, register */
6679 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6680 + pVCpu->iem.s.idxPrefix], bRm);
6681 /* memory, register */
6682 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6683 + pVCpu->iem.s.idxPrefix], bRm);
6684}
6685
6686
6687/** Opcode 0x0f 0xaf. */
6688FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6689{
6690 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6691 IEMOP_HLP_MIN_386();
6692 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6693 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6694}
6695
6696
6697/** Opcode 0x0f 0xb0. */
6698FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6699{
6700 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6701 IEMOP_HLP_MIN_486();
6702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6703
6704 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6705 {
6706 IEMOP_HLP_DONE_DECODING();
6707 IEM_MC_BEGIN(4, 0);
6708 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6709 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6710 IEM_MC_ARG(uint8_t, u8Src, 2);
6711 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6712
6713 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6714 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6715 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6716 IEM_MC_REF_EFLAGS(pEFlags);
6717 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6718 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6719 else
6720 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6721
6722 IEM_MC_ADVANCE_RIP();
6723 IEM_MC_END();
6724 }
6725 else
6726 {
6727 IEM_MC_BEGIN(4, 3);
6728 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6729 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6730 IEM_MC_ARG(uint8_t, u8Src, 2);
6731 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6733 IEM_MC_LOCAL(uint8_t, u8Al);
6734
6735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6736 IEMOP_HLP_DONE_DECODING();
6737 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6738 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6739 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6740 IEM_MC_FETCH_EFLAGS(EFlags);
6741 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6742 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6743 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6744 else
6745 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6746
6747 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6748 IEM_MC_COMMIT_EFLAGS(EFlags);
6749 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6750 IEM_MC_ADVANCE_RIP();
6751 IEM_MC_END();
6752 }
6753 return VINF_SUCCESS;
6754}
6755
6756/** Opcode 0x0f 0xb1. */
6757FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6758{
6759 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6760 IEMOP_HLP_MIN_486();
6761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6762
6763 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6764 {
6765 IEMOP_HLP_DONE_DECODING();
6766 switch (pVCpu->iem.s.enmEffOpSize)
6767 {
6768 case IEMMODE_16BIT:
6769 IEM_MC_BEGIN(4, 0);
6770 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6771 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6772 IEM_MC_ARG(uint16_t, u16Src, 2);
6773 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6774
6775 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6776 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6777 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6778 IEM_MC_REF_EFLAGS(pEFlags);
6779 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6780 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6781 else
6782 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6783
6784 IEM_MC_ADVANCE_RIP();
6785 IEM_MC_END();
6786 return VINF_SUCCESS;
6787
6788 case IEMMODE_32BIT:
6789 IEM_MC_BEGIN(4, 0);
6790 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6791 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6792 IEM_MC_ARG(uint32_t, u32Src, 2);
6793 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6794
6795 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6796 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6797 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6798 IEM_MC_REF_EFLAGS(pEFlags);
6799 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6800 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6801 else
6802 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6803
6804 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6805 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6806 IEM_MC_ADVANCE_RIP();
6807 IEM_MC_END();
6808 return VINF_SUCCESS;
6809
6810 case IEMMODE_64BIT:
6811 IEM_MC_BEGIN(4, 0);
6812 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6813 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6814#ifdef RT_ARCH_X86
6815 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6816#else
6817 IEM_MC_ARG(uint64_t, u64Src, 2);
6818#endif
6819 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6820
6821 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6822 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6823 IEM_MC_REF_EFLAGS(pEFlags);
6824#ifdef RT_ARCH_X86
6825 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6826 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6827 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6828 else
6829 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6830#else
6831 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6832 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6834 else
6835 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6836#endif
6837
6838 IEM_MC_ADVANCE_RIP();
6839 IEM_MC_END();
6840 return VINF_SUCCESS;
6841
6842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6843 }
6844 }
6845 else
6846 {
6847 switch (pVCpu->iem.s.enmEffOpSize)
6848 {
6849 case IEMMODE_16BIT:
6850 IEM_MC_BEGIN(4, 3);
6851 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6852 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6853 IEM_MC_ARG(uint16_t, u16Src, 2);
6854 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6856 IEM_MC_LOCAL(uint16_t, u16Ax);
6857
6858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6859 IEMOP_HLP_DONE_DECODING();
6860 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6861 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6862 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6863 IEM_MC_FETCH_EFLAGS(EFlags);
6864 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6865 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6866 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6867 else
6868 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6869
6870 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6871 IEM_MC_COMMIT_EFLAGS(EFlags);
6872 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6873 IEM_MC_ADVANCE_RIP();
6874 IEM_MC_END();
6875 return VINF_SUCCESS;
6876
6877 case IEMMODE_32BIT:
6878 IEM_MC_BEGIN(4, 3);
6879 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6880 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6881 IEM_MC_ARG(uint32_t, u32Src, 2);
6882 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6884 IEM_MC_LOCAL(uint32_t, u32Eax);
6885
6886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6887 IEMOP_HLP_DONE_DECODING();
6888 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6889 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6890 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6891 IEM_MC_FETCH_EFLAGS(EFlags);
6892 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6893 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6894 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6895 else
6896 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6897
6898 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6899 IEM_MC_COMMIT_EFLAGS(EFlags);
6900 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6901 IEM_MC_ADVANCE_RIP();
6902 IEM_MC_END();
6903 return VINF_SUCCESS;
6904
6905 case IEMMODE_64BIT:
6906 IEM_MC_BEGIN(4, 3);
6907 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6908 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6909#ifdef RT_ARCH_X86
6910 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6911#else
6912 IEM_MC_ARG(uint64_t, u64Src, 2);
6913#endif
6914 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6916 IEM_MC_LOCAL(uint64_t, u64Rax);
6917
6918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6919 IEMOP_HLP_DONE_DECODING();
6920 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6921 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6922 IEM_MC_FETCH_EFLAGS(EFlags);
6923 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6924#ifdef RT_ARCH_X86
6925 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6926 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6927 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6928 else
6929 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6930#else
6931 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6932 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6933 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6934 else
6935 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6936#endif
6937
6938 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6939 IEM_MC_COMMIT_EFLAGS(EFlags);
6940 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6941 IEM_MC_ADVANCE_RIP();
6942 IEM_MC_END();
6943 return VINF_SUCCESS;
6944
6945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6946 }
6947 }
6948}
6949
6950
6951FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6952{
6953 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6954 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6955
6956 switch (pVCpu->iem.s.enmEffOpSize)
6957 {
6958 case IEMMODE_16BIT:
6959 IEM_MC_BEGIN(5, 1);
6960 IEM_MC_ARG(uint16_t, uSel, 0);
6961 IEM_MC_ARG(uint16_t, offSeg, 1);
6962 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6963 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6964 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6965 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6968 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6969 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6970 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6971 IEM_MC_END();
6972 return VINF_SUCCESS;
6973
6974 case IEMMODE_32BIT:
6975 IEM_MC_BEGIN(5, 1);
6976 IEM_MC_ARG(uint16_t, uSel, 0);
6977 IEM_MC_ARG(uint32_t, offSeg, 1);
6978 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6979 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6980 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6981 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6984 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6985 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6986 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6987 IEM_MC_END();
6988 return VINF_SUCCESS;
6989
6990 case IEMMODE_64BIT:
6991 IEM_MC_BEGIN(5, 1);
6992 IEM_MC_ARG(uint16_t, uSel, 0);
6993 IEM_MC_ARG(uint64_t, offSeg, 1);
6994 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6995 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6996 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6997 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7000 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7001 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7002 else
7003 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7004 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7005 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7006 IEM_MC_END();
7007 return VINF_SUCCESS;
7008
7009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7010 }
7011}
7012
7013
7014/** Opcode 0x0f 0xb2. */
7015FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7016{
7017 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7018 IEMOP_HLP_MIN_386();
7019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7020 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7021 return IEMOP_RAISE_INVALID_OPCODE();
7022 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7023}
7024
7025
7026/** Opcode 0x0f 0xb3. */
7027FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7028{
7029 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7030 IEMOP_HLP_MIN_386();
7031 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7032}
7033
7034
7035/** Opcode 0x0f 0xb4. */
7036FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7037{
7038 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7039 IEMOP_HLP_MIN_386();
7040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7041 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7042 return IEMOP_RAISE_INVALID_OPCODE();
7043 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7044}
7045
7046
7047/** Opcode 0x0f 0xb5. */
7048FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7049{
7050 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7051 IEMOP_HLP_MIN_386();
7052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7053 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7054 return IEMOP_RAISE_INVALID_OPCODE();
7055 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7056}
7057
7058
7059/** Opcode 0x0f 0xb6. */
7060FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7061{
7062 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7063 IEMOP_HLP_MIN_386();
7064
7065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7066
7067 /*
7068 * If rm is denoting a register, no more instruction bytes.
7069 */
7070 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7071 {
7072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7073 switch (pVCpu->iem.s.enmEffOpSize)
7074 {
7075 case IEMMODE_16BIT:
7076 IEM_MC_BEGIN(0, 1);
7077 IEM_MC_LOCAL(uint16_t, u16Value);
7078 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7079 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7080 IEM_MC_ADVANCE_RIP();
7081 IEM_MC_END();
7082 return VINF_SUCCESS;
7083
7084 case IEMMODE_32BIT:
7085 IEM_MC_BEGIN(0, 1);
7086 IEM_MC_LOCAL(uint32_t, u32Value);
7087 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7088 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7089 IEM_MC_ADVANCE_RIP();
7090 IEM_MC_END();
7091 return VINF_SUCCESS;
7092
7093 case IEMMODE_64BIT:
7094 IEM_MC_BEGIN(0, 1);
7095 IEM_MC_LOCAL(uint64_t, u64Value);
7096 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7097 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7098 IEM_MC_ADVANCE_RIP();
7099 IEM_MC_END();
7100 return VINF_SUCCESS;
7101
7102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7103 }
7104 }
7105 else
7106 {
7107 /*
7108 * We're loading a register from memory.
7109 */
7110 switch (pVCpu->iem.s.enmEffOpSize)
7111 {
7112 case IEMMODE_16BIT:
7113 IEM_MC_BEGIN(0, 2);
7114 IEM_MC_LOCAL(uint16_t, u16Value);
7115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7118 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7119 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7120 IEM_MC_ADVANCE_RIP();
7121 IEM_MC_END();
7122 return VINF_SUCCESS;
7123
7124 case IEMMODE_32BIT:
7125 IEM_MC_BEGIN(0, 2);
7126 IEM_MC_LOCAL(uint32_t, u32Value);
7127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7130 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7131 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7132 IEM_MC_ADVANCE_RIP();
7133 IEM_MC_END();
7134 return VINF_SUCCESS;
7135
7136 case IEMMODE_64BIT:
7137 IEM_MC_BEGIN(0, 2);
7138 IEM_MC_LOCAL(uint64_t, u64Value);
7139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7142 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7143 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7144 IEM_MC_ADVANCE_RIP();
7145 IEM_MC_END();
7146 return VINF_SUCCESS;
7147
7148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7149 }
7150 }
7151}
7152
7153
7154/** Opcode 0x0f 0xb7. */
7155FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7156{
7157 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7158 IEMOP_HLP_MIN_386();
7159
7160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7161
7162 /** @todo Not entirely sure how the operand size prefix is handled here,
7163 * assuming that it will be ignored. Would be nice to have a few
7164 * test for this. */
7165 /*
7166 * If rm is denoting a register, no more instruction bytes.
7167 */
7168 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7169 {
7170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7171 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7172 {
7173 IEM_MC_BEGIN(0, 1);
7174 IEM_MC_LOCAL(uint32_t, u32Value);
7175 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7176 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7177 IEM_MC_ADVANCE_RIP();
7178 IEM_MC_END();
7179 }
7180 else
7181 {
7182 IEM_MC_BEGIN(0, 1);
7183 IEM_MC_LOCAL(uint64_t, u64Value);
7184 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7185 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7186 IEM_MC_ADVANCE_RIP();
7187 IEM_MC_END();
7188 }
7189 }
7190 else
7191 {
7192 /*
7193 * We're loading a register from memory.
7194 */
7195 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7196 {
7197 IEM_MC_BEGIN(0, 2);
7198 IEM_MC_LOCAL(uint32_t, u32Value);
7199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7202 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7203 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7204 IEM_MC_ADVANCE_RIP();
7205 IEM_MC_END();
7206 }
7207 else
7208 {
7209 IEM_MC_BEGIN(0, 2);
7210 IEM_MC_LOCAL(uint64_t, u64Value);
7211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7214 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7215 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7216 IEM_MC_ADVANCE_RIP();
7217 IEM_MC_END();
7218 }
7219 }
7220 return VINF_SUCCESS;
7221}
7222
7223
7224/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7225FNIEMOP_UD_STUB(iemOp_jmpe);
7226/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7227FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7228
7229
7230/**
7231 * @opcode 0xb9
7232 * @opinvalid intel-modrm
7233 * @optest ->
7234 */
7235FNIEMOP_DEF(iemOp_Grp10)
7236{
7237 /*
7238 * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
7239 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7240 */
7241 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7242 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
7243 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7244}
7245
7246
7247/** Opcode 0x0f 0xba. */
7248FNIEMOP_DEF(iemOp_Grp8)
7249{
7250 IEMOP_HLP_MIN_386();
7251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7252 PCIEMOPBINSIZES pImpl;
7253 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7254 {
7255 case 0: case 1: case 2: case 3:
7256 /* Both AMD and Intel want full modr/m decoding and imm8. */
7257 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7258 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7259 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7260 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7261 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7263 }
7264 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7265
7266 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7267 {
7268 /* register destination. */
7269 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7271
7272 switch (pVCpu->iem.s.enmEffOpSize)
7273 {
7274 case IEMMODE_16BIT:
7275 IEM_MC_BEGIN(3, 0);
7276 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7277 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7278 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7279
7280 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7281 IEM_MC_REF_EFLAGS(pEFlags);
7282 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7283
7284 IEM_MC_ADVANCE_RIP();
7285 IEM_MC_END();
7286 return VINF_SUCCESS;
7287
7288 case IEMMODE_32BIT:
7289 IEM_MC_BEGIN(3, 0);
7290 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7291 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7292 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7293
7294 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7295 IEM_MC_REF_EFLAGS(pEFlags);
7296 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7297
7298 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7299 IEM_MC_ADVANCE_RIP();
7300 IEM_MC_END();
7301 return VINF_SUCCESS;
7302
7303 case IEMMODE_64BIT:
7304 IEM_MC_BEGIN(3, 0);
7305 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7306 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7307 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7308
7309 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7310 IEM_MC_REF_EFLAGS(pEFlags);
7311 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7312
7313 IEM_MC_ADVANCE_RIP();
7314 IEM_MC_END();
7315 return VINF_SUCCESS;
7316
7317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7318 }
7319 }
7320 else
7321 {
7322 /* memory destination. */
7323
7324 uint32_t fAccess;
7325 if (pImpl->pfnLockedU16)
7326 fAccess = IEM_ACCESS_DATA_RW;
7327 else /* BT */
7328 fAccess = IEM_ACCESS_DATA_R;
7329
7330 /** @todo test negative bit offsets! */
7331 switch (pVCpu->iem.s.enmEffOpSize)
7332 {
7333 case IEMMODE_16BIT:
7334 IEM_MC_BEGIN(3, 1);
7335 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7336 IEM_MC_ARG(uint16_t, u16Src, 1);
7337 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7339
7340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7341 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7342 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7343 if (pImpl->pfnLockedU16)
7344 IEMOP_HLP_DONE_DECODING();
7345 else
7346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7347 IEM_MC_FETCH_EFLAGS(EFlags);
7348 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7349 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7350 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7351 else
7352 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7353 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7354
7355 IEM_MC_COMMIT_EFLAGS(EFlags);
7356 IEM_MC_ADVANCE_RIP();
7357 IEM_MC_END();
7358 return VINF_SUCCESS;
7359
7360 case IEMMODE_32BIT:
7361 IEM_MC_BEGIN(3, 1);
7362 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7363 IEM_MC_ARG(uint32_t, u32Src, 1);
7364 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7366
7367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7368 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7369 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7370 if (pImpl->pfnLockedU16)
7371 IEMOP_HLP_DONE_DECODING();
7372 else
7373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7374 IEM_MC_FETCH_EFLAGS(EFlags);
7375 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7376 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7377 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7378 else
7379 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7380 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7381
7382 IEM_MC_COMMIT_EFLAGS(EFlags);
7383 IEM_MC_ADVANCE_RIP();
7384 IEM_MC_END();
7385 return VINF_SUCCESS;
7386
7387 case IEMMODE_64BIT:
7388 IEM_MC_BEGIN(3, 1);
7389 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7390 IEM_MC_ARG(uint64_t, u64Src, 1);
7391 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7393
7394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7395 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7396 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7397 if (pImpl->pfnLockedU16)
7398 IEMOP_HLP_DONE_DECODING();
7399 else
7400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7401 IEM_MC_FETCH_EFLAGS(EFlags);
7402 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7403 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7405 else
7406 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7407 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7408
7409 IEM_MC_COMMIT_EFLAGS(EFlags);
7410 IEM_MC_ADVANCE_RIP();
7411 IEM_MC_END();
7412 return VINF_SUCCESS;
7413
7414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7415 }
7416 }
7417}
7418
7419
7420/** Opcode 0x0f 0xbb. */
7421FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7422{
7423 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7424 IEMOP_HLP_MIN_386();
7425 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7426}
7427
7428
7429/** Opcode 0x0f 0xbc. */
7430FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7431{
7432 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7433 IEMOP_HLP_MIN_386();
7434 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7435 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7436}
7437
7438
7439/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7440FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7441
7442
7443/** Opcode 0x0f 0xbd. */
7444FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7445{
7446 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7447 IEMOP_HLP_MIN_386();
7448 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7449 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7450}
7451
7452
7453/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7454FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7455
7456
7457/** Opcode 0x0f 0xbe. */
7458FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7459{
7460 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7461 IEMOP_HLP_MIN_386();
7462
7463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7464
7465 /*
7466 * If rm is denoting a register, no more instruction bytes.
7467 */
7468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7469 {
7470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7471 switch (pVCpu->iem.s.enmEffOpSize)
7472 {
7473 case IEMMODE_16BIT:
7474 IEM_MC_BEGIN(0, 1);
7475 IEM_MC_LOCAL(uint16_t, u16Value);
7476 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7477 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7478 IEM_MC_ADVANCE_RIP();
7479 IEM_MC_END();
7480 return VINF_SUCCESS;
7481
7482 case IEMMODE_32BIT:
7483 IEM_MC_BEGIN(0, 1);
7484 IEM_MC_LOCAL(uint32_t, u32Value);
7485 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7486 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7487 IEM_MC_ADVANCE_RIP();
7488 IEM_MC_END();
7489 return VINF_SUCCESS;
7490
7491 case IEMMODE_64BIT:
7492 IEM_MC_BEGIN(0, 1);
7493 IEM_MC_LOCAL(uint64_t, u64Value);
7494 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7495 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7496 IEM_MC_ADVANCE_RIP();
7497 IEM_MC_END();
7498 return VINF_SUCCESS;
7499
7500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7501 }
7502 }
7503 else
7504 {
7505 /*
7506 * We're loading a register from memory.
7507 */
7508 switch (pVCpu->iem.s.enmEffOpSize)
7509 {
7510 case IEMMODE_16BIT:
7511 IEM_MC_BEGIN(0, 2);
7512 IEM_MC_LOCAL(uint16_t, u16Value);
7513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7516 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7517 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7518 IEM_MC_ADVANCE_RIP();
7519 IEM_MC_END();
7520 return VINF_SUCCESS;
7521
7522 case IEMMODE_32BIT:
7523 IEM_MC_BEGIN(0, 2);
7524 IEM_MC_LOCAL(uint32_t, u32Value);
7525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7528 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7529 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7530 IEM_MC_ADVANCE_RIP();
7531 IEM_MC_END();
7532 return VINF_SUCCESS;
7533
7534 case IEMMODE_64BIT:
7535 IEM_MC_BEGIN(0, 2);
7536 IEM_MC_LOCAL(uint64_t, u64Value);
7537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7540 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7541 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7542 IEM_MC_ADVANCE_RIP();
7543 IEM_MC_END();
7544 return VINF_SUCCESS;
7545
7546 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7547 }
7548 }
7549}
7550
7551
7552/** Opcode 0x0f 0xbf. */
7553FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7554{
7555 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7556 IEMOP_HLP_MIN_386();
7557
7558 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7559
7560 /** @todo Not entirely sure how the operand size prefix is handled here,
7561 * assuming that it will be ignored. Would be nice to have a few
7562 * test for this. */
7563 /*
7564 * If rm is denoting a register, no more instruction bytes.
7565 */
7566 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7567 {
7568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7569 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7570 {
7571 IEM_MC_BEGIN(0, 1);
7572 IEM_MC_LOCAL(uint32_t, u32Value);
7573 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7574 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7575 IEM_MC_ADVANCE_RIP();
7576 IEM_MC_END();
7577 }
7578 else
7579 {
7580 IEM_MC_BEGIN(0, 1);
7581 IEM_MC_LOCAL(uint64_t, u64Value);
7582 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7583 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7584 IEM_MC_ADVANCE_RIP();
7585 IEM_MC_END();
7586 }
7587 }
7588 else
7589 {
7590 /*
7591 * We're loading a register from memory.
7592 */
7593 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7594 {
7595 IEM_MC_BEGIN(0, 2);
7596 IEM_MC_LOCAL(uint32_t, u32Value);
7597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7600 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7601 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7602 IEM_MC_ADVANCE_RIP();
7603 IEM_MC_END();
7604 }
7605 else
7606 {
7607 IEM_MC_BEGIN(0, 2);
7608 IEM_MC_LOCAL(uint64_t, u64Value);
7609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7612 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7613 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7614 IEM_MC_ADVANCE_RIP();
7615 IEM_MC_END();
7616 }
7617 }
7618 return VINF_SUCCESS;
7619}
7620
7621
7622/** Opcode 0x0f 0xc0. */
7623FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7624{
7625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7626 IEMOP_HLP_MIN_486();
7627 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7628
7629 /*
7630 * If rm is denoting a register, no more instruction bytes.
7631 */
7632 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7633 {
7634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7635
7636 IEM_MC_BEGIN(3, 0);
7637 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7638 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7639 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7640
7641 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7642 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7643 IEM_MC_REF_EFLAGS(pEFlags);
7644 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7645
7646 IEM_MC_ADVANCE_RIP();
7647 IEM_MC_END();
7648 }
7649 else
7650 {
7651 /*
7652 * We're accessing memory.
7653 */
7654 IEM_MC_BEGIN(3, 3);
7655 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7656 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7657 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7658 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7660
7661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7662 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7663 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7664 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7665 IEM_MC_FETCH_EFLAGS(EFlags);
7666 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7667 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7668 else
7669 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7670
7671 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7672 IEM_MC_COMMIT_EFLAGS(EFlags);
7673 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7674 IEM_MC_ADVANCE_RIP();
7675 IEM_MC_END();
7676 return VINF_SUCCESS;
7677 }
7678 return VINF_SUCCESS;
7679}
7680
7681
7682/** Opcode 0x0f 0xc1. */
7683FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7684{
7685 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7686 IEMOP_HLP_MIN_486();
7687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7688
7689 /*
7690 * If rm is denoting a register, no more instruction bytes.
7691 */
7692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7693 {
7694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7695
7696 switch (pVCpu->iem.s.enmEffOpSize)
7697 {
7698 case IEMMODE_16BIT:
7699 IEM_MC_BEGIN(3, 0);
7700 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7701 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7702 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7703
7704 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7705 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7706 IEM_MC_REF_EFLAGS(pEFlags);
7707 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7708
7709 IEM_MC_ADVANCE_RIP();
7710 IEM_MC_END();
7711 return VINF_SUCCESS;
7712
7713 case IEMMODE_32BIT:
7714 IEM_MC_BEGIN(3, 0);
7715 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7716 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7717 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7718
7719 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7720 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7721 IEM_MC_REF_EFLAGS(pEFlags);
7722 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7723
7724 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7725 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7726 IEM_MC_ADVANCE_RIP();
7727 IEM_MC_END();
7728 return VINF_SUCCESS;
7729
7730 case IEMMODE_64BIT:
7731 IEM_MC_BEGIN(3, 0);
7732 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7733 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7734 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7735
7736 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7737 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7738 IEM_MC_REF_EFLAGS(pEFlags);
7739 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7740
7741 IEM_MC_ADVANCE_RIP();
7742 IEM_MC_END();
7743 return VINF_SUCCESS;
7744
7745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7746 }
7747 }
7748 else
7749 {
7750 /*
7751 * We're accessing memory.
7752 */
7753 switch (pVCpu->iem.s.enmEffOpSize)
7754 {
7755 case IEMMODE_16BIT:
7756 IEM_MC_BEGIN(3, 3);
7757 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7758 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7759 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7760 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7762
7763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7764 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7765 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7766 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7767 IEM_MC_FETCH_EFLAGS(EFlags);
7768 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7769 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7770 else
7771 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7772
7773 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7774 IEM_MC_COMMIT_EFLAGS(EFlags);
7775 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7776 IEM_MC_ADVANCE_RIP();
7777 IEM_MC_END();
7778 return VINF_SUCCESS;
7779
7780 case IEMMODE_32BIT:
7781 IEM_MC_BEGIN(3, 3);
7782 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7783 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7784 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7785 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7787
7788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7789 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7790 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7791 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7792 IEM_MC_FETCH_EFLAGS(EFlags);
7793 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7794 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7795 else
7796 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7797
7798 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7799 IEM_MC_COMMIT_EFLAGS(EFlags);
7800 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7801 IEM_MC_ADVANCE_RIP();
7802 IEM_MC_END();
7803 return VINF_SUCCESS;
7804
7805 case IEMMODE_64BIT:
7806 IEM_MC_BEGIN(3, 3);
7807 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7808 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7809 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7810 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7812
7813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7814 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7815 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7816 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7817 IEM_MC_FETCH_EFLAGS(EFlags);
7818 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7819 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7820 else
7821 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7822
7823 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7824 IEM_MC_COMMIT_EFLAGS(EFlags);
7825 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7826 IEM_MC_ADVANCE_RIP();
7827 IEM_MC_END();
7828 return VINF_SUCCESS;
7829
7830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7831 }
7832 }
7833}
7834
7835
7836/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7837FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7838/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7839FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7840/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7841FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7842/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7843FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7844
7845
7846/** Opcode 0x0f 0xc3. */
7847FNIEMOP_DEF(iemOp_movnti_My_Gy)
7848{
7849 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7850
7851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7852
7853 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7854 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7855 {
7856 switch (pVCpu->iem.s.enmEffOpSize)
7857 {
7858 case IEMMODE_32BIT:
7859 IEM_MC_BEGIN(0, 2);
7860 IEM_MC_LOCAL(uint32_t, u32Value);
7861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7862
7863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7865 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7866 return IEMOP_RAISE_INVALID_OPCODE();
7867
7868 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7869 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7870 IEM_MC_ADVANCE_RIP();
7871 IEM_MC_END();
7872 break;
7873
7874 case IEMMODE_64BIT:
7875 IEM_MC_BEGIN(0, 2);
7876 IEM_MC_LOCAL(uint64_t, u64Value);
7877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7878
7879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7881 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7882 return IEMOP_RAISE_INVALID_OPCODE();
7883
7884 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7885 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7886 IEM_MC_ADVANCE_RIP();
7887 IEM_MC_END();
7888 break;
7889
7890 case IEMMODE_16BIT:
7891 /** @todo check this form. */
7892 return IEMOP_RAISE_INVALID_OPCODE();
7893 }
7894 }
7895 else
7896 return IEMOP_RAISE_INVALID_OPCODE();
7897 return VINF_SUCCESS;
7898}
7899/* Opcode 0x66 0x0f 0xc3 - invalid */
7900/* Opcode 0xf3 0x0f 0xc3 - invalid */
7901/* Opcode 0xf2 0x0f 0xc3 - invalid */
7902
7903/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7904FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7905/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7906FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7907/* Opcode 0xf3 0x0f 0xc4 - invalid */
7908/* Opcode 0xf2 0x0f 0xc4 - invalid */
7909
7910/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7911FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7912/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7913FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7914/* Opcode 0xf3 0x0f 0xc5 - invalid */
7915/* Opcode 0xf2 0x0f 0xc5 - invalid */
7916
7917/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7918FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7919/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7920FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7921/* Opcode 0xf3 0x0f 0xc6 - invalid */
7922/* Opcode 0xf2 0x0f 0xc6 - invalid */
7923
7924
7925/** Opcode 0x0f 0xc7 !11/1. */
7926FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7927{
7928 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7929
7930 IEM_MC_BEGIN(4, 3);
7931 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7932 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7933 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7934 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7935 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7936 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7938
7939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7940 IEMOP_HLP_DONE_DECODING();
7941 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7942
7943 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7944 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7945 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7946
7947 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7948 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7949 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7950
7951 IEM_MC_FETCH_EFLAGS(EFlags);
7952 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7953 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7954 else
7955 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7956
7957 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7958 IEM_MC_COMMIT_EFLAGS(EFlags);
7959 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7960 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7961 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7962 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7963 IEM_MC_ENDIF();
7964 IEM_MC_ADVANCE_RIP();
7965
7966 IEM_MC_END();
7967 return VINF_SUCCESS;
7968}
7969
7970
7971/** Opcode REX.W 0x0f 0xc7 !11/1. */
7972FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7973{
7974 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7975 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7976 {
7977#if 0
7978 RT_NOREF(bRm);
7979 IEMOP_BITCH_ABOUT_STUB();
7980 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7981#else
7982 IEM_MC_BEGIN(4, 3);
7983 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7984 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7985 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7986 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7987 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7988 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7990
7991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7992 IEMOP_HLP_DONE_DECODING();
7993 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7994 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7995
7996 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7997 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7998 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7999
8000 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8001 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8002 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8003
8004 IEM_MC_FETCH_EFLAGS(EFlags);
8005# ifdef RT_ARCH_AMD64
8006 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8007 {
8008 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8009 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8010 else
8011 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8012 }
8013 else
8014# endif
8015 {
8016 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8017 accesses and not all all atomic, which works fine on in UNI CPU guest
8018 configuration (ignoring DMA). If guest SMP is active we have no choice
8019 but to use a rendezvous callback here. Sigh. */
8020 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8021 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8022 else
8023 {
8024 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8025 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8026 }
8027 }
8028
8029 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8030 IEM_MC_COMMIT_EFLAGS(EFlags);
8031 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8032 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8033 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8034 IEM_MC_ENDIF();
8035 IEM_MC_ADVANCE_RIP();
8036
8037 IEM_MC_END();
8038 return VINF_SUCCESS;
8039#endif
8040 }
8041 Log(("cmpxchg16b -> #UD\n"));
8042 return IEMOP_RAISE_INVALID_OPCODE();
8043}
8044
8045FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8046{
8047 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8048 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8049 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8050}
8051
8052/** Opcode 0x0f 0xc7 11/6. */
8053FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8054
8055/** Opcode 0x0f 0xc7 !11/6. */
8056FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8057
8058/** Opcode 0x66 0x0f 0xc7 !11/6. */
8059FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8060
8061/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8062FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8063
8064/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8065FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8066
8067/** Opcode 0x0f 0xc7 11/7. */
8068FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8069
8070
8071/**
8072 * Group 9 jump table for register variant.
8073 */
8074IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8075{ /* pfx: none, 066h, 0f3h, 0f2h */
8076 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8077 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8078 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8079 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8080 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8081 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8082 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8083 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8084};
8085AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8086
8087
8088/**
8089 * Group 9 jump table for memory variant.
8090 */
8091IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8092{ /* pfx: none, 066h, 0f3h, 0f2h */
8093 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8094 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8095 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8096 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8097 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8098 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8099 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8100 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8101};
8102AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8103
8104
8105/** Opcode 0x0f 0xc7. */
8106FNIEMOP_DEF(iemOp_Grp9)
8107{
8108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8109 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8110 /* register, register */
8111 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8112 + pVCpu->iem.s.idxPrefix], bRm);
8113 /* memory, register */
8114 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8115 + pVCpu->iem.s.idxPrefix], bRm);
8116}
8117
8118
8119/**
8120 * Common 'bswap register' helper.
8121 */
8122FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8123{
8124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8125 switch (pVCpu->iem.s.enmEffOpSize)
8126 {
8127 case IEMMODE_16BIT:
8128 IEM_MC_BEGIN(1, 0);
8129 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8130 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8131 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8132 IEM_MC_ADVANCE_RIP();
8133 IEM_MC_END();
8134 return VINF_SUCCESS;
8135
8136 case IEMMODE_32BIT:
8137 IEM_MC_BEGIN(1, 0);
8138 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8139 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8141 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8142 IEM_MC_ADVANCE_RIP();
8143 IEM_MC_END();
8144 return VINF_SUCCESS;
8145
8146 case IEMMODE_64BIT:
8147 IEM_MC_BEGIN(1, 0);
8148 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8149 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8150 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8151 IEM_MC_ADVANCE_RIP();
8152 IEM_MC_END();
8153 return VINF_SUCCESS;
8154
8155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8156 }
8157}
8158
8159
8160/** Opcode 0x0f 0xc8. */
8161FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8162{
8163 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8164 /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
8165 prefix. REX.B is the correct prefix it appears. For a parallel
8166 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8167 IEMOP_HLP_MIN_486();
8168 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8169}
8170
8171
8172/** Opcode 0x0f 0xc9. */
8173FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8174{
8175 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8176 IEMOP_HLP_MIN_486();
8177 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8178}
8179
8180
8181/** Opcode 0x0f 0xca. */
8182FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8183{
8184 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
8185 IEMOP_HLP_MIN_486();
8186 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8187}
8188
8189
8190/** Opcode 0x0f 0xcb. */
8191FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8192{
8193 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
8194 IEMOP_HLP_MIN_486();
8195 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8196}
8197
8198
8199/** Opcode 0x0f 0xcc. */
8200FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8201{
8202 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8203 IEMOP_HLP_MIN_486();
8204 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8205}
8206
8207
8208/** Opcode 0x0f 0xcd. */
8209FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8210{
8211 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8212 IEMOP_HLP_MIN_486();
8213 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8214}
8215
8216
8217/** Opcode 0x0f 0xce. */
8218FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8219{
8220 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8221 IEMOP_HLP_MIN_486();
8222 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8223}
8224
8225
8226/** Opcode 0x0f 0xcf. */
8227FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8228{
8229 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8230 IEMOP_HLP_MIN_486();
8231 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8232}
8233
8234
8235/* Opcode 0x0f 0xd0 - invalid */
8236/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8237FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8238/* Opcode 0xf3 0x0f 0xd0 - invalid */
8239/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8240FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8241
8242/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8243FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8244/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8245FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8246/* Opcode 0xf3 0x0f 0xd1 - invalid */
8247/* Opcode 0xf2 0x0f 0xd1 - invalid */
8248
8249/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8250FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8251/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8252FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8253/* Opcode 0xf3 0x0f 0xd2 - invalid */
8254/* Opcode 0xf2 0x0f 0xd2 - invalid */
8255
8256/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8257FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8258/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8259FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8260/* Opcode 0xf3 0x0f 0xd3 - invalid */
8261/* Opcode 0xf2 0x0f 0xd3 - invalid */
8262
8263/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8264FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8265/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8266FNIEMOP_STUB(iemOp_paddq_Vx_W);
8267/* Opcode 0xf3 0x0f 0xd4 - invalid */
8268/* Opcode 0xf2 0x0f 0xd4 - invalid */
8269
8270/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8271FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8272/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8273FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8274/* Opcode 0xf3 0x0f 0xd5 - invalid */
8275/* Opcode 0xf2 0x0f 0xd5 - invalid */
8276
8277/* Opcode 0x0f 0xd6 - invalid */
8278
8279/**
8280 * @opcode 0xd6
8281 * @oppfx 0x66
8282 * @opcpuid sse2
8283 * @opgroup og_sse2_pcksclr_datamove
8284 * @opxcpttype none
8285 * @optest op1=-1 op2=2 -> op1=2
8286 * @optest op1=0 op2=-42 -> op1=-42
8287 */
8288FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8289{
8290 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8292 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8293 {
8294 /*
8295 * Register, register.
8296 */
8297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8298 IEM_MC_BEGIN(0, 2);
8299 IEM_MC_LOCAL(uint64_t, uSrc);
8300
8301 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8302 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8303
8304 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8305 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8306
8307 IEM_MC_ADVANCE_RIP();
8308 IEM_MC_END();
8309 }
8310 else
8311 {
8312 /*
8313 * Memory, register.
8314 */
8315 IEM_MC_BEGIN(0, 2);
8316 IEM_MC_LOCAL(uint64_t, uSrc);
8317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8318
8319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8321 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8322 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8323
8324 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8325 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8326
8327 IEM_MC_ADVANCE_RIP();
8328 IEM_MC_END();
8329 }
8330 return VINF_SUCCESS;
8331}
8332
8333
8334/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
8335FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
8336/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
8337FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
8338#if 0
8339FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
8340{
8341 /* Docs says register only. */
8342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8343
8344 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8345 {
8346 case IEM_OP_PRF_SIZE_OP: /* SSE */
8347 I E M O P _ M N E M O N I C(movq_Wq_Vq, "movq Wq,Vq");
8348 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8349 IEM_MC_BEGIN(2, 0);
8350 IEM_MC_ARG(uint64_t *, pDst, 0);
8351 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8352 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8353 IEM_MC_PREPARE_SSE_USAGE();
8354 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8355 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8356 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8357 IEM_MC_ADVANCE_RIP();
8358 IEM_MC_END();
8359 return VINF_SUCCESS;
8360
8361 case 0: /* MMX */
8362 I E M O P _ M N E M O N I C(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
8363 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8364 IEM_MC_BEGIN(2, 0);
8365 IEM_MC_ARG(uint64_t *, pDst, 0);
8366 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8367 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8368 IEM_MC_PREPARE_FPU_USAGE();
8369 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8370 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8371 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8372 IEM_MC_ADVANCE_RIP();
8373 IEM_MC_END();
8374 return VINF_SUCCESS;
8375
8376 default:
8377 return IEMOP_RAISE_INVALID_OPCODE();
8378 }
8379}
8380#endif
8381
8382
8383/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8384FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8385{
8386 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
8387 /** @todo testcase: Check that the instruction implicitly clears the high
8388 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
8389 * and opcode modifications are made to work with the whole width (not
8390 * just 128). */
8391 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Nq");
8392 /* Docs says register only. */
8393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8395 {
8396 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8397 IEM_MC_BEGIN(2, 0);
8398 IEM_MC_ARG(uint64_t *, pDst, 0);
8399 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8400 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8401 IEM_MC_PREPARE_FPU_USAGE();
8402 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8403 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8404 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8405 IEM_MC_ADVANCE_RIP();
8406 IEM_MC_END();
8407 return VINF_SUCCESS;
8408 }
8409 return IEMOP_RAISE_INVALID_OPCODE();
8410}
8411
8412/** Opcode 0x66 0x0f 0xd7 - */
8413FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8414{
8415 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
8416 /** @todo testcase: Check that the instruction implicitly clears the high
8417 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
8418 * and opcode modifications are made to work with the whole width (not
8419 * just 128). */
8420 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "vpmovmskb Gd, Ux");
8421 /* Docs says register only. */
8422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8423 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8424 {
8425 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8426 IEM_MC_BEGIN(2, 0);
8427 IEM_MC_ARG(uint64_t *, pDst, 0);
8428 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8429 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8430 IEM_MC_PREPARE_SSE_USAGE();
8431 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8432 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8433 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8434 IEM_MC_ADVANCE_RIP();
8435 IEM_MC_END();
8436 return VINF_SUCCESS;
8437 }
8438 return IEMOP_RAISE_INVALID_OPCODE();
8439}
8440
8441/* Opcode 0xf3 0x0f 0xd7 - invalid */
8442/* Opcode 0xf2 0x0f 0xd7 - invalid */
8443
8444
8445/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8446FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8447/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8448FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8449/* Opcode 0xf3 0x0f 0xd8 - invalid */
8450/* Opcode 0xf2 0x0f 0xd8 - invalid */
8451
8452/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8453FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8454/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8455FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8456/* Opcode 0xf3 0x0f 0xd9 - invalid */
8457/* Opcode 0xf2 0x0f 0xd9 - invalid */
8458
8459/** Opcode 0x0f 0xda - pminub Pq, Qq */
8460FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8461/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8462FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8463/* Opcode 0xf3 0x0f 0xda - invalid */
8464/* Opcode 0xf2 0x0f 0xda - invalid */
8465
8466/** Opcode 0x0f 0xdb - pand Pq, Qq */
8467FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8468/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8469FNIEMOP_STUB(iemOp_pand_Vx_W);
8470/* Opcode 0xf3 0x0f 0xdb - invalid */
8471/* Opcode 0xf2 0x0f 0xdb - invalid */
8472
8473/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8474FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8475/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8476FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8477/* Opcode 0xf3 0x0f 0xdc - invalid */
8478/* Opcode 0xf2 0x0f 0xdc - invalid */
8479
8480/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8481FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8482/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8483FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8484/* Opcode 0xf3 0x0f 0xdd - invalid */
8485/* Opcode 0xf2 0x0f 0xdd - invalid */
8486
8487/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8488FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8489/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8490FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8491/* Opcode 0xf3 0x0f 0xde - invalid */
8492/* Opcode 0xf2 0x0f 0xde - invalid */
8493
8494/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8495FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8496/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8497FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8498/* Opcode 0xf3 0x0f 0xdf - invalid */
8499/* Opcode 0xf2 0x0f 0xdf - invalid */
8500
8501/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8502FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8503/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8504FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8505/* Opcode 0xf3 0x0f 0xe0 - invalid */
8506/* Opcode 0xf2 0x0f 0xe0 - invalid */
8507
8508/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8509FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8510/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8511FNIEMOP_STUB(iemOp_psraw_Vx_W);
8512/* Opcode 0xf3 0x0f 0xe1 - invalid */
8513/* Opcode 0xf2 0x0f 0xe1 - invalid */
8514
8515/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8516FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8517/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8518FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8519/* Opcode 0xf3 0x0f 0xe2 - invalid */
8520/* Opcode 0xf2 0x0f 0xe2 - invalid */
8521
8522/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8523FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8524/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8525FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8526/* Opcode 0xf3 0x0f 0xe3 - invalid */
8527/* Opcode 0xf2 0x0f 0xe3 - invalid */
8528
8529/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8530FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8531/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8532FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8533/* Opcode 0xf3 0x0f 0xe4 - invalid */
8534/* Opcode 0xf2 0x0f 0xe4 - invalid */
8535
8536/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8537FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8538/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8539FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8540/* Opcode 0xf3 0x0f 0xe5 - invalid */
8541/* Opcode 0xf2 0x0f 0xe5 - invalid */
8542
8543/* Opcode 0x0f 0xe6 - invalid */
8544/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8545FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8546/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8547FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8548/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8549FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8550
8551
8552/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8553FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8554{
8555 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8557 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8558 {
8559 /* Register, memory. */
8560 IEM_MC_BEGIN(0, 2);
8561 IEM_MC_LOCAL(uint64_t, uSrc);
8562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8563
8564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8566 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8567 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8568
8569 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8570 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8571
8572 IEM_MC_ADVANCE_RIP();
8573 IEM_MC_END();
8574 return VINF_SUCCESS;
8575 }
8576 /* The register, register encoding is invalid. */
8577 return IEMOP_RAISE_INVALID_OPCODE();
8578}
8579
8580/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8581FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8582{
8583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8584 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8585 {
8586 /* Register, memory. */
8587 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8588 IEM_MC_BEGIN(0, 2);
8589 IEM_MC_LOCAL(RTUINT128U, uSrc);
8590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8591
8592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8594 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8595 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8596
8597 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8598 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8599
8600 IEM_MC_ADVANCE_RIP();
8601 IEM_MC_END();
8602 return VINF_SUCCESS;
8603 }
8604
8605 /* The register, register encoding is invalid. */
8606 return IEMOP_RAISE_INVALID_OPCODE();
8607}
8608
8609/* Opcode 0xf3 0x0f 0xe7 - invalid */
8610/* Opcode 0xf2 0x0f 0xe7 - invalid */
8611
8612
8613/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8614FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8615/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8616FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8617/* Opcode 0xf3 0x0f 0xe8 - invalid */
8618/* Opcode 0xf2 0x0f 0xe8 - invalid */
8619
8620/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8621FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8622/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8623FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8624/* Opcode 0xf3 0x0f 0xe9 - invalid */
8625/* Opcode 0xf2 0x0f 0xe9 - invalid */
8626
8627/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8628FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8629/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8630FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8631/* Opcode 0xf3 0x0f 0xea - invalid */
8632/* Opcode 0xf2 0x0f 0xea - invalid */
8633
8634/** Opcode 0x0f 0xeb - por Pq, Qq */
8635FNIEMOP_STUB(iemOp_por_Pq_Qq);
8636/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8637FNIEMOP_STUB(iemOp_por_Vx_W);
8638/* Opcode 0xf3 0x0f 0xeb - invalid */
8639/* Opcode 0xf2 0x0f 0xeb - invalid */
8640
8641/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8642FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8643/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8644FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8645/* Opcode 0xf3 0x0f 0xec - invalid */
8646/* Opcode 0xf2 0x0f 0xec - invalid */
8647
8648/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8649FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8650/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8651FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8652/* Opcode 0xf3 0x0f 0xed - invalid */
8653/* Opcode 0xf2 0x0f 0xed - invalid */
8654
8655/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8656FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8657/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8658FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8659/* Opcode 0xf3 0x0f 0xee - invalid */
8660/* Opcode 0xf2 0x0f 0xee - invalid */
8661
8662
8663/** Opcode 0x0f 0xef - pxor Pq, Qq */
8664FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8665{
8666 IEMOP_MNEMONIC(pxor, "pxor");
8667 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8668}
8669
8670/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8671FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8672{
8673 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8674 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8675}
8676
8677/* Opcode 0xf3 0x0f 0xef - invalid */
8678/* Opcode 0xf2 0x0f 0xef - invalid */
8679
8680/* Opcode 0x0f 0xf0 - invalid */
8681/* Opcode 0x66 0x0f 0xf0 - invalid */
8682/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8683FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8684
8685/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8686FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8687/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8688FNIEMOP_STUB(iemOp_psllw_Vx_W);
8689/* Opcode 0xf2 0x0f 0xf1 - invalid */
8690
8691/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8692FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8693/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8694FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8695/* Opcode 0xf2 0x0f 0xf2 - invalid */
8696
8697/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8698FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8699/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8700FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8701/* Opcode 0xf2 0x0f 0xf3 - invalid */
8702
8703/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8704FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8705/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8706FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8707/* Opcode 0xf2 0x0f 0xf4 - invalid */
8708
8709/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8710FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8711/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8712FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8713/* Opcode 0xf2 0x0f 0xf5 - invalid */
8714
8715/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8716FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8717/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8718FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8719/* Opcode 0xf2 0x0f 0xf6 - invalid */
8720
8721/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8722FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8723/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8724FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8725/* Opcode 0xf2 0x0f 0xf7 - invalid */
8726
8727/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8728FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8729/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8730FNIEMOP_STUB(iemOp_psubb_Vx_W);
8731/* Opcode 0xf2 0x0f 0xf8 - invalid */
8732
8733/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8734FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8735/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8736FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8737/* Opcode 0xf2 0x0f 0xf9 - invalid */
8738
8739/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8740FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8741/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8742FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8743/* Opcode 0xf2 0x0f 0xfa - invalid */
8744
8745/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8746FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8747/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8748FNIEMOP_STUB(iemOp_psubq_Vx_W);
8749/* Opcode 0xf2 0x0f 0xfb - invalid */
8750
8751/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8752FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8753/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8754FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8755/* Opcode 0xf2 0x0f 0xfc - invalid */
8756
8757/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8758FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8759/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8760FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8761/* Opcode 0xf2 0x0f 0xfd - invalid */
8762
8763/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8764FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8765/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8766FNIEMOP_STUB(iemOp_paddd_Vx_W);
8767/* Opcode 0xf2 0x0f 0xfe - invalid */
8768
8769
8770/** Opcode **** 0x0f 0xff - UD0 */
8771FNIEMOP_DEF(iemOp_ud0)
8772{
8773 IEMOP_MNEMONIC(ud0, "ud0");
8774 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8775 {
8776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8777#ifndef TST_IEM_CHECK_MC
8778 RTGCPTR GCPtrEff;
8779 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8780 if (rcStrict != VINF_SUCCESS)
8781 return rcStrict;
8782#endif
8783 IEMOP_HLP_DONE_DECODING();
8784 }
8785 return IEMOP_RAISE_INVALID_OPCODE();
8786}
8787
8788
8789
8790/**
8791 * Two byte opcode map, first byte 0x0f.
8792 *
8793 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8794 * check if it needs updating as well when making changes.
8795 */
8796IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8797{
8798 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8799 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8800 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8801 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8802 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8803 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8804 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8805 /* 0x06 */ IEMOP_X4(iemOp_clts),
8806 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8807 /* 0x08 */ IEMOP_X4(iemOp_invd),
8808 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8809 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8810 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8811 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8812 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8813 /* 0x0e */ IEMOP_X4(iemOp_femms),
8814 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8815
8816 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
8817 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8818 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8819 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8820 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8821 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8822 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
8823 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8824 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8825 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8826 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8827 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8828 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8829 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8830 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8831 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8832
8833 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8834 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8835 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8836 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8837 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8838 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8839 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8840 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8841 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8842 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8843 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
8844 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8845 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
8846 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
8847 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8848 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8849
8850 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8851 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8852 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8853 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8854 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8855 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8856 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8857 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8858 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
8859 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8860 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
8861 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8862 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8863 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8864 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8865 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8866
8867 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8868 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8869 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8870 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8871 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8872 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8873 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8874 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8875 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8876 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8877 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8878 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8879 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8880 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8881 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8882 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8883
8884 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8885 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
8886 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
8887 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
8888 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8889 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8890 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8891 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8892 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
8893 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
8894 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
8895 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8896 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
8897 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
8898 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
8899 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
8900
8901 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8902 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8903 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8904 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8905 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8906 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8907 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8908 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8909 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8910 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8911 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8912 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8913 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8914 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8915 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8916 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,
8917
8918 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
8919 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8920 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8921 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8922 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8923 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8924 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8925 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8926
8927 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8928 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8929 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8930 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8931 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
8932 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
8933 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
8934 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
8935
8936 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8937 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8938 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8939 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8940 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8941 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8942 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8943 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8944 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8945 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8946 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8947 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8948 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8949 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8950 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8951 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8952
8953 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8954 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8955 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8956 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8957 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8958 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8959 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8960 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8961 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8962 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8963 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8964 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8965 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8966 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8967 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8968 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8969
8970 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8971 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8972 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8973 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8974 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8975 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8976 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8977 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8978 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8979 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8980 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8981 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8982 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8983 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8984 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8985 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8986
8987 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8988 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8989 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8990 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8991 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8992 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8993 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8994 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8995 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8996 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8997 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8998 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8999 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9000 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9001 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9002 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9003
9004 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9005 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9006 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9007 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9008 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9009 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9010 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9011 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9012 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9013 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9014 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9015 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9016 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9017 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9018 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9019 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9020
9021 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9022 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9023 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9024 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9025 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9026 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9027 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9028 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9029 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9030 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9031 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9032 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9033 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9034 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9035 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9036 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9037
9038 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9039 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9040 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9041 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9042 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9043 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9044 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9045 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9046 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9047 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9048 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9049 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9050 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9051 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9052 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9053 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9054
9055 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9056 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9057 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9058 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9059 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9060 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9061 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9062 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9063 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9064 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9065 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9066 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9067 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9068 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9069 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9070 /* 0xff */ IEMOP_X4(iemOp_ud0),
9071};
9072AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
9073
9074/** @} */
9075
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette