VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@66581

Last change on this file since 66581 was 66581, checked in by vboxsync, 8 years ago:

VMM: Nested Hw.virt: Implemented various SVM intercepts in IEM, addressed some todos.
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66581 2017-04-17 03:00:00Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

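/*
 * Editor's note: a minimal sketch (not part of the original file) of how the
 * ModRM byte consumed by the decoders below splits into its three fields.
 * The shift/mask values follow the standard x86 ModRM layout that the
 * X86_MODRM_* macros used throughout this file are assumed to encode.
 */
#if 0 /* illustrative only */
static void iemExampleSplitModRM(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = (bRm >> 6) & 3;    /* 3 = register operand, 0..2 = memory forms */
    *pbReg = (bRm >> 3) & 7;    /* register, or opcode extension ("/r") for groups */
    *pbRm  =  bRm       & 7;    /* register index or memory addressing mode */
}
#endif
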
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
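

/*
 * Editor's note: the `(bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB` pattern
 * in sldt above recurs throughout this file.  A hypothetical sketch of the
 * idea, assuming uRexB holds the REX.B bit pre-shifted to bit 3: OR-ing it in
 * widens the 3-bit r/m field to the full 4-bit register index in 64-bit mode.
 */
#if 0 /* illustrative only */
static uint8_t iemExampleFullRmIndex(uint8_t bRm, uint8_t uRexB /* 0 or 8 */)
{
    return (bRm & 7) | uRexB;   /* e.g. r/m=0 with REX.B set selects r8, not rax */
}
#endif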


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR and VERW (opcode 0x0f 0x00 /4 and /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
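

/*
 * Editor's note: a worked example of the group dispatch above, assuming
 * X86_MODRM_REG_SHIFT is 3 and X86_MODRM_REG_SMASK is 7.  For the byte
 * sequence 0f 00 c8 we get bRm = 0xc8, so reg = (0xc8 >> 3) & 7 = 1 and
 * g_apfnGroup6 sends us to iemOp_Grp6_str; mod = 3, so str then takes its
 * register path.
 */
#if 0 /* illustrative only */
static unsigned iemExampleGroup6Index(uint8_t bRm)
{
    return (bRm >> 3) & 7;      /* 0xc8 -> 1 -> iemOp_Grp6_str */
}
#endif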


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
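

/*
 * Editor's note: a scalar model (an assumption-laden sketch, not the
 * emulator's actual code path) of the CPU dependent filling smsw does above:
 * the 286 stores 1s in the MSW bits it does not implement (0xfff0), the 386
 * stores 0xffe0, and 486+ CPUs store the real low word of CR0 unmodified.
 */
#if 0 /* illustrative only */
static uint16_t iemExampleSmsw16(uint16_t uCr0Low, unsigned uCpu /* 286, 386, 486, ... */)
{
    if (uCpu >= 486)
        return uCr0Low;
    return uCr0Low | (uCpu == 386 ? 0xffe0 : 0xfff0);
}
#endif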


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored: everything is 16-bit and only
       the lower four bits (PE, MP, EM and TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
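

/*
 * Editor's note: when mod is 3, group 7 reuses the reg and r/m fields to
 * select whole instructions rather than operands.  A few decoded examples
 * (full opcode bytes, assuming the standard ModRM field layout):
 *     0f 01 c1 -> reg=0, rm=1 -> vmcall
 *     0f 01 c8 -> reg=1, rm=0 -> monitor
 *     0f 01 d0 -> reg=2, rm=0 -> xgetbv
 *     0f 01 f8 -> reg=7, rm=0 -> swapgs (64-bit mode only)
 */
#if 0 /* illustrative only */
static int iemExampleIsXgetbv(uint8_t bRm) /* the byte following 0f 01 */
{
    return ((bRm >> 6) & 3) == 3 && ((bRm >> 3) & 7) == 2 && (bRm & 7) == 0; /* 0xd0 */
}
#endif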

/** Common worker for LAR and LSL (opcodes 0x0f 0x02 and 0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
//  IEMOP_HLP_MIN_486();
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/** Opcode 0x0f 0x10 - movups Vps, Wps */
FNIEMOP_STUB(iemOp_movups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - movupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_movupd_Vpd_Wpd);


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
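

/*
 * Editor's note: the asymmetry in movss above is architectural, not an
 * emulation quirk.  A rough model of what the two load forms do to the
 * 128-bit destination, assuming little-endian dword lanes:
 */
#if 0 /* illustrative only */
typedef struct { uint32_t au32[4]; } X128;
static void iemExampleMovssRegReg(X128 *pDst, uint32_t uSrc) /* xmm, xmm */
{
    pDst->au32[0] = uSrc;                               /* lanes 1..3 untouched */
}
static void iemExampleMovssRegMem(X128 *pDst, uint32_t uSrc) /* xmm, m32 */
{
    pDst->au32[0] = uSrc;
    pDst->au32[1] = pDst->au32[2] = pDst->au32[3] = 0;  /* zero-extended to 128 bits */
}
#endif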


/** Opcode 0xf2 0x0f 0x10 - movsd Vx, Wsd */
FNIEMOP_STUB(iemOp_movsd_Vx_Wsd);


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
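

/*
 * Editor's note: a scalar model of the shuffle iemAImpl_movsldup implements
 * (a sketch; lane numbering assumed little endian): each even-numbered source
 * dword is duplicated into an even/odd destination pair, which reproduces the
 * @optest result above.
 */
#if 0 /* illustrative only */
static void iemExampleMovsldup(uint32_t au32Dst[4], const uint32_t au32Src[4])
{
    au32Dst[0] = au32Dst[1] = au32Src[0];
    au32Dst[2] = au32Dst[3] = au32Src[2];
}
#endif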


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x13 - movlps Mq, Vq */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - movlpd Mq, Vq */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq movlhps Vdq, Uq */
FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
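

/*
 * Editor's note: a condensed sketch of the iCrReg computation above.  It
 * assumes uRexReg holds the REX.R bit pre-shifted to bit 3 and that
 * fMovCr8In32Bit mirrors the AMD "lock mov cr0 aliases cr8" behaviour.
 */
#if 0 /* illustrative only */
static int iemExampleCrIndex(uint8_t bRm, uint8_t uRexReg, int fLock, int fMovCr8In32Bit)
{
    int iCrReg = ((bRm >> 3) & 7) | uRexReg;    /* reg field + REX.R */
    if (fLock)
    {
        if (!fMovCr8In32Bit)
            return -1;                          /* #UD, see the test referenced above */
        iCrReg |= 8;                            /* lock mov from/to cr0 means cr8 */
    }
    return iCrReg;
}
#endif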


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - movaps Vps, Wps */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
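

/*
 * Editor's note: the _ALIGN_SSE fetch above is what separates movaps from
 * movups.  A sketch of the check it is assumed to perform before reading the
 * 16 bytes (misalignment raises #GP(0) for the aligned move family):
 */
#if 0 /* illustrative only */
static int iemExampleSseAlignOk(uint64_t GCPtrMem)
{
    return (GCPtrMem & 15) == 0;    /* movaps needs 16-byte alignment; movups does not */
}
#endif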

/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - movaps Wps, Vps */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


1927/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1928FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1929/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1930FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1931/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
1932FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
1933/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
1934FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
1935
1936
1937/** Opcode 0x0f 0x2b - movntps Mps, Vps */
1938FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
1939{
1940 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1942 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1943 {
1944 /*
1945         * Memory, register.
1946 */
1947 IEM_MC_BEGIN(0, 2);
1948 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1950
1951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1953 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1954 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1955
1956 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1957 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1958
1959 IEM_MC_ADVANCE_RIP();
1960 IEM_MC_END();
1961 }
1962 /* The register, register encoding is invalid. */
1963 else
1964 return IEMOP_RAISE_INVALID_OPCODE();
1965 return VINF_SUCCESS;
1966}
1967
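/*
 * Emulation note (sketch): the non-temporal hint of movntps above (and
 * movntpd below) cannot be expressed under emulation, so the code performs an
 * ordinary 128-bit store; only the architectural 16-byte alignment
 * requirement survives via IEM_MC_STORE_MEM_U128_ALIGN_SSE.  Conceptually
 * (hypothetical helper, not used by IEM):
 */
DECLINLINE(bool) iemSketchMovntStoreIsMisaligned(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) != 0; /* a misaligned movntps/movntpd destination raises #GP(0) */
}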
1968/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
1969FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
1970{
1971    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1973 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1974 {
1975 /*
1976         * Memory, register.
1977 */
1978 IEM_MC_BEGIN(0, 2);
1979 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1981
1982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1984 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1985 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1986
1987 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1988 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1989
1990 IEM_MC_ADVANCE_RIP();
1991 IEM_MC_END();
1992 }
1993 /* The register, register encoding is invalid. */
1994 else
1995 return IEMOP_RAISE_INVALID_OPCODE();
1996 return VINF_SUCCESS;
1997}
1998/* Opcode 0xf3 0x0f 0x2b - invalid */
1999/* Opcode 0xf2 0x0f 0x2b - invalid */
2000
2001
2002/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2003FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2004/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2005FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2006/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2007FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2008/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2009FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2010
2011/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2012FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2013/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2014FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2015/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2016FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2017/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2018FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2019
2020/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2021FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2022/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2023FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2024/* Opcode 0xf3 0x0f 0x2e - invalid */
2025/* Opcode 0xf2 0x0f 0x2e - invalid */
2026
2027/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2028FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2029/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2030FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2031/* Opcode 0xf3 0x0f 0x2f - invalid */
2032/* Opcode 0xf2 0x0f 0x2f - invalid */
2033
2034/** Opcode 0x0f 0x30. */
2035FNIEMOP_DEF(iemOp_wrmsr)
2036{
2037 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2039 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2040}
2041
2042
2043/** Opcode 0x0f 0x31. */
2044FNIEMOP_DEF(iemOp_rdtsc)
2045{
2046 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2048 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2049}
2050
2051
2052/** Opcode 0x0f 0x32. */
2053FNIEMOP_DEF(iemOp_rdmsr)
2054{
2055 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2057 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2058}
2059
2060
2061/** Opcode 0x0f 0x33. */
2062FNIEMOP_DEF(iemOp_rdpmc)
2063{
2064 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2067}
2068
2069
2070/** Opcode 0x0f 0x34. */
2071FNIEMOP_STUB(iemOp_sysenter);
2072/** Opcode 0x0f 0x35. */
2073FNIEMOP_STUB(iemOp_sysexit);
2074/** Opcode 0x0f 0x37. */
2075FNIEMOP_STUB(iemOp_getsec);
2076
2077
2078/** Opcode 0x0f 0x38. */
2079FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2080{
2081#ifdef IEM_WITH_THREE_0F_38
2082 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2083 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2084#else
2085 IEMOP_BITCH_ABOUT_STUB();
2086 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2087#endif
2088}
2089
2090
2091/** Opcode 0x0f 0x3a. */
2092FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2093{
2094#ifdef IEM_WITH_THREE_0F_3A
2095 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2096 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2097#else
2098 IEMOP_BITCH_ABOUT_STUB();
2099 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2100#endif
2101}
2102
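/*
 * Table layout note (illustrative): the three-byte tables referenced above
 * hold four entries per opcode byte, one per mandatory-prefix column.
 * Assuming the usual column order (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2),
 * the index is computed like this (hypothetical helper, not used by IEM):
 */
DECLINLINE(uintptr_t) iemSketchThreeByteTableIndex(uint8_t bOpcode, uint8_t idxPrefix)
{
    return (uintptr_t)bOpcode * 4 + idxPrefix; /* 256 opcode bytes x 4 prefix columns */
}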
2103
2104/**
2105 * Implements a conditional move.
2106 *
2107 * Wish there were an obvious way to do this that would let us share code
2108 * and reduce bloat.
2109 *
2110 * @param a_Cnd The conditional "microcode" operation.
2111 */
2112#define CMOV_X(a_Cnd) \
2113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2114 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2115 { \
2116 switch (pVCpu->iem.s.enmEffOpSize) \
2117 { \
2118 case IEMMODE_16BIT: \
2119 IEM_MC_BEGIN(0, 1); \
2120 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2121 a_Cnd { \
2122 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2123 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2124 } IEM_MC_ENDIF(); \
2125 IEM_MC_ADVANCE_RIP(); \
2126 IEM_MC_END(); \
2127 return VINF_SUCCESS; \
2128 \
2129 case IEMMODE_32BIT: \
2130 IEM_MC_BEGIN(0, 1); \
2131 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2132 a_Cnd { \
2133 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2134 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2135 } IEM_MC_ELSE() { \
2136 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2137 } IEM_MC_ENDIF(); \
2138 IEM_MC_ADVANCE_RIP(); \
2139 IEM_MC_END(); \
2140 return VINF_SUCCESS; \
2141 \
2142 case IEMMODE_64BIT: \
2143 IEM_MC_BEGIN(0, 1); \
2144 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2145 a_Cnd { \
2146 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2147 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2148 } IEM_MC_ENDIF(); \
2149 IEM_MC_ADVANCE_RIP(); \
2150 IEM_MC_END(); \
2151 return VINF_SUCCESS; \
2152 \
2153 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2154 } \
2155 } \
2156 else \
2157 { \
2158 switch (pVCpu->iem.s.enmEffOpSize) \
2159 { \
2160 case IEMMODE_16BIT: \
2161 IEM_MC_BEGIN(0, 2); \
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2163 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2165 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2166 a_Cnd { \
2167 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2168 } IEM_MC_ENDIF(); \
2169 IEM_MC_ADVANCE_RIP(); \
2170 IEM_MC_END(); \
2171 return VINF_SUCCESS; \
2172 \
2173 case IEMMODE_32BIT: \
2174 IEM_MC_BEGIN(0, 2); \
2175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2176 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2178 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2179 a_Cnd { \
2180 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2181 } IEM_MC_ELSE() { \
2182 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2183 } IEM_MC_ENDIF(); \
2184 IEM_MC_ADVANCE_RIP(); \
2185 IEM_MC_END(); \
2186 return VINF_SUCCESS; \
2187 \
2188 case IEMMODE_64BIT: \
2189 IEM_MC_BEGIN(0, 2); \
2190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2191 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2193 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2194 a_Cnd { \
2195 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2196 } IEM_MC_ENDIF(); \
2197 IEM_MC_ADVANCE_RIP(); \
2198 IEM_MC_END(); \
2199 return VINF_SUCCESS; \
2200 \
2201 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2202 } \
2203 } do {} while (0)
2204
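/*
 * Worked example of the 32-bit subtlety the IEM_MC_ELSE() arms above handle:
 * in 64-bit mode a 32-bit cmov writes its destination even when the condition
 * is false, because every 32-bit GPR write zero-extends to 64 bits.
 * Reference model (hypothetical helper, not used by IEM):
 */
DECLINLINE(uint64_t) iemSketchCmov32(uint64_t uDst, uint32_t uSrc, bool fCondition)
{
    /* Taken or not, the upper half of the 64-bit destination ends up zero. */
    return fCondition ? (uint64_t)uSrc : (uint64_t)(uint32_t)uDst;
}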
2205
2206
2207/** Opcode 0x0f 0x40. */
2208FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2209{
2210 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2211 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2212}
2213
2214
2215/** Opcode 0x0f 0x41. */
2216FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2217{
2218 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2219 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2220}
2221
2222
2223/** Opcode 0x0f 0x42. */
2224FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2225{
2226 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2227 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2228}
2229
2230
2231/** Opcode 0x0f 0x43. */
2232FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2233{
2234 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2235 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2236}
2237
2238
2239/** Opcode 0x0f 0x44. */
2240FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2241{
2242 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2243 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2244}
2245
2246
2247/** Opcode 0x0f 0x45. */
2248FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2249{
2250 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2251 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2252}
2253
2254
2255/** Opcode 0x0f 0x46. */
2256FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2257{
2258 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2259 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2260}
2261
2262
2263/** Opcode 0x0f 0x47. */
2264FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2265{
2266 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2267 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2268}
2269
2270
2271/** Opcode 0x0f 0x48. */
2272FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2273{
2274 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2275 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2276}
2277
2278
2279/** Opcode 0x0f 0x49. */
2280FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2281{
2282 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2283 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2284}
2285
2286
2287/** Opcode 0x0f 0x4a. */
2288FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2289{
2290 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2291 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2292}
2293
2294
2295/** Opcode 0x0f 0x4b. */
2296FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2297{
2298 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2299 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2300}
2301
2302
2303/** Opcode 0x0f 0x4c. */
2304FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2305{
2306 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2307 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2308}
2309
2310
2311/** Opcode 0x0f 0x4d. */
2312FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2313{
2314 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2315 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2316}
2317
2318
2319/** Opcode 0x0f 0x4e. */
2320FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2321{
2322 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2323 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2324}
2325
2326
2327/** Opcode 0x0f 0x4f. */
2328FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2329{
2330 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2331 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2332}
2333
2334#undef CMOV_X
2335
2336/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2337FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2338/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2339FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2340/* Opcode 0xf3 0x0f 0x50 - invalid */
2341/* Opcode 0xf2 0x0f 0x50 - invalid */
2342
2343/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2344FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2345/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2346FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2347/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2348FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2349/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2350FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2351
2352/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2353FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2354/* Opcode 0x66 0x0f 0x52 - invalid */
2355/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2356FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2357/* Opcode 0xf2 0x0f 0x52 - invalid */
2358
2359/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2360FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2361/* Opcode 0x66 0x0f 0x53 - invalid */
2362/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2363FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2364/* Opcode 0xf2 0x0f 0x53 - invalid */
2365
2366/** Opcode 0x0f 0x54 - andps Vps, Wps */
2367FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2368/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2369FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2370/* Opcode 0xf3 0x0f 0x54 - invalid */
2371/* Opcode 0xf2 0x0f 0x54 - invalid */
2372
2373/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2374FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2375/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2376FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2377/* Opcode 0xf3 0x0f 0x55 - invalid */
2378/* Opcode 0xf2 0x0f 0x55 - invalid */
2379
2380/** Opcode 0x0f 0x56 - orps Vps, Wps */
2381FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2382/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2383FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2384/* Opcode 0xf3 0x0f 0x56 - invalid */
2385/* Opcode 0xf2 0x0f 0x56 - invalid */
2386
2387/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2388FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2389/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2390FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2391/* Opcode 0xf3 0x0f 0x57 - invalid */
2392/* Opcode 0xf2 0x0f 0x57 - invalid */
2393
2394/** Opcode 0x0f 0x58 - addps Vps, Wps */
2395FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2396/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2397FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2398/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2399FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2400/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2401FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2402
2403/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2404FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2405/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2406FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2407/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2408FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2409/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2410FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2411
2412/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2413FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2414/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2415FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2416/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2417FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2418/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2419FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2420
2421/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2422FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2423/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2424FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2425/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2426FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2427/* Opcode 0xf2 0x0f 0x5b - invalid */
2428
2429/** Opcode 0x0f 0x5c - subps Vps, Wps */
2430FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2431/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2432FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2433/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2434FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2435/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2436FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2437
2438/** Opcode 0x0f 0x5d - minps Vps, Wps */
2439FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2440/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2441FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2442/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2443FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2444/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2445FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2446
2447/** Opcode 0x0f 0x5e - divps Vps, Wps */
2448FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2449/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2450FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2451/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2452FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2453/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2454FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2455
2456/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2457FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2458/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2459FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2460/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2461FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2462/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2463FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2464
2465/**
2466 * Common worker for SSE2 instructions on the forms:
2467 *     pxxxx xmm1, xmm2/mem128
2468 *
2469 * The 2nd operand is the first half of a register, which in the memory case
2470 * means a 128-bit aligned 64-bit access of which only the low 64 bits are
2471 * used.
2472 *
2473 * Exceptions type 4.
2474 */
2475FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2476{
2477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2479 {
2480 /*
2481 * Register, register.
2482 */
2483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2484 IEM_MC_BEGIN(2, 0);
2485 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2486 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2487 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2488 IEM_MC_PREPARE_SSE_USAGE();
2489 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2490 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2491 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2492 IEM_MC_ADVANCE_RIP();
2493 IEM_MC_END();
2494 }
2495 else
2496 {
2497 /*
2498 * Register, memory.
2499 */
2500 IEM_MC_BEGIN(2, 2);
2501 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2502 IEM_MC_LOCAL(uint64_t, uSrc);
2503 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2505
2506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2508 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2509 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2510
2511 IEM_MC_PREPARE_SSE_USAGE();
2512 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2513 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2514
2515 IEM_MC_ADVANCE_RIP();
2516 IEM_MC_END();
2517 }
2518 return VINF_SUCCESS;
2519}
2520
2521
2522/**
2523 * Common worker for MMX instructions on the forms:
2524 *     pxxxx mm1, mm2/mem32
2525 *
2526 * The 2nd operand is the first half of a register, which in the memory case
2527 * means a 32-bit memory access.
2528 *
2529 * Exceptions type 4.
2530 */
2531FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2533{
2534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2535 if (!pImpl->pfnU64)
2536 return IEMOP_RAISE_INVALID_OPCODE();
2537 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2538 {
2539 /*
2540 * Register, register.
2541 */
2542 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2543 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2545 IEM_MC_BEGIN(2, 0);
2546 IEM_MC_ARG(uint64_t *, pDst, 0);
2547 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2548 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2549 IEM_MC_PREPARE_FPU_USAGE();
2550 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2551 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2552 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2553 IEM_MC_ADVANCE_RIP();
2554 IEM_MC_END();
2555 }
2556 else
2557 {
2558 /*
2559 * Register, memory.
2560 */
2561 IEM_MC_BEGIN(2, 2);
2562 IEM_MC_ARG(uint64_t *, pDst, 0);
2563 IEM_MC_LOCAL(uint32_t, uSrc);
2564 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2566
2567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2569 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2570 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2571
2572 IEM_MC_PREPARE_FPU_USAGE();
2573 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2574 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2575
2576 IEM_MC_ADVANCE_RIP();
2577 IEM_MC_END();
2578 }
2579 return VINF_SUCCESS;
2580}
2581
2582
2583/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2584FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2585{
2586 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2587 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2588}
2589
2590/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
2591FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
2592{
2593    IEMOP_MNEMONIC(vpunpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
2594 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2595}
2596
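/*
 * Worked example of the low-byte interleave implemented by
 * g_iemAImpl_punpcklbw for the two forms above, on a 64-bit chunk
 * (hypothetical reference model, not used by IEM):
 *      dst = d3 d2 d1 d0 (low dword), src = s3 s2 s1 s0
 *  ->  result = s3 d3 s2 d2 s1 d1 s0 d0
 */
DECLINLINE(uint64_t) iemSketchPunpcklbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 4; iByte++)
    {
        uResult |= ((uDst >> (iByte * 8)) & 0xff) << (iByte * 16);     /* destination byte -> even lane */
        uResult |= ((uSrc >> (iByte * 8)) & 0xff) << (iByte * 16 + 8); /* source byte      -> odd lane  */
    }
    return uResult;
}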
2597/* Opcode 0xf3 0x0f 0x60 - invalid */
2598
2599
2600/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2601FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2602{
2603    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
2604 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2605}
2606
2607/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2608FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2609{
2610 IEMOP_MNEMONIC(vpunpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2611 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2612}
2613
2614/* Opcode 0xf3 0x0f 0x61 - invalid */
2615
2616
2617/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2618FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2619{
2620 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2621 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2622}
2623
2624/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2625FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2626{
2627 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2628 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2629}
2630
2631/* Opcode 0xf3 0x0f 0x62 - invalid */
2632
2633
2634
2635/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2636FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2637/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2638FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2639/* Opcode 0xf3 0x0f 0x63 - invalid */
2640
2641/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2642FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2643/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2644FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2645/* Opcode 0xf3 0x0f 0x64 - invalid */
2646
2647/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2648FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2649/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2650FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2651/* Opcode 0xf3 0x0f 0x65 - invalid */
2652
2653/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2654FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2655/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
2656FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
2657/* Opcode 0xf3 0x0f 0x66 - invalid */
2658
2659/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2660FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2661/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
2662FNIEMOP_STUB(iemOp_packuswb_Vx_W);
2663/* Opcode 0xf3 0x0f 0x67 - invalid */
2664
2665
2666/**
2667 * Common worker for MMX instructions on the form:
2668 * pxxxx mm1, mm2/mem64
2669 *
2670 * The 2nd operand is the second half of a register, which in the memory case
2671 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2672 * where it may read the full 128 bits or only the upper 64 bits.
2673 *
2674 * Exceptions type 4.
2675 */
2676FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2677{
2678 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2679 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2680 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2681 {
2682 /*
2683 * Register, register.
2684 */
2685 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2686 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2688 IEM_MC_BEGIN(2, 0);
2689 IEM_MC_ARG(uint64_t *, pDst, 0);
2690 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2691 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2692 IEM_MC_PREPARE_FPU_USAGE();
2693 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2694 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2695 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2696 IEM_MC_ADVANCE_RIP();
2697 IEM_MC_END();
2698 }
2699 else
2700 {
2701 /*
2702 * Register, memory.
2703 */
2704 IEM_MC_BEGIN(2, 2);
2705 IEM_MC_ARG(uint64_t *, pDst, 0);
2706 IEM_MC_LOCAL(uint64_t, uSrc);
2707 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2709
2710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2712 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2713 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2714
2715 IEM_MC_PREPARE_FPU_USAGE();
2716 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2717 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2718
2719 IEM_MC_ADVANCE_RIP();
2720 IEM_MC_END();
2721 }
2722 return VINF_SUCCESS;
2723}
2724
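/*
 * Worked example for the high-half interleave workers above and below
 * (hypothetical reference model, not used by IEM): punpckhdq on 64-bit
 * operands moves the high dword of the destination into the low half of the
 * result and keeps the high dword of the source on top:
 */
DECLINLINE(uint64_t) iemSketchPunpckhdqU64(uint64_t uDst, uint64_t uSrc)
{
    return (uDst >> 32) | (uSrc & UINT64_C(0xffffffff00000000));
}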
2725
2726/**
2727 * Common worker for SSE2 instructions on the form:
2728 * pxxxx xmm1, xmm2/mem128
2729 *
2730 * The 2nd operand is the second half of a register, which in the memory case
2731 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2732 * where it may read the full 128 bits or only the upper 64 bits.
2733 *
2734 * Exceptions type 4.
2735 */
2736FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2737{
2738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2739 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2740 {
2741 /*
2742 * Register, register.
2743 */
2744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2745 IEM_MC_BEGIN(2, 0);
2746 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2747 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2748 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2749 IEM_MC_PREPARE_SSE_USAGE();
2750 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2751 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2752 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2753 IEM_MC_ADVANCE_RIP();
2754 IEM_MC_END();
2755 }
2756 else
2757 {
2758 /*
2759 * Register, memory.
2760 */
2761 IEM_MC_BEGIN(2, 2);
2762 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2763 IEM_MC_LOCAL(RTUINT128U, uSrc);
2764 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2766
2767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2769 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2770        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2771
2772 IEM_MC_PREPARE_SSE_USAGE();
2773 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2774 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2775
2776 IEM_MC_ADVANCE_RIP();
2777 IEM_MC_END();
2778 }
2779 return VINF_SUCCESS;
2780}
2781
2782
2783/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2784FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2785{
2786 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2787 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2788}
2789
2790/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
2791FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
2792{
2793    IEMOP_MNEMONIC(vpunpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
2794 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2795}
2796/* Opcode 0xf3 0x0f 0x68 - invalid */
2797
2798
2799/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2800FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2801{
2802 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2803 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2804}
2805
2806/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
2807FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
2808{
2809    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
2810    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2812}
2813/* Opcode 0xf3 0x0f 0x69 - invalid */
2814
2815
2816/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2817FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2818{
2819 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2820 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2821}
2822
2823/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
2824FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
2825{
2826 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
2827 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2828}
2829/* Opcode 0xf3 0x0f 0x6a - invalid */
2830
2831
2832/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2833FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2834/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
2835FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
2836/* Opcode 0xf3 0x0f 0x6b - invalid */
2837
2838
2839/* Opcode 0x0f 0x6c - invalid */
2840
2841/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
2842FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
2843{
2844 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
2845 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2846}
2847
2848/* Opcode 0xf3 0x0f 0x6c - invalid */
2849/* Opcode 0xf2 0x0f 0x6c - invalid */
2850
2851
2852/* Opcode 0x0f 0x6d - invalid */
2853
2854/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
2855FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
2856{
2857 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
2858 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2859}
2860
2861/* Opcode 0xf3 0x0f 0x6d - invalid */
2862
2863
2864/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2865FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2866{
2867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2868 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2869 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2870 else
2871 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2872 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2873 {
2874 /* MMX, greg */
2875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2876 IEM_MC_BEGIN(0, 1);
2877 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2878 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2879 IEM_MC_LOCAL(uint64_t, u64Tmp);
2880 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2881 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2882 else
2883 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2884 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2885 IEM_MC_ADVANCE_RIP();
2886 IEM_MC_END();
2887 }
2888 else
2889 {
2890 /* MMX, [mem] */
2891 IEM_MC_BEGIN(0, 2);
2892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2893 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2896 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2897 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2898 {
2899 IEM_MC_LOCAL(uint64_t, u64Tmp);
2900 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2901 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2902 }
2903 else
2904 {
2905 IEM_MC_LOCAL(uint32_t, u32Tmp);
2906 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2907 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2908 }
2909 IEM_MC_ADVANCE_RIP();
2910 IEM_MC_END();
2911 }
2912 return VINF_SUCCESS;
2913}
2914
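/*
 * Encoding note (illustrative): the REX.W test above is what turns 0f 6e into
 * movq.  Assuming standard ModRM encoding:
 *      0f 6e c8        -> movd mm1, eax   (mod=3, reg=1/mm1, rm=0/eax)
 *      48 0f 6e c8     -> movq mm1, rax   (REX.W widens the GPR operand)
 * The 32-bit form zero-extends into the 64-bit MMX register, which is what
 * IEM_MC_FETCH_GREG_U32_ZX_U64 expresses (hypothetical model, not used by IEM):
 */
DECLINLINE(uint64_t) iemSketchMovdToMmx(uint32_t uGpr)
{
    return uGpr; /* the implicit zero extension clears mm bits 63:32 */
}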
2915/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
2916FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
2917{
2918 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2919 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2920        IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
2921 else
2922        IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
2923 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2924 {
2925 /* XMM, greg*/
2926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2927 IEM_MC_BEGIN(0, 1);
2928 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2929 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2930 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2931 {
2932 IEM_MC_LOCAL(uint64_t, u64Tmp);
2933 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2934 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2935 }
2936 else
2937 {
2938 IEM_MC_LOCAL(uint32_t, u32Tmp);
2939 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2940 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2941 }
2942 IEM_MC_ADVANCE_RIP();
2943 IEM_MC_END();
2944 }
2945 else
2946 {
2947 /* XMM, [mem] */
2948 IEM_MC_BEGIN(0, 2);
2949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2950 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2953 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2954 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2955 {
2956 IEM_MC_LOCAL(uint64_t, u64Tmp);
2957 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2958 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2959 }
2960 else
2961 {
2962 IEM_MC_LOCAL(uint32_t, u32Tmp);
2963 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2964 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2965 }
2966 IEM_MC_ADVANCE_RIP();
2967 IEM_MC_END();
2968 }
2969 return VINF_SUCCESS;
2970}
2971
2972/* Opcode 0xf3 0x0f 0x6e - invalid */
2973
2974
2975/** Opcode 0x0f 0x6f - movq Pq, Qq */
2976FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2977{
2978 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2979 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2980 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2981 {
2982 /*
2983 * Register, register.
2984 */
2985 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2986 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2988 IEM_MC_BEGIN(0, 1);
2989 IEM_MC_LOCAL(uint64_t, u64Tmp);
2990 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2991 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2992 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2993 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2994 IEM_MC_ADVANCE_RIP();
2995 IEM_MC_END();
2996 }
2997 else
2998 {
2999 /*
3000 * Register, memory.
3001 */
3002 IEM_MC_BEGIN(0, 2);
3003 IEM_MC_LOCAL(uint64_t, u64Tmp);
3004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3005
3006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3008 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3009 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3010 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3011 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3012
3013 IEM_MC_ADVANCE_RIP();
3014 IEM_MC_END();
3015 }
3016 return VINF_SUCCESS;
3017}
3018
3019/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3020FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3021{
3022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3023 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3024 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3025 {
3026 /*
3027 * Register, register.
3028 */
3029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3030 IEM_MC_BEGIN(0, 0);
3031 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3032 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3033 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3034 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3035 IEM_MC_ADVANCE_RIP();
3036 IEM_MC_END();
3037 }
3038 else
3039 {
3040 /*
3041 * Register, memory.
3042 */
3043 IEM_MC_BEGIN(0, 2);
3044 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3046
3047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3049 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3050 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3051 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3052 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3053
3054 IEM_MC_ADVANCE_RIP();
3055 IEM_MC_END();
3056 }
3057 return VINF_SUCCESS;
3058}
3059
3060/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3061FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3062{
3063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3064 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3065 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3066 {
3067 /*
3068 * Register, register.
3069 */
3070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3071 IEM_MC_BEGIN(0, 0);
3072 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3073 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3074 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3075 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3076 IEM_MC_ADVANCE_RIP();
3077 IEM_MC_END();
3078 }
3079 else
3080 {
3081 /*
3082 * Register, memory.
3083 */
3084 IEM_MC_BEGIN(0, 2);
3085 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3087
3088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3090 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3091 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3092 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3093 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3094
3095 IEM_MC_ADVANCE_RIP();
3096 IEM_MC_END();
3097 }
3098 return VINF_SUCCESS;
3099}
3100
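/*
 * The only functional difference between the movdqa and movdqu paths above is
 * the memory fetch: IEM_MC_FETCH_MEM_U128_ALIGN_SSE raises #GP(0) for
 * addresses that are not 16-byte aligned, while the plain
 * IEM_MC_FETCH_MEM_U128 accepts any address.  Guest-side illustration (sketch):
 *
 *      movdqu xmm0, [rsi]      ; works for any rsi
 *      movdqa xmm1, [rsi]      ; #GP(0) unless (rsi & 15) == 0
 */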
3101
3102/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3103FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3104{
3105 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3108 {
3109 /*
3110 * Register, register.
3111 */
3112 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3114
3115 IEM_MC_BEGIN(3, 0);
3116 IEM_MC_ARG(uint64_t *, pDst, 0);
3117 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3118 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3119 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3120 IEM_MC_PREPARE_FPU_USAGE();
3121 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3122 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3123 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3124 IEM_MC_ADVANCE_RIP();
3125 IEM_MC_END();
3126 }
3127 else
3128 {
3129 /*
3130 * Register, memory.
3131 */
3132 IEM_MC_BEGIN(3, 2);
3133 IEM_MC_ARG(uint64_t *, pDst, 0);
3134 IEM_MC_LOCAL(uint64_t, uSrc);
3135 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3137
3138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3139 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3140 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3142 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3143
3144 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3145 IEM_MC_PREPARE_FPU_USAGE();
3146 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3147 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3148
3149 IEM_MC_ADVANCE_RIP();
3150 IEM_MC_END();
3151 }
3152 return VINF_SUCCESS;
3153}
3154
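/*
 * Reference model of the word shuffle performed by iemAImpl_pshufw above
 * (hypothetical helper, not used by IEM): each 2-bit field of the immediate
 * selects which source word lands in the corresponding destination word,
 * e.g. 0x1b reverses the four words and 0xe4 is the identity.
 */
DECLINLINE(uint64_t) iemSketchPshufwU64(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bImm >> (iWord * 2)) & 3;
        uResult |= ((uSrc >> (iSel * 16)) & 0xffff) << (iWord * 16);
    }
    return uResult;
}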
3155/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3156FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3157{
3158 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3160 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3161 {
3162 /*
3163 * Register, register.
3164 */
3165 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3167
3168 IEM_MC_BEGIN(3, 0);
3169 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3170 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3171 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3172 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3173 IEM_MC_PREPARE_SSE_USAGE();
3174 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3175 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3176 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3177 IEM_MC_ADVANCE_RIP();
3178 IEM_MC_END();
3179 }
3180 else
3181 {
3182 /*
3183 * Register, memory.
3184 */
3185 IEM_MC_BEGIN(3, 2);
3186 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3187 IEM_MC_LOCAL(RTUINT128U, uSrc);
3188 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3190
3191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3192 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3193 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3196
3197 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3198 IEM_MC_PREPARE_SSE_USAGE();
3199 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3200 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3201
3202 IEM_MC_ADVANCE_RIP();
3203 IEM_MC_END();
3204 }
3205 return VINF_SUCCESS;
3206}
3207
3208/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3209FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3210{
3211 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3213 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3214 {
3215 /*
3216 * Register, register.
3217 */
3218 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3220
3221 IEM_MC_BEGIN(3, 0);
3222 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3223 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3224 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3225 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3226 IEM_MC_PREPARE_SSE_USAGE();
3227 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3228 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3229 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3230 IEM_MC_ADVANCE_RIP();
3231 IEM_MC_END();
3232 }
3233 else
3234 {
3235 /*
3236 * Register, memory.
3237 */
3238 IEM_MC_BEGIN(3, 2);
3239 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3240 IEM_MC_LOCAL(RTUINT128U, uSrc);
3241 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3243
3244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3245 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3246 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3248 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3249
3250 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3251 IEM_MC_PREPARE_SSE_USAGE();
3252 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3253 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3254
3255 IEM_MC_ADVANCE_RIP();
3256 IEM_MC_END();
3257 }
3258 return VINF_SUCCESS;
3259}
3260
3261/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3262FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3263{
3264 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3266 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3267 {
3268 /*
3269 * Register, register.
3270 */
3271 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3273
3274 IEM_MC_BEGIN(3, 0);
3275 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3276 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3277 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3278 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3279 IEM_MC_PREPARE_SSE_USAGE();
3280 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3281 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3282 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3283 IEM_MC_ADVANCE_RIP();
3284 IEM_MC_END();
3285 }
3286 else
3287 {
3288 /*
3289 * Register, memory.
3290 */
3291 IEM_MC_BEGIN(3, 2);
3292 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3293 IEM_MC_LOCAL(RTUINT128U, uSrc);
3294 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3296
3297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3298 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3299 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3301 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3302
3303 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3304 IEM_MC_PREPARE_SSE_USAGE();
3305 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3306 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3307
3308 IEM_MC_ADVANCE_RIP();
3309 IEM_MC_END();
3310 }
3311 return VINF_SUCCESS;
3312}
3313
3314
3315/** Opcode 0x0f 0x71 11/2. */
3316FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3317
3318/** Opcode 0x66 0x0f 0x71 11/2. */
3319FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3320
3321/** Opcode 0x0f 0x71 11/4. */
3322FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3323
3324/** Opcode 0x66 0x0f 0x71 11/4. */
3325FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3326
3327/** Opcode 0x0f 0x71 11/6. */
3328FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3329
3330/** Opcode 0x66 0x0f 0x71 11/6. */
3331FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3332
3333
3334/**
3335 * Group 12 jump table for register variant.
3336 */
3337IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3338{
3339 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3340 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3341 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3342 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3343 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3344 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3345 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3346 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3347};
3348AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3349
3350
3351/** Opcode 0x0f 0x71. */
3352FNIEMOP_DEF(iemOp_Grp12)
3353{
3354 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3355 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3356 /* register, register */
3357 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3358 + pVCpu->iem.s.idxPrefix], bRm);
3359 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3360}
3361
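/*
 * Dispatch note for the group tables above and below: each /r (reg) value
 * owns a row of four columns, one per mandatory prefix, so the index is
 * reg * 4 + idxPrefix (assuming the usual column order: none, 0x66, 0xf3,
 * 0xf2).  Sketch (hypothetical helper, not used by IEM):
 */
DECLINLINE(uintptr_t) iemSketchGroupTableIndex(uint8_t bRm, uint8_t idxPrefix)
{
    uint8_t const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* ModRM bits 5:3 */
    return (uintptr_t)iReg * 4 + idxPrefix;
}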
3362
3363/** Opcode 0x0f 0x72 11/2. */
3364FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3365
3366/** Opcode 0x66 0x0f 0x72 11/2. */
3367FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3368
3369/** Opcode 0x0f 0x72 11/4. */
3370FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3371
3372/** Opcode 0x66 0x0f 0x72 11/4. */
3373FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3374
3375/** Opcode 0x0f 0x72 11/6. */
3376FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3377
3378/** Opcode 0x66 0x0f 0x72 11/6. */
3379FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3380
3381
3382/**
3383 * Group 13 jump table for register variant.
3384 */
3385IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3386{
3387 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3388 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3389 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3390 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3391 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3392 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3393 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3394 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3395};
3396AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3397
3398/** Opcode 0x0f 0x72. */
3399FNIEMOP_DEF(iemOp_Grp13)
3400{
3401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3402 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3403 /* register, register */
3404 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3405 + pVCpu->iem.s.idxPrefix], bRm);
3406 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3407}
3408
3409
3410/** Opcode 0x0f 0x73 11/2. */
3411FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3412
3413/** Opcode 0x66 0x0f 0x73 11/2. */
3414FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3415
3416/** Opcode 0x66 0x0f 0x73 11/3. */
3417FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3418
3419/** Opcode 0x0f 0x73 11/6. */
3420FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3421
3422/** Opcode 0x66 0x0f 0x73 11/6. */
3423FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3424
3425/** Opcode 0x66 0x0f 0x73 11/7. */
3426FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3427
3428/**
3429 * Group 14 jump table for register variant.
3430 */
3431IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3432{
3433 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3434 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3435 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3436 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3437 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3438 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3439 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3440 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3441};
3442AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3443
3444
3445/** Opcode 0x0f 0x73. */
3446FNIEMOP_DEF(iemOp_Grp14)
3447{
3448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3449 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3450 /* register, register */
3451 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3452 + pVCpu->iem.s.idxPrefix], bRm);
3453 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3454}
3455
3456
3457/**
3458 * Common worker for MMX instructions on the form:
3459 * pxxx mm1, mm2/mem64
3460 */
3461FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3462{
3463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3464 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3465 {
3466 /*
3467 * Register, register.
3468 */
3469 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3470 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3472 IEM_MC_BEGIN(2, 0);
3473 IEM_MC_ARG(uint64_t *, pDst, 0);
3474 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3475 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3476 IEM_MC_PREPARE_FPU_USAGE();
3477 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3478 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3479 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3480 IEM_MC_ADVANCE_RIP();
3481 IEM_MC_END();
3482 }
3483 else
3484 {
3485 /*
3486 * Register, memory.
3487 */
3488 IEM_MC_BEGIN(2, 2);
3489 IEM_MC_ARG(uint64_t *, pDst, 0);
3490 IEM_MC_LOCAL(uint64_t, uSrc);
3491 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3493
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3497 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3498
3499 IEM_MC_PREPARE_FPU_USAGE();
3500 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3501 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3502
3503 IEM_MC_ADVANCE_RIP();
3504 IEM_MC_END();
3505 }
3506 return VINF_SUCCESS;
3507}
3508
3509
3510/**
3511 * Common worker for SSE2 instructions on the forms:
3512 * pxxx xmm1, xmm2/mem128
3513 *
3514 * Proper alignment of the 128-bit operand is enforced.
3515 * Exceptions type 4. SSE2 cpuid checks.
3516 */
3517FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3518{
3519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3521 {
3522 /*
3523 * Register, register.
3524 */
3525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3526 IEM_MC_BEGIN(2, 0);
3527 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3528 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3529 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3530 IEM_MC_PREPARE_SSE_USAGE();
3531 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3532 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3533 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3534 IEM_MC_ADVANCE_RIP();
3535 IEM_MC_END();
3536 }
3537 else
3538 {
3539 /*
3540 * Register, memory.
3541 */
3542 IEM_MC_BEGIN(2, 2);
3543 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3544 IEM_MC_LOCAL(RTUINT128U, uSrc);
3545 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3547
3548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3550 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3551 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3552
3553 IEM_MC_PREPARE_SSE_USAGE();
3554 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3555 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3556
3557 IEM_MC_ADVANCE_RIP();
3558 IEM_MC_END();
3559 }
3560 return VINF_SUCCESS;
3561}
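/* Note: IEM_MC_FETCH_MEM_U128_ALIGN_SSE is what realizes the "proper
   alignment is enforced" remark above: a 128-bit memory operand that is not
   16-byte aligned raises #GP(0) instead of being fetched, as required for
   exception type 4 instructions. */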
3562
3563
3564/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3565FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3566{
3567 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3568 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3569}
3570
3571/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3572FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3573{
3574 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3575 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3576}
3577
3578/* Opcode 0xf3 0x0f 0x74 - invalid */
3579/* Opcode 0xf2 0x0f 0x74 - invalid */
3580
3581
3582/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3583FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3584{
3585 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3586 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3587}
3588
3589/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3590FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3591{
3592 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3593 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3594}
3595
3596/* Opcode 0xf3 0x0f 0x75 - invalid */
3597/* Opcode 0xf2 0x0f 0x75 - invalid */
3598
3599
3600/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3601FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3602{
3603 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3604 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3605}
3606
3607/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3608FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3609{
3610 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
3611 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3612}
3613
3614/* Opcode 0xf3 0x0f 0x76 - invalid */
3615/* Opcode 0xf2 0x0f 0x76 - invalid */
3616
3617
3618/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3619FNIEMOP_STUB(iemOp_emms);
3620/* Opcode 0x66 0x0f 0x77 - invalid */
3621/* Opcode 0xf3 0x0f 0x77 - invalid */
3622/* Opcode 0xf2 0x0f 0x77 - invalid */
3623
3624/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3625FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3626/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3627FNIEMOP_STUB(iemOp_AmdGrp17);
3628/* Opcode 0xf3 0x0f 0x78 - invalid */
3629/* Opcode 0xf2 0x0f 0x78 - invalid */
3630
3631/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3632FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3633/* Opcode 0x66 0x0f 0x79 - invalid */
3634/* Opcode 0xf3 0x0f 0x79 - invalid */
3635/* Opcode 0xf2 0x0f 0x79 - invalid */
3636
3637/* Opcode 0x0f 0x7a - invalid */
3638/* Opcode 0x66 0x0f 0x7a - invalid */
3639/* Opcode 0xf3 0x0f 0x7a - invalid */
3640/* Opcode 0xf2 0x0f 0x7a - invalid */
3641
3642/* Opcode 0x0f 0x7b - invalid */
3643/* Opcode 0x66 0x0f 0x7b - invalid */
3644/* Opcode 0xf3 0x0f 0x7b - invalid */
3645/* Opcode 0xf2 0x0f 0x7b - invalid */
3646
3647/* Opcode 0x0f 0x7c - invalid */
3648/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3649FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3650/* Opcode 0xf3 0x0f 0x7c - invalid */
3651/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3652FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3653
3654/* Opcode 0x0f 0x7d - invalid */
3655/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
3656FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
3657/* Opcode 0xf3 0x0f 0x7d - invalid */
3658/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
3659FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
3660
3661
3662/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3663FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3664{
3665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3666 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3667 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3668 else
3669 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3671 {
3672 /* greg, MMX */
3673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3674 IEM_MC_BEGIN(0, 1);
3675 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3676 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3677 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3678 {
3679 IEM_MC_LOCAL(uint64_t, u64Tmp);
3680 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3681 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3682 }
3683 else
3684 {
3685 IEM_MC_LOCAL(uint32_t, u32Tmp);
3686 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3687 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3688 }
3689 IEM_MC_ADVANCE_RIP();
3690 IEM_MC_END();
3691 }
3692 else
3693 {
3694 /* [mem], MMX */
3695 IEM_MC_BEGIN(0, 2);
3696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3699 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3700 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3702 {
3703 IEM_MC_LOCAL(uint64_t, u64Tmp);
3704 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3705 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3706 }
3707 else
3708 {
3709 IEM_MC_LOCAL(uint32_t, u32Tmp);
3710 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3711 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3712 }
3713 IEM_MC_ADVANCE_RIP();
3714 IEM_MC_END();
3715 }
3716 return VINF_SUCCESS;
3717}
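/* Example: with mod=3 and no REX.W this decodes as 'movd eax, mm0' (low
   32 bits of the MMX register, zero-extended into the GPR); with REX.W it
   becomes 'movq rax, mm0' and moves all 64 bits, matching the two
   IEM_OP_PRF_SIZE_REX_W branches above. */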
3718
3719/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
3720FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
3721{
3722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3723 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3724 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3725 else
3726 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3727 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3728 {
3729 /* greg, XMM */
3730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3731 IEM_MC_BEGIN(0, 1);
3732 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3733 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3734 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3735 {
3736 IEM_MC_LOCAL(uint64_t, u64Tmp);
3737 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3738 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3739 }
3740 else
3741 {
3742 IEM_MC_LOCAL(uint32_t, u32Tmp);
3743 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3744 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3745 }
3746 IEM_MC_ADVANCE_RIP();
3747 IEM_MC_END();
3748 }
3749 else
3750 {
3751 /* [mem], XMM */
3752 IEM_MC_BEGIN(0, 2);
3753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3756 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3757 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3758 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3759 {
3760 IEM_MC_LOCAL(uint64_t, u64Tmp);
3761 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3762 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3763 }
3764 else
3765 {
3766 IEM_MC_LOCAL(uint32_t, u32Tmp);
3767 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3768 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3769 }
3770 IEM_MC_ADVANCE_RIP();
3771 IEM_MC_END();
3772 }
3773 return VINF_SUCCESS;
3774}
3775
3776/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
3777FNIEMOP_STUB(iemOp_movq_Vq_Wq);
3778/* Opcode 0xf2 0x0f 0x7e - invalid */
3779
3780
3781/** Opcode 0x0f 0x7f - movq Qq, Pq */
3782FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3783{
3784 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3785 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3786 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3787 {
3788 /*
3789 * Register, register.
3790 */
3791 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3792 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3794 IEM_MC_BEGIN(0, 1);
3795 IEM_MC_LOCAL(uint64_t, u64Tmp);
3796 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3797 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3798 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3799 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3800 IEM_MC_ADVANCE_RIP();
3801 IEM_MC_END();
3802 }
3803 else
3804 {
3805 /*
3806 * Register, memory.
3807 */
3808 IEM_MC_BEGIN(0, 2);
3809 IEM_MC_LOCAL(uint64_t, u64Tmp);
3810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3811
3812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3814 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3815 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3816
3817 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3818 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3819
3820 IEM_MC_ADVANCE_RIP();
3821 IEM_MC_END();
3822 }
3823 return VINF_SUCCESS;
3824}
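/* Note: the register form above needs IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE
   because it writes an MMX register, while the memory form only reads MMX
   state and so makes do with IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ. */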
3825
3826/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
3827FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
3828{
3829 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
3830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3831 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3832 {
3833 /*
3834 * Register, register.
3835 */
3836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3837 IEM_MC_BEGIN(0, 0);
3838 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3839 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3840 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3841 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3842 IEM_MC_ADVANCE_RIP();
3843 IEM_MC_END();
3844 }
3845 else
3846 {
3847 /*
3848 * Register, memory.
3849 */
3850 IEM_MC_BEGIN(0, 2);
3851 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3853
3854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3856 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3857 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3858
3859 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3860 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3861
3862 IEM_MC_ADVANCE_RIP();
3863 IEM_MC_END();
3864 }
3865 return VINF_SUCCESS;
3866}
3867
3868/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
3869FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
3870{
3871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3872 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
3873 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3874 {
3875 /*
3876 * Register, register.
3877 */
3878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3879 IEM_MC_BEGIN(0, 0);
3880 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3881 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3882 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3883 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3884 IEM_MC_ADVANCE_RIP();
3885 IEM_MC_END();
3886 }
3887 else
3888 {
3889 /*
3890 * Register, memory.
3891 */
3892 IEM_MC_BEGIN(0, 2);
3893 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3895
3896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3898 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3899 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3900
3901 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3902 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3903
3904 IEM_MC_ADVANCE_RIP();
3905 IEM_MC_END();
3906 }
3907 return VINF_SUCCESS;
3908}
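/* Note: movdqa and movdqu differ only in the store used by their memory
   forms: IEM_MC_STORE_MEM_U128_ALIGN_SSE faults on a destination that is not
   16-byte aligned, whereas the plain IEM_MC_STORE_MEM_U128 accepts any
   alignment. The register forms are identical. */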
3909
3910/* Opcode 0xf2 0x0f 0x7f - invalid */
3911
3912
3913
3914/** Opcode 0x0f 0x80. */
3915FNIEMOP_DEF(iemOp_jo_Jv)
3916{
3917 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3918 IEMOP_HLP_MIN_386();
3919 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3920 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3921 {
3922 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3924
3925 IEM_MC_BEGIN(0, 0);
3926 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3927 IEM_MC_REL_JMP_S16(i16Imm);
3928 } IEM_MC_ELSE() {
3929 IEM_MC_ADVANCE_RIP();
3930 } IEM_MC_ENDIF();
3931 IEM_MC_END();
3932 }
3933 else
3934 {
3935 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3937
3938 IEM_MC_BEGIN(0, 0);
3939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3940 IEM_MC_REL_JMP_S32(i32Imm);
3941 } IEM_MC_ELSE() {
3942 IEM_MC_ADVANCE_RIP();
3943 } IEM_MC_ENDIF();
3944 IEM_MC_END();
3945 }
3946 return VINF_SUCCESS;
3947}
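/* Note: the remaining Jcc handlers (0x0f 0x81 through 0x0f 0x8f) all follow
   the shape above: fetch a rel16 or rel32 displacement depending on the
   effective operand size (defaulted to 64-bit in long mode by
   IEMOP_HLP_DEFAULT_64BIT_OP_SIZE), then IEM_MC_REL_JMP_S16/S32 or
   IEM_MC_ADVANCE_RIP depending on the tested EFLAGS predicate. Only the
   predicate differs from opcode to opcode. */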
3948
3949
3950/** Opcode 0x0f 0x81. */
3951FNIEMOP_DEF(iemOp_jno_Jv)
3952{
3953 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3954 IEMOP_HLP_MIN_386();
3955 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3956 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3957 {
3958 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3960
3961 IEM_MC_BEGIN(0, 0);
3962 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3963 IEM_MC_ADVANCE_RIP();
3964 } IEM_MC_ELSE() {
3965 IEM_MC_REL_JMP_S16(i16Imm);
3966 } IEM_MC_ENDIF();
3967 IEM_MC_END();
3968 }
3969 else
3970 {
3971 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3973
3974 IEM_MC_BEGIN(0, 0);
3975 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3976 IEM_MC_ADVANCE_RIP();
3977 } IEM_MC_ELSE() {
3978 IEM_MC_REL_JMP_S32(i32Imm);
3979 } IEM_MC_ENDIF();
3980 IEM_MC_END();
3981 }
3982 return VINF_SUCCESS;
3983}
3984
3985
3986/** Opcode 0x0f 0x82. */
3987FNIEMOP_DEF(iemOp_jc_Jv)
3988{
3989 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3990 IEMOP_HLP_MIN_386();
3991 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3992 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3993 {
3994 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3996
3997 IEM_MC_BEGIN(0, 0);
3998 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3999 IEM_MC_REL_JMP_S16(i16Imm);
4000 } IEM_MC_ELSE() {
4001 IEM_MC_ADVANCE_RIP();
4002 } IEM_MC_ENDIF();
4003 IEM_MC_END();
4004 }
4005 else
4006 {
4007 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4009
4010 IEM_MC_BEGIN(0, 0);
4011 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4012 IEM_MC_REL_JMP_S32(i32Imm);
4013 } IEM_MC_ELSE() {
4014 IEM_MC_ADVANCE_RIP();
4015 } IEM_MC_ENDIF();
4016 IEM_MC_END();
4017 }
4018 return VINF_SUCCESS;
4019}
4020
4021
4022/** Opcode 0x0f 0x83. */
4023FNIEMOP_DEF(iemOp_jnc_Jv)
4024{
4025 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4026 IEMOP_HLP_MIN_386();
4027 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4028 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4029 {
4030 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4032
4033 IEM_MC_BEGIN(0, 0);
4034 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4035 IEM_MC_ADVANCE_RIP();
4036 } IEM_MC_ELSE() {
4037 IEM_MC_REL_JMP_S16(i16Imm);
4038 } IEM_MC_ENDIF();
4039 IEM_MC_END();
4040 }
4041 else
4042 {
4043 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4045
4046 IEM_MC_BEGIN(0, 0);
4047 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4048 IEM_MC_ADVANCE_RIP();
4049 } IEM_MC_ELSE() {
4050 IEM_MC_REL_JMP_S32(i32Imm);
4051 } IEM_MC_ENDIF();
4052 IEM_MC_END();
4053 }
4054 return VINF_SUCCESS;
4055}
4056
4057
4058/** Opcode 0x0f 0x84. */
4059FNIEMOP_DEF(iemOp_je_Jv)
4060{
4061 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4062 IEMOP_HLP_MIN_386();
4063 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4064 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4065 {
4066 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4068
4069 IEM_MC_BEGIN(0, 0);
4070 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4071 IEM_MC_REL_JMP_S16(i16Imm);
4072 } IEM_MC_ELSE() {
4073 IEM_MC_ADVANCE_RIP();
4074 } IEM_MC_ENDIF();
4075 IEM_MC_END();
4076 }
4077 else
4078 {
4079 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4081
4082 IEM_MC_BEGIN(0, 0);
4083 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4084 IEM_MC_REL_JMP_S32(i32Imm);
4085 } IEM_MC_ELSE() {
4086 IEM_MC_ADVANCE_RIP();
4087 } IEM_MC_ENDIF();
4088 IEM_MC_END();
4089 }
4090 return VINF_SUCCESS;
4091}
4092
4093
4094/** Opcode 0x0f 0x85. */
4095FNIEMOP_DEF(iemOp_jne_Jv)
4096{
4097 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4098 IEMOP_HLP_MIN_386();
4099 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4100 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4101 {
4102 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4104
4105 IEM_MC_BEGIN(0, 0);
4106 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4107 IEM_MC_ADVANCE_RIP();
4108 } IEM_MC_ELSE() {
4109 IEM_MC_REL_JMP_S16(i16Imm);
4110 } IEM_MC_ENDIF();
4111 IEM_MC_END();
4112 }
4113 else
4114 {
4115 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4117
4118 IEM_MC_BEGIN(0, 0);
4119 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4120 IEM_MC_ADVANCE_RIP();
4121 } IEM_MC_ELSE() {
4122 IEM_MC_REL_JMP_S32(i32Imm);
4123 } IEM_MC_ENDIF();
4124 IEM_MC_END();
4125 }
4126 return VINF_SUCCESS;
4127}
4128
4129
4130/** Opcode 0x0f 0x86. */
4131FNIEMOP_DEF(iemOp_jbe_Jv)
4132{
4133 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4134 IEMOP_HLP_MIN_386();
4135 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4136 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4137 {
4138 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4140
4141 IEM_MC_BEGIN(0, 0);
4142 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4143 IEM_MC_REL_JMP_S16(i16Imm);
4144 } IEM_MC_ELSE() {
4145 IEM_MC_ADVANCE_RIP();
4146 } IEM_MC_ENDIF();
4147 IEM_MC_END();
4148 }
4149 else
4150 {
4151 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4153
4154 IEM_MC_BEGIN(0, 0);
4155 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4156 IEM_MC_REL_JMP_S32(i32Imm);
4157 } IEM_MC_ELSE() {
4158 IEM_MC_ADVANCE_RIP();
4159 } IEM_MC_ENDIF();
4160 IEM_MC_END();
4161 }
4162 return VINF_SUCCESS;
4163}
4164
4165
4166/** Opcode 0x0f 0x87. */
4167FNIEMOP_DEF(iemOp_jnbe_Jv)
4168{
4169 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4170 IEMOP_HLP_MIN_386();
4171 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4172 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4173 {
4174 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4176
4177 IEM_MC_BEGIN(0, 0);
4178 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4179 IEM_MC_ADVANCE_RIP();
4180 } IEM_MC_ELSE() {
4181 IEM_MC_REL_JMP_S16(i16Imm);
4182 } IEM_MC_ENDIF();
4183 IEM_MC_END();
4184 }
4185 else
4186 {
4187 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4189
4190 IEM_MC_BEGIN(0, 0);
4191 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4192 IEM_MC_ADVANCE_RIP();
4193 } IEM_MC_ELSE() {
4194 IEM_MC_REL_JMP_S32(i32Imm);
4195 } IEM_MC_ENDIF();
4196 IEM_MC_END();
4197 }
4198 return VINF_SUCCESS;
4199}
4200
4201
4202/** Opcode 0x0f 0x88. */
4203FNIEMOP_DEF(iemOp_js_Jv)
4204{
4205 IEMOP_MNEMONIC(js_Jv, "js Jv");
4206 IEMOP_HLP_MIN_386();
4207 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4208 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4209 {
4210 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4212
4213 IEM_MC_BEGIN(0, 0);
4214 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4215 IEM_MC_REL_JMP_S16(i16Imm);
4216 } IEM_MC_ELSE() {
4217 IEM_MC_ADVANCE_RIP();
4218 } IEM_MC_ENDIF();
4219 IEM_MC_END();
4220 }
4221 else
4222 {
4223 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4225
4226 IEM_MC_BEGIN(0, 0);
4227 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4228 IEM_MC_REL_JMP_S32(i32Imm);
4229 } IEM_MC_ELSE() {
4230 IEM_MC_ADVANCE_RIP();
4231 } IEM_MC_ENDIF();
4232 IEM_MC_END();
4233 }
4234 return VINF_SUCCESS;
4235}
4236
4237
4238/** Opcode 0x0f 0x89. */
4239FNIEMOP_DEF(iemOp_jns_Jv)
4240{
4241 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4242 IEMOP_HLP_MIN_386();
4243 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4244 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4245 {
4246 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4248
4249 IEM_MC_BEGIN(0, 0);
4250 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4251 IEM_MC_ADVANCE_RIP();
4252 } IEM_MC_ELSE() {
4253 IEM_MC_REL_JMP_S16(i16Imm);
4254 } IEM_MC_ENDIF();
4255 IEM_MC_END();
4256 }
4257 else
4258 {
4259 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4261
4262 IEM_MC_BEGIN(0, 0);
4263 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4264 IEM_MC_ADVANCE_RIP();
4265 } IEM_MC_ELSE() {
4266 IEM_MC_REL_JMP_S32(i32Imm);
4267 } IEM_MC_ENDIF();
4268 IEM_MC_END();
4269 }
4270 return VINF_SUCCESS;
4271}
4272
4273
4274/** Opcode 0x0f 0x8a. */
4275FNIEMOP_DEF(iemOp_jp_Jv)
4276{
4277 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4278 IEMOP_HLP_MIN_386();
4279 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4280 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4281 {
4282 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4284
4285 IEM_MC_BEGIN(0, 0);
4286 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4287 IEM_MC_REL_JMP_S16(i16Imm);
4288 } IEM_MC_ELSE() {
4289 IEM_MC_ADVANCE_RIP();
4290 } IEM_MC_ENDIF();
4291 IEM_MC_END();
4292 }
4293 else
4294 {
4295 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4297
4298 IEM_MC_BEGIN(0, 0);
4299 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4300 IEM_MC_REL_JMP_S32(i32Imm);
4301 } IEM_MC_ELSE() {
4302 IEM_MC_ADVANCE_RIP();
4303 } IEM_MC_ENDIF();
4304 IEM_MC_END();
4305 }
4306 return VINF_SUCCESS;
4307}
4308
4309
4310/** Opcode 0x0f 0x8b. */
4311FNIEMOP_DEF(iemOp_jnp_Jv)
4312{
4313 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4314 IEMOP_HLP_MIN_386();
4315 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4316 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4317 {
4318 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4320
4321 IEM_MC_BEGIN(0, 0);
4322 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4323 IEM_MC_ADVANCE_RIP();
4324 } IEM_MC_ELSE() {
4325 IEM_MC_REL_JMP_S16(i16Imm);
4326 } IEM_MC_ENDIF();
4327 IEM_MC_END();
4328 }
4329 else
4330 {
4331 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4333
4334 IEM_MC_BEGIN(0, 0);
4335 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4336 IEM_MC_ADVANCE_RIP();
4337 } IEM_MC_ELSE() {
4338 IEM_MC_REL_JMP_S32(i32Imm);
4339 } IEM_MC_ENDIF();
4340 IEM_MC_END();
4341 }
4342 return VINF_SUCCESS;
4343}
4344
4345
4346/** Opcode 0x0f 0x8c. */
4347FNIEMOP_DEF(iemOp_jl_Jv)
4348{
4349 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4350 IEMOP_HLP_MIN_386();
4351 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4352 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4353 {
4354 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4356
4357 IEM_MC_BEGIN(0, 0);
4358 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4359 IEM_MC_REL_JMP_S16(i16Imm);
4360 } IEM_MC_ELSE() {
4361 IEM_MC_ADVANCE_RIP();
4362 } IEM_MC_ENDIF();
4363 IEM_MC_END();
4364 }
4365 else
4366 {
4367 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4369
4370 IEM_MC_BEGIN(0, 0);
4371 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4372 IEM_MC_REL_JMP_S32(i32Imm);
4373 } IEM_MC_ELSE() {
4374 IEM_MC_ADVANCE_RIP();
4375 } IEM_MC_ENDIF();
4376 IEM_MC_END();
4377 }
4378 return VINF_SUCCESS;
4379}
4380
4381
4382/** Opcode 0x0f 0x8d. */
4383FNIEMOP_DEF(iemOp_jnl_Jv)
4384{
4385 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4386 IEMOP_HLP_MIN_386();
4387 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4388 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4389 {
4390 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4392
4393 IEM_MC_BEGIN(0, 0);
4394 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4395 IEM_MC_ADVANCE_RIP();
4396 } IEM_MC_ELSE() {
4397 IEM_MC_REL_JMP_S16(i16Imm);
4398 } IEM_MC_ENDIF();
4399 IEM_MC_END();
4400 }
4401 else
4402 {
4403 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4405
4406 IEM_MC_BEGIN(0, 0);
4407 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4408 IEM_MC_ADVANCE_RIP();
4409 } IEM_MC_ELSE() {
4410 IEM_MC_REL_JMP_S32(i32Imm);
4411 } IEM_MC_ENDIF();
4412 IEM_MC_END();
4413 }
4414 return VINF_SUCCESS;
4415}
4416
4417
4418/** Opcode 0x0f 0x8e. */
4419FNIEMOP_DEF(iemOp_jle_Jv)
4420{
4421 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4422 IEMOP_HLP_MIN_386();
4423 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4424 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4425 {
4426 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4428
4429 IEM_MC_BEGIN(0, 0);
4430 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4431 IEM_MC_REL_JMP_S16(i16Imm);
4432 } IEM_MC_ELSE() {
4433 IEM_MC_ADVANCE_RIP();
4434 } IEM_MC_ENDIF();
4435 IEM_MC_END();
4436 }
4437 else
4438 {
4439 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4441
4442 IEM_MC_BEGIN(0, 0);
4443 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4444 IEM_MC_REL_JMP_S32(i32Imm);
4445 } IEM_MC_ELSE() {
4446 IEM_MC_ADVANCE_RIP();
4447 } IEM_MC_ENDIF();
4448 IEM_MC_END();
4449 }
4450 return VINF_SUCCESS;
4451}
4452
4453
4454/** Opcode 0x0f 0x8f. */
4455FNIEMOP_DEF(iemOp_jnle_Jv)
4456{
4457 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4458 IEMOP_HLP_MIN_386();
4459 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4460 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4461 {
4462 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4464
4465 IEM_MC_BEGIN(0, 0);
4466 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4467 IEM_MC_ADVANCE_RIP();
4468 } IEM_MC_ELSE() {
4469 IEM_MC_REL_JMP_S16(i16Imm);
4470 } IEM_MC_ENDIF();
4471 IEM_MC_END();
4472 }
4473 else
4474 {
4475 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4477
4478 IEM_MC_BEGIN(0, 0);
4479 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4480 IEM_MC_ADVANCE_RIP();
4481 } IEM_MC_ELSE() {
4482 IEM_MC_REL_JMP_S32(i32Imm);
4483 } IEM_MC_ENDIF();
4484 IEM_MC_END();
4485 }
4486 return VINF_SUCCESS;
4487}
4488
4489
4490/** Opcode 0x0f 0x90. */
4491FNIEMOP_DEF(iemOp_seto_Eb)
4492{
4493 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4494 IEMOP_HLP_MIN_386();
4495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4496
4497 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4498 * any way. AMD says it's "unused", whatever that means. We're
4499 * ignoring it for now. */
4500 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4501 {
4502 /* register target */
4503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4504 IEM_MC_BEGIN(0, 0);
4505 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4506 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4507 } IEM_MC_ELSE() {
4508 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4509 } IEM_MC_ENDIF();
4510 IEM_MC_ADVANCE_RIP();
4511 IEM_MC_END();
4512 }
4513 else
4514 {
4515 /* memory target */
4516 IEM_MC_BEGIN(0, 1);
4517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4520 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4521 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4522 } IEM_MC_ELSE() {
4523 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4524 } IEM_MC_ENDIF();
4525 IEM_MC_ADVANCE_RIP();
4526 IEM_MC_END();
4527 }
4528 return VINF_SUCCESS;
4529}
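/* Note: the 0x0f 0x9x setcc handlers below mirror the Jcc predicates, but
   instead of branching they store a single byte, 1 or 0, to the Eb operand.
   Both the register and the memory form write unconditionally; only the
   value stored depends on EFLAGS. */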
4530
4531
4532/** Opcode 0x0f 0x91. */
4533FNIEMOP_DEF(iemOp_setno_Eb)
4534{
4535 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4536 IEMOP_HLP_MIN_386();
4537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4538
4539 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4540 * any way. AMD says it's "unused", whatever that means. We're
4541 * ignoring it for now. */
4542 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4543 {
4544 /* register target */
4545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4546 IEM_MC_BEGIN(0, 0);
4547 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4548 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4549 } IEM_MC_ELSE() {
4550 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4551 } IEM_MC_ENDIF();
4552 IEM_MC_ADVANCE_RIP();
4553 IEM_MC_END();
4554 }
4555 else
4556 {
4557 /* memory target */
4558 IEM_MC_BEGIN(0, 1);
4559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4563 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4564 } IEM_MC_ELSE() {
4565 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4566 } IEM_MC_ENDIF();
4567 IEM_MC_ADVANCE_RIP();
4568 IEM_MC_END();
4569 }
4570 return VINF_SUCCESS;
4571}
4572
4573
4574/** Opcode 0x0f 0x92. */
4575FNIEMOP_DEF(iemOp_setc_Eb)
4576{
4577 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4578 IEMOP_HLP_MIN_386();
4579 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4580
4581 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4582 * any way. AMD says it's "unused", whatever that means. We're
4583 * ignoring it for now. */
4584 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4585 {
4586 /* register target */
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588 IEM_MC_BEGIN(0, 0);
4589 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4590 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4591 } IEM_MC_ELSE() {
4592 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4593 } IEM_MC_ENDIF();
4594 IEM_MC_ADVANCE_RIP();
4595 IEM_MC_END();
4596 }
4597 else
4598 {
4599 /* memory target */
4600 IEM_MC_BEGIN(0, 1);
4601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4604 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4605 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4606 } IEM_MC_ELSE() {
4607 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4608 } IEM_MC_ENDIF();
4609 IEM_MC_ADVANCE_RIP();
4610 IEM_MC_END();
4611 }
4612 return VINF_SUCCESS;
4613}
4614
4615
4616/** Opcode 0x0f 0x93. */
4617FNIEMOP_DEF(iemOp_setnc_Eb)
4618{
4619 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4620 IEMOP_HLP_MIN_386();
4621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4622
4623 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4624 * any way. AMD says it's "unused", whatever that means. We're
4625 * ignoring it for now. */
4626 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4627 {
4628 /* register target */
4629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4630 IEM_MC_BEGIN(0, 0);
4631 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4632 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4633 } IEM_MC_ELSE() {
4634 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4635 } IEM_MC_ENDIF();
4636 IEM_MC_ADVANCE_RIP();
4637 IEM_MC_END();
4638 }
4639 else
4640 {
4641 /* memory target */
4642 IEM_MC_BEGIN(0, 1);
4643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4646 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4647 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4648 } IEM_MC_ELSE() {
4649 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4650 } IEM_MC_ENDIF();
4651 IEM_MC_ADVANCE_RIP();
4652 IEM_MC_END();
4653 }
4654 return VINF_SUCCESS;
4655}
4656
4657
4658/** Opcode 0x0f 0x94. */
4659FNIEMOP_DEF(iemOp_sete_Eb)
4660{
4661 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4662 IEMOP_HLP_MIN_386();
4663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4664
4665 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4666 * any way. AMD says it's "unused", whatever that means. We're
4667 * ignoring it for now. */
4668 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4669 {
4670 /* register target */
4671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4672 IEM_MC_BEGIN(0, 0);
4673 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4674 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4675 } IEM_MC_ELSE() {
4676 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4677 } IEM_MC_ENDIF();
4678 IEM_MC_ADVANCE_RIP();
4679 IEM_MC_END();
4680 }
4681 else
4682 {
4683 /* memory target */
4684 IEM_MC_BEGIN(0, 1);
4685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4688 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4689 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4690 } IEM_MC_ELSE() {
4691 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4692 } IEM_MC_ENDIF();
4693 IEM_MC_ADVANCE_RIP();
4694 IEM_MC_END();
4695 }
4696 return VINF_SUCCESS;
4697}
4698
4699
4700/** Opcode 0x0f 0x95. */
4701FNIEMOP_DEF(iemOp_setne_Eb)
4702{
4703 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4704 IEMOP_HLP_MIN_386();
4705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4706
4707 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4708 * any way. AMD says it's "unused", whatever that means. We're
4709 * ignoring it for now. */
4710 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4711 {
4712 /* register target */
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4714 IEM_MC_BEGIN(0, 0);
4715 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4716 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4717 } IEM_MC_ELSE() {
4718 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4719 } IEM_MC_ENDIF();
4720 IEM_MC_ADVANCE_RIP();
4721 IEM_MC_END();
4722 }
4723 else
4724 {
4725 /* memory target */
4726 IEM_MC_BEGIN(0, 1);
4727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4731 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4732 } IEM_MC_ELSE() {
4733 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4734 } IEM_MC_ENDIF();
4735 IEM_MC_ADVANCE_RIP();
4736 IEM_MC_END();
4737 }
4738 return VINF_SUCCESS;
4739}
4740
4741
4742/** Opcode 0x0f 0x96. */
4743FNIEMOP_DEF(iemOp_setbe_Eb)
4744{
4745 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4746 IEMOP_HLP_MIN_386();
4747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4748
4749 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4750 * any way. AMD says it's "unused", whatever that means. We're
4751 * ignoring it for now. */
4752 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4753 {
4754 /* register target */
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4756 IEM_MC_BEGIN(0, 0);
4757 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4758 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4759 } IEM_MC_ELSE() {
4760 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4761 } IEM_MC_ENDIF();
4762 IEM_MC_ADVANCE_RIP();
4763 IEM_MC_END();
4764 }
4765 else
4766 {
4767 /* memory target */
4768 IEM_MC_BEGIN(0, 1);
4769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4772 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4773 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4774 } IEM_MC_ELSE() {
4775 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4776 } IEM_MC_ENDIF();
4777 IEM_MC_ADVANCE_RIP();
4778 IEM_MC_END();
4779 }
4780 return VINF_SUCCESS;
4781}
4782
4783
4784/** Opcode 0x0f 0x97. */
4785FNIEMOP_DEF(iemOp_setnbe_Eb)
4786{
4787 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4788 IEMOP_HLP_MIN_386();
4789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4790
4791 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4792 * any way. AMD says it's "unused", whatever that means. We're
4793 * ignoring it for now. */
4794 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4795 {
4796 /* register target */
4797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4798 IEM_MC_BEGIN(0, 0);
4799 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4800 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4801 } IEM_MC_ELSE() {
4802 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4803 } IEM_MC_ENDIF();
4804 IEM_MC_ADVANCE_RIP();
4805 IEM_MC_END();
4806 }
4807 else
4808 {
4809 /* memory target */
4810 IEM_MC_BEGIN(0, 1);
4811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4814 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4815 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4816 } IEM_MC_ELSE() {
4817 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4818 } IEM_MC_ENDIF();
4819 IEM_MC_ADVANCE_RIP();
4820 IEM_MC_END();
4821 }
4822 return VINF_SUCCESS;
4823}
4824
4825
4826/** Opcode 0x0f 0x98. */
4827FNIEMOP_DEF(iemOp_sets_Eb)
4828{
4829 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4830 IEMOP_HLP_MIN_386();
4831 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4832
4833 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4834 * any way. AMD says it's "unused", whatever that means. We're
4835 * ignoring it for now. */
4836 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4837 {
4838 /* register target */
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4840 IEM_MC_BEGIN(0, 0);
4841 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4842 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4843 } IEM_MC_ELSE() {
4844 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4845 } IEM_MC_ENDIF();
4846 IEM_MC_ADVANCE_RIP();
4847 IEM_MC_END();
4848 }
4849 else
4850 {
4851 /* memory target */
4852 IEM_MC_BEGIN(0, 1);
4853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4856 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4857 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4858 } IEM_MC_ELSE() {
4859 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4860 } IEM_MC_ENDIF();
4861 IEM_MC_ADVANCE_RIP();
4862 IEM_MC_END();
4863 }
4864 return VINF_SUCCESS;
4865}
4866
4867
4868/** Opcode 0x0f 0x99. */
4869FNIEMOP_DEF(iemOp_setns_Eb)
4870{
4871 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4872 IEMOP_HLP_MIN_386();
4873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4874
4875 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4876 * any way. AMD says it's "unused", whatever that means. We're
4877 * ignoring it for now. */
4878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4879 {
4880 /* register target */
4881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4882 IEM_MC_BEGIN(0, 0);
4883 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4884 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4885 } IEM_MC_ELSE() {
4886 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4887 } IEM_MC_ENDIF();
4888 IEM_MC_ADVANCE_RIP();
4889 IEM_MC_END();
4890 }
4891 else
4892 {
4893 /* memory target */
4894 IEM_MC_BEGIN(0, 1);
4895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4898 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4899 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4900 } IEM_MC_ELSE() {
4901 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4902 } IEM_MC_ENDIF();
4903 IEM_MC_ADVANCE_RIP();
4904 IEM_MC_END();
4905 }
4906 return VINF_SUCCESS;
4907}
4908
4909
4910/** Opcode 0x0f 0x9a. */
4911FNIEMOP_DEF(iemOp_setp_Eb)
4912{
4913 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4914 IEMOP_HLP_MIN_386();
4915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4916
4917 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4918 * any way. AMD says it's "unused", whatever that means. We're
4919 * ignoring it for now. */
4920 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4921 {
4922 /* register target */
4923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4924 IEM_MC_BEGIN(0, 0);
4925 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4926 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4927 } IEM_MC_ELSE() {
4928 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4929 } IEM_MC_ENDIF();
4930 IEM_MC_ADVANCE_RIP();
4931 IEM_MC_END();
4932 }
4933 else
4934 {
4935 /* memory target */
4936 IEM_MC_BEGIN(0, 1);
4937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4940 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4941 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4942 } IEM_MC_ELSE() {
4943 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4944 } IEM_MC_ENDIF();
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 }
4948 return VINF_SUCCESS;
4949}
4950
4951
4952/** Opcode 0x0f 0x9b. */
4953FNIEMOP_DEF(iemOp_setnp_Eb)
4954{
4955 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4956 IEMOP_HLP_MIN_386();
4957 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4958
4959 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4960 * any way. AMD says it's "unused", whatever that means. We're
4961 * ignoring it for now. */
4962 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4963 {
4964 /* register target */
4965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4966 IEM_MC_BEGIN(0, 0);
4967 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4968 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4969 } IEM_MC_ELSE() {
4970 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4971 } IEM_MC_ENDIF();
4972 IEM_MC_ADVANCE_RIP();
4973 IEM_MC_END();
4974 }
4975 else
4976 {
4977 /* memory target */
4978 IEM_MC_BEGIN(0, 1);
4979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4982 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4983 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4984 } IEM_MC_ELSE() {
4985 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4986 } IEM_MC_ENDIF();
4987 IEM_MC_ADVANCE_RIP();
4988 IEM_MC_END();
4989 }
4990 return VINF_SUCCESS;
4991}
4992
4993
4994/** Opcode 0x0f 0x9c. */
4995FNIEMOP_DEF(iemOp_setl_Eb)
4996{
4997 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4998 IEMOP_HLP_MIN_386();
4999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5000
5001 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5002 * any way. AMD says it's "unused", whatever that means. We're
5003 * ignoring it for now. */
5004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5005 {
5006 /* register target */
5007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5008 IEM_MC_BEGIN(0, 0);
5009 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5010 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5011 } IEM_MC_ELSE() {
5012 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5013 } IEM_MC_ENDIF();
5014 IEM_MC_ADVANCE_RIP();
5015 IEM_MC_END();
5016 }
5017 else
5018 {
5019 /* memory target */
5020 IEM_MC_BEGIN(0, 1);
5021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5024 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5025 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5026 } IEM_MC_ELSE() {
5027 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5028 } IEM_MC_ENDIF();
5029 IEM_MC_ADVANCE_RIP();
5030 IEM_MC_END();
5031 }
5032 return VINF_SUCCESS;
5033}
5034
5035
5036/** Opcode 0x0f 0x9d. */
5037FNIEMOP_DEF(iemOp_setnl_Eb)
5038{
5039 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5040 IEMOP_HLP_MIN_386();
5041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5042
5043 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5044 * any way. AMD says it's "unused", whatever that means. We're
5045 * ignoring it for now. */
5046 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5047 {
5048 /* register target */
5049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5050 IEM_MC_BEGIN(0, 0);
5051 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5052 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5053 } IEM_MC_ELSE() {
5054 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5055 } IEM_MC_ENDIF();
5056 IEM_MC_ADVANCE_RIP();
5057 IEM_MC_END();
5058 }
5059 else
5060 {
5061 /* memory target */
5062 IEM_MC_BEGIN(0, 1);
5063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5066 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5067 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5068 } IEM_MC_ELSE() {
5069 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5070 } IEM_MC_ENDIF();
5071 IEM_MC_ADVANCE_RIP();
5072 IEM_MC_END();
5073 }
5074 return VINF_SUCCESS;
5075}
5076
5077
5078/** Opcode 0x0f 0x9e. */
5079FNIEMOP_DEF(iemOp_setle_Eb)
5080{
5081 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5082 IEMOP_HLP_MIN_386();
5083 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5084
5085 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5086 * any way. AMD says it's "unused", whatever that means. We're
5087 * ignoring it for now. */
5088 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5089 {
5090 /* register target */
5091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5092 IEM_MC_BEGIN(0, 0);
5093 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5094 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5095 } IEM_MC_ELSE() {
5096 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5097 } IEM_MC_ENDIF();
5098 IEM_MC_ADVANCE_RIP();
5099 IEM_MC_END();
5100 }
5101 else
5102 {
5103 /* memory target */
5104 IEM_MC_BEGIN(0, 1);
5105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5108 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5109 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5110 } IEM_MC_ELSE() {
5111 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5112 } IEM_MC_ENDIF();
5113 IEM_MC_ADVANCE_RIP();
5114 IEM_MC_END();
5115 }
5116 return VINF_SUCCESS;
5117}
5118
5119
5120/** Opcode 0x0f 0x9f. */
5121FNIEMOP_DEF(iemOp_setnle_Eb)
5122{
5123 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5124 IEMOP_HLP_MIN_386();
5125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5126
5127 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5128 * any way. AMD says it's "unused", whatever that means. We're
5129 * ignoring it for now. */
5130 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5131 {
5132 /* register target */
5133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5134 IEM_MC_BEGIN(0, 0);
5135 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5136 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5137 } IEM_MC_ELSE() {
5138 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5139 } IEM_MC_ENDIF();
5140 IEM_MC_ADVANCE_RIP();
5141 IEM_MC_END();
5142 }
5143 else
5144 {
5145 /* memory target */
5146 IEM_MC_BEGIN(0, 1);
5147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5150 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5151 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5152 } IEM_MC_ELSE() {
5153 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5154 } IEM_MC_ENDIF();
5155 IEM_MC_ADVANCE_RIP();
5156 IEM_MC_END();
5157 }
5158 return VINF_SUCCESS;
5159}
5160
5161
5162/**
5163 * Common 'push segment-register' helper.
5164 */
5165FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5166{
5167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5168 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5169 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5170
5171 switch (pVCpu->iem.s.enmEffOpSize)
5172 {
5173 case IEMMODE_16BIT:
5174 IEM_MC_BEGIN(0, 1);
5175 IEM_MC_LOCAL(uint16_t, u16Value);
5176 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5177 IEM_MC_PUSH_U16(u16Value);
5178 IEM_MC_ADVANCE_RIP();
5179 IEM_MC_END();
5180 break;
5181
5182 case IEMMODE_32BIT:
5183 IEM_MC_BEGIN(0, 1);
5184 IEM_MC_LOCAL(uint32_t, u32Value);
5185 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5186 IEM_MC_PUSH_U32_SREG(u32Value);
5187 IEM_MC_ADVANCE_RIP();
5188 IEM_MC_END();
5189 break;
5190
5191 case IEMMODE_64BIT:
5192 IEM_MC_BEGIN(0, 1);
5193 IEM_MC_LOCAL(uint64_t, u64Value);
5194 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5195 IEM_MC_PUSH_U64(u64Value);
5196 IEM_MC_ADVANCE_RIP();
5197 IEM_MC_END();
5198 break;
5199 }
5200
5201 return VINF_SUCCESS;
5202}
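/* Note: the 32-bit case goes through the dedicated IEM_MC_PUSH_U32_SREG
   rather than a plain 32-bit push, presumably because recent CPUs performing
   a segment-register push with a 32-bit operand size only write the low
   16 bits of the stack slot, leaving the upper half untouched. */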
5203
5204
5205/** Opcode 0x0f 0xa0. */
5206FNIEMOP_DEF(iemOp_push_fs)
5207{
5208 IEMOP_MNEMONIC(push_fs, "push fs");
5209 IEMOP_HLP_MIN_386();
5210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5211 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5212}
5213
5214
5215/** Opcode 0x0f 0xa1. */
5216FNIEMOP_DEF(iemOp_pop_fs)
5217{
5218 IEMOP_MNEMONIC(pop_fs, "pop fs");
5219 IEMOP_HLP_MIN_386();
5220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5221 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5222}
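/* Note: unlike push, pop fs is deferred to a C implementation
   (iemCImpl_pop_Sreg), since loading a segment register involves descriptor
   table reads and permission checks that are too involved for an MC block. */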
5223
5224
5225/** Opcode 0x0f 0xa2. */
5226FNIEMOP_DEF(iemOp_cpuid)
5227{
5228 IEMOP_MNEMONIC(cpuid, "cpuid");
5229 IEMOP_HLP_MIN_486(); /* not all 486es. */
5230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5231 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5232}
5233
5234
5235/**
5236 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5237 * iemOp_bts_Ev_Gv.
5238 */
5239FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5240{
5241 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5242 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5243
5244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5245 {
5246 /* register destination. */
5247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5248 switch (pVCpu->iem.s.enmEffOpSize)
5249 {
5250 case IEMMODE_16BIT:
5251 IEM_MC_BEGIN(3, 0);
5252 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5253 IEM_MC_ARG(uint16_t, u16Src, 1);
5254 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5255
5256 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5257 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5258 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5259 IEM_MC_REF_EFLAGS(pEFlags);
5260 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5261
5262 IEM_MC_ADVANCE_RIP();
5263 IEM_MC_END();
5264 return VINF_SUCCESS;
5265
5266 case IEMMODE_32BIT:
5267 IEM_MC_BEGIN(3, 0);
5268 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5269 IEM_MC_ARG(uint32_t, u32Src, 1);
5270 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5271
5272 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5273 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5274 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5275 IEM_MC_REF_EFLAGS(pEFlags);
5276 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5277
5278 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5279 IEM_MC_ADVANCE_RIP();
5280 IEM_MC_END();
5281 return VINF_SUCCESS;
5282
5283 case IEMMODE_64BIT:
5284 IEM_MC_BEGIN(3, 0);
5285 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5286 IEM_MC_ARG(uint64_t, u64Src, 1);
5287 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5288
5289 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5290 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5291 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5292 IEM_MC_REF_EFLAGS(pEFlags);
5293 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5294
5295 IEM_MC_ADVANCE_RIP();
5296 IEM_MC_END();
5297 return VINF_SUCCESS;
5298
5299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5300 }
5301 }
5302 else
5303 {
5304 /* memory destination. */
5305
5306 uint32_t fAccess;
5307 if (pImpl->pfnLockedU16)
5308 fAccess = IEM_ACCESS_DATA_RW;
5309 else /* BT */
5310 fAccess = IEM_ACCESS_DATA_R;
5311
5312 /** @todo test negative bit offsets! */
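 /* The source register is split in two below: the low bits (masked with
    0xf/0x1f/0x3f) select the bit within the operand, while the remaining,
    sign-extended bits are converted back into a byte offset (SAR by 4/5/6,
    then SHL by 1/2/3) and added to the effective address. E.g. for
    'bt word [mem], reg' with reg = 19, the address is adjusted by
    (19 >> 4) * 2 = 2 bytes and bit 19 & 15 = 3 of that word is tested. */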
5313 switch (pVCpu->iem.s.enmEffOpSize)
5314 {
5315 case IEMMODE_16BIT:
5316 IEM_MC_BEGIN(3, 2);
5317 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5318 IEM_MC_ARG(uint16_t, u16Src, 1);
5319 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5321 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5322
5323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5324 if (pImpl->pfnLockedU16)
5325 IEMOP_HLP_DONE_DECODING();
5326 else
5327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5328 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5329 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5330 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5331 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5332 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5333 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5334 IEM_MC_FETCH_EFLAGS(EFlags);
5335
5336 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5337 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5338 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5339 else
5340 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5341 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5342
5343 IEM_MC_COMMIT_EFLAGS(EFlags);
5344 IEM_MC_ADVANCE_RIP();
5345 IEM_MC_END();
5346 return VINF_SUCCESS;
5347
5348 case IEMMODE_32BIT:
5349 IEM_MC_BEGIN(3, 2);
5350 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5351 IEM_MC_ARG(uint32_t, u32Src, 1);
5352 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5354 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5355
5356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5357 if (pImpl->pfnLockedU16)
5358 IEMOP_HLP_DONE_DECODING();
5359 else
5360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5361 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5362 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5363 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5364 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5365 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5366 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5367 IEM_MC_FETCH_EFLAGS(EFlags);
5368
5369 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5370 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5371 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5372 else
5373 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5374 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5375
5376 IEM_MC_COMMIT_EFLAGS(EFlags);
5377 IEM_MC_ADVANCE_RIP();
5378 IEM_MC_END();
5379 return VINF_SUCCESS;
5380
5381 case IEMMODE_64BIT:
5382 IEM_MC_BEGIN(3, 2);
5383 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5384 IEM_MC_ARG(uint64_t, u64Src, 1);
5385 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5387 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5388
5389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5390 if (pImpl->pfnLockedU16)
5391 IEMOP_HLP_DONE_DECODING();
5392 else
5393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5394 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5395 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5396 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5397 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5398 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5399 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5400 IEM_MC_FETCH_EFLAGS(EFlags);
5401
5402 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5403 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5405 else
5406 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5407 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5408
5409 IEM_MC_COMMIT_EFLAGS(EFlags);
5410 IEM_MC_ADVANCE_RIP();
5411 IEM_MC_END();
5412 return VINF_SUCCESS;
5413
5414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5415 }
5416 }
5417}
5418
5419
5420/** Opcode 0x0f 0xa3. */
5421FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5422{
5423 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5424 IEMOP_HLP_MIN_386();
5425 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5426}
5427
5428
5429/**
5430 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5431 */
5432FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5433{
5434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5435 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5436
5437 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5438 {
5439 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5441
5442 switch (pVCpu->iem.s.enmEffOpSize)
5443 {
5444 case IEMMODE_16BIT:
5445 IEM_MC_BEGIN(4, 0);
5446 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5447 IEM_MC_ARG(uint16_t, u16Src, 1);
5448 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5449 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5450
5451 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5452 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5453 IEM_MC_REF_EFLAGS(pEFlags);
5454 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5455
5456 IEM_MC_ADVANCE_RIP();
5457 IEM_MC_END();
5458 return VINF_SUCCESS;
5459
5460 case IEMMODE_32BIT:
5461 IEM_MC_BEGIN(4, 0);
5462 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5463 IEM_MC_ARG(uint32_t, u32Src, 1);
5464 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5465 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5466
5467 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5468 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5469 IEM_MC_REF_EFLAGS(pEFlags);
5470 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5471
5472 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5473 IEM_MC_ADVANCE_RIP();
5474 IEM_MC_END();
5475 return VINF_SUCCESS;
5476
5477 case IEMMODE_64BIT:
5478 IEM_MC_BEGIN(4, 0);
5479 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5480 IEM_MC_ARG(uint64_t, u64Src, 1);
5481 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5482 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5483
5484 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5485 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5486 IEM_MC_REF_EFLAGS(pEFlags);
5487 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5488
5489 IEM_MC_ADVANCE_RIP();
5490 IEM_MC_END();
5491 return VINF_SUCCESS;
5492
5493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5494 }
5495 }
5496 else
5497 {
5498 switch (pVCpu->iem.s.enmEffOpSize)
5499 {
5500 case IEMMODE_16BIT:
5501 IEM_MC_BEGIN(4, 2);
5502 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5503 IEM_MC_ARG(uint16_t, u16Src, 1);
5504 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5505 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5507
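                /* Note: the trailing '1' tells the effective address calculation
                   that one immediate byte (the shift count) still follows, which
                   matters for RIP-relative addressing in 64-bit mode. */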
5508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5509 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5510 IEM_MC_ASSIGN(cShiftArg, cShift);
5511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5512 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5513 IEM_MC_FETCH_EFLAGS(EFlags);
5514 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5515 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5516
5517 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5518 IEM_MC_COMMIT_EFLAGS(EFlags);
5519 IEM_MC_ADVANCE_RIP();
5520 IEM_MC_END();
5521 return VINF_SUCCESS;
5522
5523 case IEMMODE_32BIT:
5524 IEM_MC_BEGIN(4, 2);
5525 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5526 IEM_MC_ARG(uint32_t, u32Src, 1);
5527 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5528 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5530
5531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5532 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5533 IEM_MC_ASSIGN(cShiftArg, cShift);
5534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5535 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5536 IEM_MC_FETCH_EFLAGS(EFlags);
5537 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5538 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5539
5540 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5541 IEM_MC_COMMIT_EFLAGS(EFlags);
5542 IEM_MC_ADVANCE_RIP();
5543 IEM_MC_END();
5544 return VINF_SUCCESS;
5545
5546 case IEMMODE_64BIT:
5547 IEM_MC_BEGIN(4, 2);
5548 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5549 IEM_MC_ARG(uint64_t, u64Src, 1);
5550 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5551 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5553
5554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5555 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5556 IEM_MC_ASSIGN(cShiftArg, cShift);
5557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5558 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5559 IEM_MC_FETCH_EFLAGS(EFlags);
5560 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5561 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5562
5563 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5564 IEM_MC_COMMIT_EFLAGS(EFlags);
5565 IEM_MC_ADVANCE_RIP();
5566 IEM_MC_END();
5567 return VINF_SUCCESS;
5568
5569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5570 }
5571 }
5572}
5573
5574
5575/**
5576 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5577 */
5578FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5579{
5580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5581 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5582
5583 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5584 {
5585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5586
5587 switch (pVCpu->iem.s.enmEffOpSize)
5588 {
5589 case IEMMODE_16BIT:
5590 IEM_MC_BEGIN(4, 0);
5591 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5592 IEM_MC_ARG(uint16_t, u16Src, 1);
5593 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5594 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5595
5596 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5597 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5598 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5599 IEM_MC_REF_EFLAGS(pEFlags);
5600 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5601
5602 IEM_MC_ADVANCE_RIP();
5603 IEM_MC_END();
5604 return VINF_SUCCESS;
5605
5606 case IEMMODE_32BIT:
5607 IEM_MC_BEGIN(4, 0);
5608 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5609 IEM_MC_ARG(uint32_t, u32Src, 1);
5610 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5611 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5612
5613 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5614 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5615 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5616 IEM_MC_REF_EFLAGS(pEFlags);
5617 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5618
5619 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5620 IEM_MC_ADVANCE_RIP();
5621 IEM_MC_END();
5622 return VINF_SUCCESS;
5623
5624 case IEMMODE_64BIT:
5625 IEM_MC_BEGIN(4, 0);
5626 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5627 IEM_MC_ARG(uint64_t, u64Src, 1);
5628 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5629 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5630
5631 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5632 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5633 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5634 IEM_MC_REF_EFLAGS(pEFlags);
5635 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5636
5637 IEM_MC_ADVANCE_RIP();
5638 IEM_MC_END();
5639 return VINF_SUCCESS;
5640
5641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5642 }
5643 }
5644 else
5645 {
5646 switch (pVCpu->iem.s.enmEffOpSize)
5647 {
5648 case IEMMODE_16BIT:
5649 IEM_MC_BEGIN(4, 2);
5650 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5651 IEM_MC_ARG(uint16_t, u16Src, 1);
5652 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5653 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5655
5656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5658 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5659 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5660 IEM_MC_FETCH_EFLAGS(EFlags);
5661 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5662 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5663
5664 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5665 IEM_MC_COMMIT_EFLAGS(EFlags);
5666 IEM_MC_ADVANCE_RIP();
5667 IEM_MC_END();
5668 return VINF_SUCCESS;
5669
5670 case IEMMODE_32BIT:
5671 IEM_MC_BEGIN(4, 2);
5672 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5673 IEM_MC_ARG(uint32_t, u32Src, 1);
5674 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5675 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5677
5678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5680 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5681 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5682 IEM_MC_FETCH_EFLAGS(EFlags);
5683 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5684 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5685
5686 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5687 IEM_MC_COMMIT_EFLAGS(EFlags);
5688 IEM_MC_ADVANCE_RIP();
5689 IEM_MC_END();
5690 return VINF_SUCCESS;
5691
5692 case IEMMODE_64BIT:
5693 IEM_MC_BEGIN(4, 2);
5694 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5695 IEM_MC_ARG(uint64_t, u64Src, 1);
5696 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5697 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5699
5700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5702 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5703 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5704 IEM_MC_FETCH_EFLAGS(EFlags);
5705 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5706 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5707
5708 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5709 IEM_MC_COMMIT_EFLAGS(EFlags);
5710 IEM_MC_ADVANCE_RIP();
5711 IEM_MC_END();
5712 return VINF_SUCCESS;
5713
5714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5715 }
5716 }
5717}
5718
5719
5720
5721/** Opcode 0x0f 0xa4. */
5722FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5723{
5724 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5725 IEMOP_HLP_MIN_386();
5726 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5727}
5728
5729
5730/** Opcode 0x0f 0xa5. */
5731FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5732{
5733 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5734 IEMOP_HLP_MIN_386();
5735 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5736}
5737
5738
5739/** Opcode 0x0f 0xa8. */
5740FNIEMOP_DEF(iemOp_push_gs)
5741{
5742 IEMOP_MNEMONIC(push_gs, "push gs");
5743 IEMOP_HLP_MIN_386();
5744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5745 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5746}
5747
5748
5749/** Opcode 0x0f 0xa9. */
5750FNIEMOP_DEF(iemOp_pop_gs)
5751{
5752 IEMOP_MNEMONIC(pop_gs, "pop gs");
5753 IEMOP_HLP_MIN_386();
5754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5755 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5756}
5757
5758
5759/** Opcode 0x0f 0xaa. */
5760FNIEMOP_DEF(iemOp_rsm)
5761{
5762 IEMOP_MNEMONIC(rsm, "rsm");
5763 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
 5764    /** @todo rsm - for the regular case (above handles only the SVM nested-guest
 5765     *        intercept); remember the IEMOP_HLP_MIN_386() check when implementing. */
5766 IEMOP_BITCH_ABOUT_STUB();
5767 return IEMOP_RAISE_INVALID_OPCODE();
5768}
5769
5772
5773/** Opcode 0x0f 0xab. */
5774FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5775{
5776 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5777 IEMOP_HLP_MIN_386();
5778 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5779}
5780
5781
5782/** Opcode 0x0f 0xac. */
5783FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5784{
5785 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5786 IEMOP_HLP_MIN_386();
5787 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5788}
5789
5790
5791/** Opcode 0x0f 0xad. */
5792FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5793{
5794 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5795 IEMOP_HLP_MIN_386();
5796 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5797}
5798
5799
5800/** Opcode 0x0f 0xae mem/0. */
5801FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5802{
5803 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5804 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5805 return IEMOP_RAISE_INVALID_OPCODE();
5806
5807 IEM_MC_BEGIN(3, 1);
5808 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5809 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5810 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5813 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5814 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5815 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5816 IEM_MC_END();
5817 return VINF_SUCCESS;
5818}
5819
5820
5821/** Opcode 0x0f 0xae mem/1. */
5822FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5823{
5824 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5825 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5826 return IEMOP_RAISE_INVALID_OPCODE();
5827
5828 IEM_MC_BEGIN(3, 1);
5829 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5830 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5831 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5834 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5835 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5836 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5837 IEM_MC_END();
5838 return VINF_SUCCESS;
5839}
5840
5841
5842/**
5843 * @opmaps grp15
5844 * @opcode !11/2
5845 * @oppfx none
5846 * @opcpuid sse
5847 * @opgroup og_sse_mxcsrsm
5848 * @opxcpttype 5
5849 * @optest op1=0 -> mxcsr=0
5850 * @optest op1=0x2083 -> mxcsr=0x2083
5851 * @optest op1=0xfffffffe -> value.xcpt=0xd
5852 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5853 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5854 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5855 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5856 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5857 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5858 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5859 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5860 */
5861FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
5862{
5863 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5864 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5865 return IEMOP_RAISE_INVALID_OPCODE();
5866
5867 IEM_MC_BEGIN(2, 0);
5868 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5869 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 5872    IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR */
5873 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5874 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5875 IEM_MC_END();
5876 return VINF_SUCCESS;
5877}
5878
5879
5880/**
5881 * @opmaps grp15
5882 * @opcode !11/3
5883 * @oppfx none
5884 * @opcpuid sse
5885 * @opgroup og_sse_mxcsrsm
5886 * @opxcpttype 5
5887 * @optest mxcsr=0 -> op1=0
5888 * @optest mxcsr=0x2083 -> op1=0x2083
5889 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5890 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5891 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5892 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
5893 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5894 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5895 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5896 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5897 */
5898FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
5899{
5900 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5901 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5902 return IEMOP_RAISE_INVALID_OPCODE();
5903
5904 IEM_MC_BEGIN(2, 0);
5905 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5906 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5909 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5910 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5911 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
5912 IEM_MC_END();
5913 return VINF_SUCCESS;
5914}
5915
5916
5917/**
5918 * @opmaps grp15
5919 * @opcode !11/4
5920 * @oppfx none
5921 * @opcpuid xsave
5922 * @opgroup og_system
5923 * @opxcpttype none
5924 */
5925FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
5926{
5927 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
5928 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5929 return IEMOP_RAISE_INVALID_OPCODE();
5930
5931 IEM_MC_BEGIN(3, 0);
5932 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5933 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5934 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5937 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5938 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5939 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
5940 IEM_MC_END();
5941 return VINF_SUCCESS;
5942}
5943
5944
5945/**
5946 * @opmaps grp15
5947 * @opcode !11/5
5948 * @oppfx none
5949 * @opcpuid xsave
5950 * @opgroup og_system
5951 * @opxcpttype none
5952 */
5953FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
5954{
5955 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
5956 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5957 return IEMOP_RAISE_INVALID_OPCODE();
5958
5959 IEM_MC_BEGIN(3, 0);
5960 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5961 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5962 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 5965    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor loads the state, cf. fxrstor above */
5966 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5967 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5968 IEM_MC_END();
5969 return VINF_SUCCESS;
5970}
5971
5972/** Opcode 0x0f 0xae mem/6. */
5973FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5974
5975/**
5976 * @opmaps grp15
5977 * @opcode !11/7
5978 * @oppfx none
5979 * @opcpuid clfsh
5980 * @opgroup og_cachectl
5981 * @optest op1=1 ->
5982 */
5983FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
5984{
5985 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5986 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
5987 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
5988
5989 IEM_MC_BEGIN(2, 0);
5990 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5991 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5994 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5995 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
5996 IEM_MC_END();
5997 return VINF_SUCCESS;
5998}
5999
6000/**
6001 * @opmaps grp15
6002 * @opcode !11/7
6003 * @oppfx 0x66
6004 * @opcpuid clflushopt
6005 * @opgroup og_cachectl
6006 * @optest op1=1 ->
6007 */
6008FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6009{
6010 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6011 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6012 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6013
6014 IEM_MC_BEGIN(2, 0);
6015 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6016 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6019 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6020 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6021 IEM_MC_END();
6022 return VINF_SUCCESS;
6023}
6024
6025
6026/** Opcode 0x0f 0xae 11b/5. */
6027FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6028{
6029 RT_NOREF_PV(bRm);
6030 IEMOP_MNEMONIC(lfence, "lfence");
6031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6032 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6033 return IEMOP_RAISE_INVALID_OPCODE();
6034
6035 IEM_MC_BEGIN(0, 0);
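    /* Without SSE2 on the host we cannot execute LFENCE directly; fall back to
       an alternative memory fence (a serializing locked operation). */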
6036 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6037 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6038 else
6039 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6040 IEM_MC_ADVANCE_RIP();
6041 IEM_MC_END();
6042 return VINF_SUCCESS;
6043}
6044
6045
6046/** Opcode 0x0f 0xae 11b/6. */
6047FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6048{
6049 RT_NOREF_PV(bRm);
6050 IEMOP_MNEMONIC(mfence, "mfence");
6051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6052 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6053 return IEMOP_RAISE_INVALID_OPCODE();
6054
6055 IEM_MC_BEGIN(0, 0);
6056 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6057 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6058 else
6059 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6060 IEM_MC_ADVANCE_RIP();
6061 IEM_MC_END();
6062 return VINF_SUCCESS;
6063}
6064
6065
6066/** Opcode 0x0f 0xae 11b/7. */
6067FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6068{
6069 RT_NOREF_PV(bRm);
6070 IEMOP_MNEMONIC(sfence, "sfence");
6071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6072 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6073 return IEMOP_RAISE_INVALID_OPCODE();
6074
6075 IEM_MC_BEGIN(0, 0);
6076 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6077 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6078 else
6079 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6080 IEM_MC_ADVANCE_RIP();
6081 IEM_MC_END();
6082 return VINF_SUCCESS;
6083}
6084
6085
6086/** Opcode 0xf3 0x0f 0xae 11b/0. */
6087FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6088
6089/** Opcode 0xf3 0x0f 0xae 11b/1. */
6090FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6091
6092/** Opcode 0xf3 0x0f 0xae 11b/2. */
6093FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6094
6095/** Opcode 0xf3 0x0f 0xae 11b/3. */
6096FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6097
6098
6099/**
6100 * Group 15 jump table for register variant.
6101 */
6102IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6103{ /* pfx: none, 066h, 0f3h, 0f2h */
6104 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6105 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6106 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6107 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6108 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6109 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6110 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6111 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6112};
6113AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6114
6115
6116/**
6117 * Group 15 jump table for memory variant.
6118 */
6119IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6120{ /* pfx: none, 066h, 0f3h, 0f2h */
6121 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6122 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6123 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6124 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6125 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6126 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6127 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6128 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6129};
6130AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6131
6132
6133/** Opcode 0x0f 0xae. */
6134FNIEMOP_DEF(iemOp_Grp15)
6135{
6136 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
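    /* Both jump tables are indexed by modrm.reg * 4 + operand prefix index
       (none/066h/0f3h/0f2h). */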
6138 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6139 /* register, register */
6140 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6141 + pVCpu->iem.s.idxPrefix], bRm);
6142 /* memory, register */
6143 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6144 + pVCpu->iem.s.idxPrefix], bRm);
6145}
6146
6147
6148/** Opcode 0x0f 0xaf. */
6149FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6150{
6151 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6152 IEMOP_HLP_MIN_386();
6153 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6154 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6155}
6156
6157
6158/** Opcode 0x0f 0xb0. */
6159FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6160{
6161 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6162 IEMOP_HLP_MIN_486();
6163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
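    /* cmpxchg semantics: if AL equals the destination, ZF is set and the source
       is written to the destination; otherwise ZF is cleared and the destination
       value is loaded into AL. */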
6164
6165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6166 {
6167 IEMOP_HLP_DONE_DECODING();
6168 IEM_MC_BEGIN(4, 0);
6169 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6170 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6171 IEM_MC_ARG(uint8_t, u8Src, 2);
6172 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6173
6174 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6175 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6176 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6177 IEM_MC_REF_EFLAGS(pEFlags);
6178 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6179 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6180 else
6181 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6182
6183 IEM_MC_ADVANCE_RIP();
6184 IEM_MC_END();
6185 }
6186 else
6187 {
6188 IEM_MC_BEGIN(4, 3);
6189 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6190 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6191 IEM_MC_ARG(uint8_t, u8Src, 2);
6192 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6194 IEM_MC_LOCAL(uint8_t, u8Al);
6195
6196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6197 IEMOP_HLP_DONE_DECODING();
6198 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6199 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6200 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6201 IEM_MC_FETCH_EFLAGS(EFlags);
6202 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6203 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6204 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6205 else
6206 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6207
6208 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6209 IEM_MC_COMMIT_EFLAGS(EFlags);
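        /* Write the accumulator back unconditionally; on a compare mismatch the
           helper loaded the destination value into it. */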
6210 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6211 IEM_MC_ADVANCE_RIP();
6212 IEM_MC_END();
6213 }
6214 return VINF_SUCCESS;
6215}
6216
6217/** Opcode 0x0f 0xb1. */
6218FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6219{
6220 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6221 IEMOP_HLP_MIN_486();
6222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6223
6224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6225 {
6226 IEMOP_HLP_DONE_DECODING();
6227 switch (pVCpu->iem.s.enmEffOpSize)
6228 {
6229 case IEMMODE_16BIT:
6230 IEM_MC_BEGIN(4, 0);
6231 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6232 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6233 IEM_MC_ARG(uint16_t, u16Src, 2);
6234 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6235
6236 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6237 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6238 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6239 IEM_MC_REF_EFLAGS(pEFlags);
6240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6241 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6242 else
6243 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6244
6245 IEM_MC_ADVANCE_RIP();
6246 IEM_MC_END();
6247 return VINF_SUCCESS;
6248
6249 case IEMMODE_32BIT:
6250 IEM_MC_BEGIN(4, 0);
6251 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6252 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6253 IEM_MC_ARG(uint32_t, u32Src, 2);
6254 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6255
6256 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6257 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6258 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6259 IEM_MC_REF_EFLAGS(pEFlags);
6260 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6261 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6262 else
6263 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6264
6265 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6266 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6267 IEM_MC_ADVANCE_RIP();
6268 IEM_MC_END();
6269 return VINF_SUCCESS;
6270
6271 case IEMMODE_64BIT:
6272 IEM_MC_BEGIN(4, 0);
6273 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6274 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
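            /* On 32-bit hosts (RT_ARCH_X86) the 64-bit source is passed to the
               assembly helper by reference rather than by value. */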
6275#ifdef RT_ARCH_X86
6276 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6277#else
6278 IEM_MC_ARG(uint64_t, u64Src, 2);
6279#endif
6280 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6281
6282 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6283 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6284 IEM_MC_REF_EFLAGS(pEFlags);
6285#ifdef RT_ARCH_X86
6286 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6287 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6288 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6289 else
6290 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6291#else
6292 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6293 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6294 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6295 else
6296 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6297#endif
6298
6299 IEM_MC_ADVANCE_RIP();
6300 IEM_MC_END();
6301 return VINF_SUCCESS;
6302
6303 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6304 }
6305 }
6306 else
6307 {
6308 switch (pVCpu->iem.s.enmEffOpSize)
6309 {
6310 case IEMMODE_16BIT:
6311 IEM_MC_BEGIN(4, 3);
6312 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6313 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6314 IEM_MC_ARG(uint16_t, u16Src, 2);
6315 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6317 IEM_MC_LOCAL(uint16_t, u16Ax);
6318
6319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6320 IEMOP_HLP_DONE_DECODING();
6321 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6322 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6323 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6324 IEM_MC_FETCH_EFLAGS(EFlags);
6325 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6326 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6327 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6328 else
6329 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6330
6331 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6332 IEM_MC_COMMIT_EFLAGS(EFlags);
6333 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6334 IEM_MC_ADVANCE_RIP();
6335 IEM_MC_END();
6336 return VINF_SUCCESS;
6337
6338 case IEMMODE_32BIT:
6339 IEM_MC_BEGIN(4, 3);
6340 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6341 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6342 IEM_MC_ARG(uint32_t, u32Src, 2);
6343 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6345 IEM_MC_LOCAL(uint32_t, u32Eax);
6346
6347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6348 IEMOP_HLP_DONE_DECODING();
6349 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6350 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6351 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6352 IEM_MC_FETCH_EFLAGS(EFlags);
6353 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6354 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6355 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6356 else
6357 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6358
6359 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6360 IEM_MC_COMMIT_EFLAGS(EFlags);
6361 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6362 IEM_MC_ADVANCE_RIP();
6363 IEM_MC_END();
6364 return VINF_SUCCESS;
6365
6366 case IEMMODE_64BIT:
6367 IEM_MC_BEGIN(4, 3);
6368 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6369 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6370#ifdef RT_ARCH_X86
6371 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6372#else
6373 IEM_MC_ARG(uint64_t, u64Src, 2);
6374#endif
6375 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6377 IEM_MC_LOCAL(uint64_t, u64Rax);
6378
6379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6380 IEMOP_HLP_DONE_DECODING();
6381 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6382 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6383 IEM_MC_FETCH_EFLAGS(EFlags);
6384 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6385#ifdef RT_ARCH_X86
6386 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6387 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6388 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6389 else
6390 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6391#else
6392 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6393 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6394 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6395 else
6396 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6397#endif
6398
6399 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6400 IEM_MC_COMMIT_EFLAGS(EFlags);
6401 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6402 IEM_MC_ADVANCE_RIP();
6403 IEM_MC_END();
6404 return VINF_SUCCESS;
6405
6406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6407 }
6408 }
6409}
6410
6411
6412FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6413{
6414 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6415 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6416
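    /* Mp operand layout: the offset comes first at the effective address,
       followed by the 16-bit selector at EA + 2, 4 or 8 (the operand size). */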
6417 switch (pVCpu->iem.s.enmEffOpSize)
6418 {
6419 case IEMMODE_16BIT:
6420 IEM_MC_BEGIN(5, 1);
6421 IEM_MC_ARG(uint16_t, uSel, 0);
6422 IEM_MC_ARG(uint16_t, offSeg, 1);
6423 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6424 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6425 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6426 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6429 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6430 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6431 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6432 IEM_MC_END();
6433 return VINF_SUCCESS;
6434
6435 case IEMMODE_32BIT:
6436 IEM_MC_BEGIN(5, 1);
6437 IEM_MC_ARG(uint16_t, uSel, 0);
6438 IEM_MC_ARG(uint32_t, offSeg, 1);
6439 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6440 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6441 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6442 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6445 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6446 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6447 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6448 IEM_MC_END();
6449 return VINF_SUCCESS;
6450
6451 case IEMMODE_64BIT:
6452 IEM_MC_BEGIN(5, 1);
6453 IEM_MC_ARG(uint16_t, uSel, 0);
6454 IEM_MC_ARG(uint64_t, offSeg, 1);
6455 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6456 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6457 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6458 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6461 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6462 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6463 else
6464 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6465 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6466 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6467 IEM_MC_END();
6468 return VINF_SUCCESS;
6469
6470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6471 }
6472}
6473
6474
6475/** Opcode 0x0f 0xb2. */
6476FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6477{
6478 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6479 IEMOP_HLP_MIN_386();
6480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
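    /* Mp is a memory-only operand; the register encoding raises #UD. */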
6481 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6482 return IEMOP_RAISE_INVALID_OPCODE();
6483 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6484}
6485
6486
6487/** Opcode 0x0f 0xb3. */
6488FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6489{
6490 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6491 IEMOP_HLP_MIN_386();
6492 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6493}
6494
6495
6496/** Opcode 0x0f 0xb4. */
6497FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6498{
6499 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6500 IEMOP_HLP_MIN_386();
6501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6502 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6503 return IEMOP_RAISE_INVALID_OPCODE();
6504 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6505}
6506
6507
6508/** Opcode 0x0f 0xb5. */
6509FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6510{
6511 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6512 IEMOP_HLP_MIN_386();
6513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6514 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6515 return IEMOP_RAISE_INVALID_OPCODE();
6516 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6517}
6518
6519
6520/** Opcode 0x0f 0xb6. */
6521FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6522{
6523 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6524 IEMOP_HLP_MIN_386();
6525
6526 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6527
6528 /*
6529 * If rm is denoting a register, no more instruction bytes.
6530 */
6531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6532 {
6533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6534 switch (pVCpu->iem.s.enmEffOpSize)
6535 {
6536 case IEMMODE_16BIT:
6537 IEM_MC_BEGIN(0, 1);
6538 IEM_MC_LOCAL(uint16_t, u16Value);
6539 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6540 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6541 IEM_MC_ADVANCE_RIP();
6542 IEM_MC_END();
6543 return VINF_SUCCESS;
6544
6545 case IEMMODE_32BIT:
6546 IEM_MC_BEGIN(0, 1);
6547 IEM_MC_LOCAL(uint32_t, u32Value);
6548 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6549 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6550 IEM_MC_ADVANCE_RIP();
6551 IEM_MC_END();
6552 return VINF_SUCCESS;
6553
6554 case IEMMODE_64BIT:
6555 IEM_MC_BEGIN(0, 1);
6556 IEM_MC_LOCAL(uint64_t, u64Value);
6557 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6558 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6559 IEM_MC_ADVANCE_RIP();
6560 IEM_MC_END();
6561 return VINF_SUCCESS;
6562
6563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6564 }
6565 }
6566 else
6567 {
6568 /*
6569 * We're loading a register from memory.
6570 */
6571 switch (pVCpu->iem.s.enmEffOpSize)
6572 {
6573 case IEMMODE_16BIT:
6574 IEM_MC_BEGIN(0, 2);
6575 IEM_MC_LOCAL(uint16_t, u16Value);
6576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6579 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6580 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6581 IEM_MC_ADVANCE_RIP();
6582 IEM_MC_END();
6583 return VINF_SUCCESS;
6584
6585 case IEMMODE_32BIT:
6586 IEM_MC_BEGIN(0, 2);
6587 IEM_MC_LOCAL(uint32_t, u32Value);
6588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6591 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6592 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6593 IEM_MC_ADVANCE_RIP();
6594 IEM_MC_END();
6595 return VINF_SUCCESS;
6596
6597 case IEMMODE_64BIT:
6598 IEM_MC_BEGIN(0, 2);
6599 IEM_MC_LOCAL(uint64_t, u64Value);
6600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6603 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6604 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6605 IEM_MC_ADVANCE_RIP();
6606 IEM_MC_END();
6607 return VINF_SUCCESS;
6608
6609 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6610 }
6611 }
6612}
6613
6614
6615/** Opcode 0x0f 0xb7. */
6616FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6617{
6618 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6619 IEMOP_HLP_MIN_386();
6620
6621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6622
6623 /** @todo Not entirely sure how the operand size prefix is handled here,
6624 * assuming that it will be ignored. Would be nice to have a few
 6625  *        tests for this. */
6626 /*
6627 * If rm is denoting a register, no more instruction bytes.
6628 */
6629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6630 {
6631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6632 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6633 {
6634 IEM_MC_BEGIN(0, 1);
6635 IEM_MC_LOCAL(uint32_t, u32Value);
6636 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6637 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6638 IEM_MC_ADVANCE_RIP();
6639 IEM_MC_END();
6640 }
6641 else
6642 {
6643 IEM_MC_BEGIN(0, 1);
6644 IEM_MC_LOCAL(uint64_t, u64Value);
6645 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6646 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6647 IEM_MC_ADVANCE_RIP();
6648 IEM_MC_END();
6649 }
6650 }
6651 else
6652 {
6653 /*
6654 * We're loading a register from memory.
6655 */
6656 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6657 {
6658 IEM_MC_BEGIN(0, 2);
6659 IEM_MC_LOCAL(uint32_t, u32Value);
6660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6663 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6664 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6665 IEM_MC_ADVANCE_RIP();
6666 IEM_MC_END();
6667 }
6668 else
6669 {
6670 IEM_MC_BEGIN(0, 2);
6671 IEM_MC_LOCAL(uint64_t, u64Value);
6672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6675 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6676 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6677 IEM_MC_ADVANCE_RIP();
6678 IEM_MC_END();
6679 }
6680 }
6681 return VINF_SUCCESS;
6682}
6683
6684
6685/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6686FNIEMOP_UD_STUB(iemOp_jmpe);
6687/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6688FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6689
6690
6691/**
6692 * @opcode 0xb9
6693 * @opinvalid intel-modrm
6694 * @optest ->
6695 */
6696FNIEMOP_DEF(iemOp_Grp10)
6697{
6698 /*
 6699  * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
 6700  * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6701 */
6702 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6703 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6704 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6705}
6706
6707
6708/** Opcode 0x0f 0xba. */
6709FNIEMOP_DEF(iemOp_Grp8)
6710{
6711 IEMOP_HLP_MIN_386();
6712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6713 PCIEMOPBINSIZES pImpl;
6714 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6715 {
6716 case 0: case 1: case 2: case 3:
6717 /* Both AMD and Intel want full modr/m decoding and imm8. */
6718 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6719 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6720 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6721 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6722 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6724 }
6725 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6726
6727 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6728 {
6729 /* register destination. */
6730 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6732
6733 switch (pVCpu->iem.s.enmEffOpSize)
6734 {
6735 case IEMMODE_16BIT:
6736 IEM_MC_BEGIN(3, 0);
6737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6738 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6739 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6740
6741 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6742 IEM_MC_REF_EFLAGS(pEFlags);
6743 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6744
6745 IEM_MC_ADVANCE_RIP();
6746 IEM_MC_END();
6747 return VINF_SUCCESS;
6748
6749 case IEMMODE_32BIT:
6750 IEM_MC_BEGIN(3, 0);
6751 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6752 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6753 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6754
6755 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6756 IEM_MC_REF_EFLAGS(pEFlags);
6757 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6758
6759 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6760 IEM_MC_ADVANCE_RIP();
6761 IEM_MC_END();
6762 return VINF_SUCCESS;
6763
6764 case IEMMODE_64BIT:
6765 IEM_MC_BEGIN(3, 0);
6766 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6767 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6768 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6769
6770 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6771 IEM_MC_REF_EFLAGS(pEFlags);
6772 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6773
6774 IEM_MC_ADVANCE_RIP();
6775 IEM_MC_END();
6776 return VINF_SUCCESS;
6777
6778 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6779 }
6780 }
6781 else
6782 {
6783 /* memory destination. */
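        /* Unlike the Gv forms, the immediate bit offset is simply masked to the
           operand width, so no effective address adjustment is needed here. */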
6784
6785 uint32_t fAccess;
6786 if (pImpl->pfnLockedU16)
6787 fAccess = IEM_ACCESS_DATA_RW;
6788 else /* BT */
6789 fAccess = IEM_ACCESS_DATA_R;
6790
6791 /** @todo test negative bit offsets! */
6792 switch (pVCpu->iem.s.enmEffOpSize)
6793 {
6794 case IEMMODE_16BIT:
6795 IEM_MC_BEGIN(3, 1);
6796 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6797 IEM_MC_ARG(uint16_t, u16Src, 1);
6798 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6800
6801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6802 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6803 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6804 if (pImpl->pfnLockedU16)
6805 IEMOP_HLP_DONE_DECODING();
6806 else
6807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6808 IEM_MC_FETCH_EFLAGS(EFlags);
6809 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6810 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6811 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6812 else
6813 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6814 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6815
6816 IEM_MC_COMMIT_EFLAGS(EFlags);
6817 IEM_MC_ADVANCE_RIP();
6818 IEM_MC_END();
6819 return VINF_SUCCESS;
6820
6821 case IEMMODE_32BIT:
6822 IEM_MC_BEGIN(3, 1);
6823 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6824 IEM_MC_ARG(uint32_t, u32Src, 1);
6825 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6827
6828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6829 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6830 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6831 if (pImpl->pfnLockedU16)
6832 IEMOP_HLP_DONE_DECODING();
6833 else
6834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6835 IEM_MC_FETCH_EFLAGS(EFlags);
6836 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6837 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6838 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6839 else
6840 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6841 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6842
6843 IEM_MC_COMMIT_EFLAGS(EFlags);
6844 IEM_MC_ADVANCE_RIP();
6845 IEM_MC_END();
6846 return VINF_SUCCESS;
6847
6848 case IEMMODE_64BIT:
6849 IEM_MC_BEGIN(3, 1);
6850 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6851 IEM_MC_ARG(uint64_t, u64Src, 1);
6852 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6854
6855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6856 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6857 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6858 if (pImpl->pfnLockedU16)
6859 IEMOP_HLP_DONE_DECODING();
6860 else
6861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6862 IEM_MC_FETCH_EFLAGS(EFlags);
6863 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6864 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6865 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6866 else
6867 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6868 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6869
6870 IEM_MC_COMMIT_EFLAGS(EFlags);
6871 IEM_MC_ADVANCE_RIP();
6872 IEM_MC_END();
6873 return VINF_SUCCESS;
6874
6875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6876 }
6877 }
6878}
6879
6880
6881/** Opcode 0x0f 0xbb. */
6882FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6883{
6884 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6885 IEMOP_HLP_MIN_386();
6886 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6887}
6888
6889
6890/** Opcode 0x0f 0xbc. */
6891FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6892{
6893 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6894 IEMOP_HLP_MIN_386();
6895 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6896 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6897}
6898
6899
6900/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6901FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6902
6903
6904/** Opcode 0x0f 0xbd. */
6905FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6906{
6907 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6908 IEMOP_HLP_MIN_386();
6909 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6910 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6911}
6912
6913
6914/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6915FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6916
6917
6918/** Opcode 0x0f 0xbe. */
6919FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6920{
6921 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6922 IEMOP_HLP_MIN_386();
6923
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925
6926 /*
6927 * If rm is denoting a register, no more instruction bytes.
6928 */
6929 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6930 {
6931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6932 switch (pVCpu->iem.s.enmEffOpSize)
6933 {
6934 case IEMMODE_16BIT:
6935 IEM_MC_BEGIN(0, 1);
6936 IEM_MC_LOCAL(uint16_t, u16Value);
6937 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6938 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6939 IEM_MC_ADVANCE_RIP();
6940 IEM_MC_END();
6941 return VINF_SUCCESS;
6942
6943 case IEMMODE_32BIT:
6944 IEM_MC_BEGIN(0, 1);
6945 IEM_MC_LOCAL(uint32_t, u32Value);
6946 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6947 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6948 IEM_MC_ADVANCE_RIP();
6949 IEM_MC_END();
6950 return VINF_SUCCESS;
6951
6952 case IEMMODE_64BIT:
6953 IEM_MC_BEGIN(0, 1);
6954 IEM_MC_LOCAL(uint64_t, u64Value);
6955 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6956 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6957 IEM_MC_ADVANCE_RIP();
6958 IEM_MC_END();
6959 return VINF_SUCCESS;
6960
6961 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6962 }
6963 }
6964 else
6965 {
6966 /*
6967 * We're loading a register from memory.
6968 */
6969 switch (pVCpu->iem.s.enmEffOpSize)
6970 {
6971 case IEMMODE_16BIT:
6972 IEM_MC_BEGIN(0, 2);
6973 IEM_MC_LOCAL(uint16_t, u16Value);
6974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6977 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6978 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6979 IEM_MC_ADVANCE_RIP();
6980 IEM_MC_END();
6981 return VINF_SUCCESS;
6982
6983 case IEMMODE_32BIT:
6984 IEM_MC_BEGIN(0, 2);
6985 IEM_MC_LOCAL(uint32_t, u32Value);
6986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6989 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6990 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6991 IEM_MC_ADVANCE_RIP();
6992 IEM_MC_END();
6993 return VINF_SUCCESS;
6994
6995 case IEMMODE_64BIT:
6996 IEM_MC_BEGIN(0, 2);
6997 IEM_MC_LOCAL(uint64_t, u64Value);
6998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7001 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7002 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7003 IEM_MC_ADVANCE_RIP();
7004 IEM_MC_END();
7005 return VINF_SUCCESS;
7006
7007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7008 }
7009 }
7010}
7011
7012
7013/** Opcode 0x0f 0xbf. */
7014FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7015{
7016 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7017 IEMOP_HLP_MIN_386();
7018
7019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7020
7021 /** @todo Not entirely sure how the operand size prefix is handled here,
7022 * assuming that it will be ignored. Would be nice to have a few
7023 * tests for this. */
7024 /*
7025 * If rm is denoting a register, no more instruction bytes.
7026 */
7027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7028 {
7029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7030 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7031 {
7032 IEM_MC_BEGIN(0, 1);
7033 IEM_MC_LOCAL(uint32_t, u32Value);
7034 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7035 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7036 IEM_MC_ADVANCE_RIP();
7037 IEM_MC_END();
7038 }
7039 else
7040 {
7041 IEM_MC_BEGIN(0, 1);
7042 IEM_MC_LOCAL(uint64_t, u64Value);
7043 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7044 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7045 IEM_MC_ADVANCE_RIP();
7046 IEM_MC_END();
7047 }
7048 }
7049 else
7050 {
7051 /*
7052 * We're loading a register from memory.
7053 */
7054 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7055 {
7056 IEM_MC_BEGIN(0, 2);
7057 IEM_MC_LOCAL(uint32_t, u32Value);
7058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7061 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7062 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7063 IEM_MC_ADVANCE_RIP();
7064 IEM_MC_END();
7065 }
7066 else
7067 {
7068 IEM_MC_BEGIN(0, 2);
7069 IEM_MC_LOCAL(uint64_t, u64Value);
7070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7073 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7074 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7075 IEM_MC_ADVANCE_RIP();
7076 IEM_MC_END();
7077 }
7078 }
7079 return VINF_SUCCESS;
7080}
7081
7082
7083/** Opcode 0x0f 0xc0. */
7084FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7085{
7086 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7087 IEMOP_HLP_MIN_486();
7088 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7089
7090 /*
7091 * If rm is denoting a register, no more instruction bytes.
7092 */
7093 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7094 {
7095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7096
7097 IEM_MC_BEGIN(3, 0);
7098 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7099 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7100 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7101
7102 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7103 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7104 IEM_MC_REF_EFLAGS(pEFlags);
7105 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7106
7107 IEM_MC_ADVANCE_RIP();
7108 IEM_MC_END();
7109 }
7110 else
7111 {
7112 /*
7113 * We're accessing memory.
7114 */
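        /* A sketch of the dance below: the guest register is first copied into
           a local, pu8Reg then points at that copy so the worker can write the
           old memory value into it, and only after the memory operand has been
           committed and unmapped is the copy stored back to the guest register. */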
7115 IEM_MC_BEGIN(3, 3);
7116 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7117 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7118 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7119 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7121
7122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7123 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7124 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7125 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7126 IEM_MC_FETCH_EFLAGS(EFlags);
7127 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7128 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7129 else
7130 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7131
7132 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7133 IEM_MC_COMMIT_EFLAGS(EFlags);
7134 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7135 IEM_MC_ADVANCE_RIP();
7136 IEM_MC_END();
7137 return VINF_SUCCESS;
7138 }
7139 return VINF_SUCCESS;
7140}
7141
7142
7143/** Opcode 0x0f 0xc1. */
7144FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7145{
7146 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7147 IEMOP_HLP_MIN_486();
7148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7149
7150 /*
7151 * If rm is denoting a register, no more instruction bytes.
7152 */
7153 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7154 {
7155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7156
7157 switch (pVCpu->iem.s.enmEffOpSize)
7158 {
7159 case IEMMODE_16BIT:
7160 IEM_MC_BEGIN(3, 0);
7161 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7162 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7163 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7164
7165 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7166 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7167 IEM_MC_REF_EFLAGS(pEFlags);
7168 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7169
7170 IEM_MC_ADVANCE_RIP();
7171 IEM_MC_END();
7172 return VINF_SUCCESS;
7173
7174 case IEMMODE_32BIT:
7175 IEM_MC_BEGIN(3, 0);
7176 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7177 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7178 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7179
7180 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7181 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7182 IEM_MC_REF_EFLAGS(pEFlags);
7183 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7184
7185 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7186 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7187 IEM_MC_ADVANCE_RIP();
7188 IEM_MC_END();
7189 return VINF_SUCCESS;
7190
7191 case IEMMODE_64BIT:
7192 IEM_MC_BEGIN(3, 0);
7193 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7194 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7195 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7196
7197 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7198 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7199 IEM_MC_REF_EFLAGS(pEFlags);
7200 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7201
7202 IEM_MC_ADVANCE_RIP();
7203 IEM_MC_END();
7204 return VINF_SUCCESS;
7205
7206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7207 }
7208 }
7209 else
7210 {
7211 /*
7212 * We're accessing memory.
7213 */
7214 switch (pVCpu->iem.s.enmEffOpSize)
7215 {
7216 case IEMMODE_16BIT:
7217 IEM_MC_BEGIN(3, 3);
7218 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7219 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7220 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7221 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7223
7224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7225 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7226 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7227 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7228 IEM_MC_FETCH_EFLAGS(EFlags);
7229 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7230 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7231 else
7232 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7233
7234 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7235 IEM_MC_COMMIT_EFLAGS(EFlags);
7236 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7237 IEM_MC_ADVANCE_RIP();
7238 IEM_MC_END();
7239 return VINF_SUCCESS;
7240
7241 case IEMMODE_32BIT:
7242 IEM_MC_BEGIN(3, 3);
7243 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7244 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7245 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7246 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7248
7249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7250 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7251 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7252 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7253 IEM_MC_FETCH_EFLAGS(EFlags);
7254 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7255 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7256 else
7257 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7258
7259 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7260 IEM_MC_COMMIT_EFLAGS(EFlags);
7261 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7262 IEM_MC_ADVANCE_RIP();
7263 IEM_MC_END();
7264 return VINF_SUCCESS;
7265
7266 case IEMMODE_64BIT:
7267 IEM_MC_BEGIN(3, 3);
7268 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7269 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7270 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7271 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7273
7274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7275 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7276 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7277 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7278 IEM_MC_FETCH_EFLAGS(EFlags);
7279 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7280 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7281 else
7282 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7283
7284 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7285 IEM_MC_COMMIT_EFLAGS(EFlags);
7286 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7287 IEM_MC_ADVANCE_RIP();
7288 IEM_MC_END();
7289 return VINF_SUCCESS;
7290
7291 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7292 }
7293 }
7294}
7295
7296
7297/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7298FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7299/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7300FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7301/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7302FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7303/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7304FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7305
7306
7307/** Opcode 0x0f 0xc3. */
7308FNIEMOP_DEF(iemOp_movnti_My_Gy)
7309{
7310 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7311
7312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7313
7314 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7315 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7316 {
7317 switch (pVCpu->iem.s.enmEffOpSize)
7318 {
7319 case IEMMODE_32BIT:
7320 IEM_MC_BEGIN(0, 2);
7321 IEM_MC_LOCAL(uint32_t, u32Value);
7322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7323
7324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7326 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7327 return IEMOP_RAISE_INVALID_OPCODE();
7328
7329 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7330 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7331 IEM_MC_ADVANCE_RIP();
7332 IEM_MC_END();
7333 break;
7334
7335 case IEMMODE_64BIT:
7336 IEM_MC_BEGIN(0, 2);
7337 IEM_MC_LOCAL(uint64_t, u64Value);
7338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7339
7340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7342 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7343 return IEMOP_RAISE_INVALID_OPCODE();
7344
7345 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7346 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7347 IEM_MC_ADVANCE_RIP();
7348 IEM_MC_END();
7349 break;
7350
7351 case IEMMODE_16BIT:
7352 /** @todo check this form. */
7353 return IEMOP_RAISE_INVALID_OPCODE();
7354 }
7355 }
7356 else
7357 return IEMOP_RAISE_INVALID_OPCODE();
7358 return VINF_SUCCESS;
7359}
7360/* Opcode 0x66 0x0f 0xc3 - invalid */
7361/* Opcode 0xf3 0x0f 0xc3 - invalid */
7362/* Opcode 0xf2 0x0f 0xc3 - invalid */
7363
7364/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7365FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7366/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7367FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7368/* Opcode 0xf3 0x0f 0xc4 - invalid */
7369/* Opcode 0xf2 0x0f 0xc4 - invalid */
7370
7371/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7372FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7373/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7374FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7375/* Opcode 0xf3 0x0f 0xc5 - invalid */
7376/* Opcode 0xf2 0x0f 0xc5 - invalid */
7377
7378/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7379FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7380/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7381FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7382/* Opcode 0xf3 0x0f 0xc6 - invalid */
7383/* Opcode 0xf2 0x0f 0xc6 - invalid */
7384
7385
7386/** Opcode 0x0f 0xc7 !11/1. */
7387FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7388{
7389 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7390
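    /* CMPXCHG8B compares EDX:EAX with the 64-bit memory operand; on a match it
       sets ZF and stores ECX:EBX to memory, otherwise it clears ZF and loads
       the memory value into EDX:EAX (see the ZF check at the end below). */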
7391 IEM_MC_BEGIN(4, 3);
7392 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7393 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7394 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7395 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7396 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7397 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7399
7400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7401 IEMOP_HLP_DONE_DECODING();
7402 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7403
7404 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7405 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7406 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7407
7408 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7409 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7410 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7411
7412 IEM_MC_FETCH_EFLAGS(EFlags);
7413 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7414 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7415 else
7416 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7417
7418 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7419 IEM_MC_COMMIT_EFLAGS(EFlags);
7420 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7421 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7422 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7423 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7424 IEM_MC_ENDIF();
7425 IEM_MC_ADVANCE_RIP();
7426
7427 IEM_MC_END();
7428 return VINF_SUCCESS;
7429}
7430
7431
7432/** Opcode REX.W 0x0f 0xc7 !11/1. */
7433FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7434{
7435 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
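    /* CMPXCHG16B is the REX.W-selected, 64-bit mode sibling of CMPXCHG8B: it
       compares RDX:RAX with the 128-bit memory operand, which must be 16-byte
       aligned (#GP otherwise, see the alignment check below), and requires
       CPUID.CX16 as tested here. */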
7436 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7437 {
7438#if 0
7439 RT_NOREF(bRm);
7440 IEMOP_BITCH_ABOUT_STUB();
7441 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7442#else
7443 IEM_MC_BEGIN(4, 3);
7444 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7445 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7446 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7447 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7448 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7449 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7451
7452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7453 IEMOP_HLP_DONE_DECODING();
7454 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7455 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7456
7457 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7458 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7459 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7460
7461 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7462 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7463 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7464
7465 IEM_MC_FETCH_EFLAGS(EFlags);
7466# ifdef RT_ARCH_AMD64
7467 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7468 {
7469 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7470 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7471 else
7472 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7473 }
7474 else
7475# endif
7476 {
7477 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7478 accesses that are not all atomic, which works fine in a uni-CPU guest
7479 configuration (ignoring DMA). If guest SMP is active we have no choice
7480 but to use a rendezvous callback here. Sigh. */
7481 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7482 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7483 else
7484 {
7485 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7486 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7487 }
7488 }
7489
7490 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7491 IEM_MC_COMMIT_EFLAGS(EFlags);
7492 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7493 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7494 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7495 IEM_MC_ENDIF();
7496 IEM_MC_ADVANCE_RIP();
7497
7498 IEM_MC_END();
7499 return VINF_SUCCESS;
7500#endif
7501 }
7502 Log(("cmpxchg16b -> #UD\n"));
7503 return IEMOP_RAISE_INVALID_OPCODE();
7504}
7505
7506FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7507{
7508 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7509 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7510 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7511}
7512
7513/** Opcode 0x0f 0xc7 11/6. */
7514FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7515
7516/** Opcode 0x0f 0xc7 !11/6. */
7517FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7518
7519/** Opcode 0x66 0x0f 0xc7 !11/6. */
7520FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7521
7522/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7523FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7524
7525/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7526FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7527
7528/** Opcode 0x0f 0xc7 11/7. */
7529FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7530
7531
7532/**
7533 * Group 9 jump table for register variant.
7534 */
7535IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7536{ /* pfx: none, 066h, 0f3h, 0f2h */
7537 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7538 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7539 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7540 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7541 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7542 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7543 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7544 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7545};
7546AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7547
7548
7549/**
7550 * Group 9 jump table for memory variant.
7551 */
7552IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7553{ /* pfx: none, 066h, 0f3h, 0f2h */
7554 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7555 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7556 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7557 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7558 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7559 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7560 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7561 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7562};
7563AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7564
7565
7566/** Opcode 0x0f 0xc7. */
7567FNIEMOP_DEF(iemOp_Grp9)
7568{
7569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7570 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7571 /* register, register */
7572 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7573 + pVCpu->iem.s.idxPrefix], bRm);
7574 /* memory, register */
7575 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7576 + pVCpu->iem.s.idxPrefix], bRm);
7577}
7578
7579
7580/**
7581 * Common 'bswap register' helper.
7582 */
7583FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7584{
7585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7586 switch (pVCpu->iem.s.enmEffOpSize)
7587 {
7588 case IEMMODE_16BIT:
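            /* Note: the SDM leaves BSWAP with a 16-bit operand undefined; this
               emulation hands a 32-bit register reference to the 16-bit worker
               (see the "don't clear the high dword" remark below). */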
7589 IEM_MC_BEGIN(1, 0);
7590 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7591 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7592 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7593 IEM_MC_ADVANCE_RIP();
7594 IEM_MC_END();
7595 return VINF_SUCCESS;
7596
7597 case IEMMODE_32BIT:
7598 IEM_MC_BEGIN(1, 0);
7599 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7600 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7601 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7602 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7603 IEM_MC_ADVANCE_RIP();
7604 IEM_MC_END();
7605 return VINF_SUCCESS;
7606
7607 case IEMMODE_64BIT:
7608 IEM_MC_BEGIN(1, 0);
7609 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7610 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7611 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7612 IEM_MC_ADVANCE_RIP();
7613 IEM_MC_END();
7614 return VINF_SUCCESS;
7615
7616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7617 }
7618}
7619
7620
7621/** Opcode 0x0f 0xc8. */
7622FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7623{
7624 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7625 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7626 prefix, but it appears REX.B is actually the correct prefix. For a
7627 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7628 IEMOP_HLP_MIN_486();
7629 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7630}
7631
7632
7633/** Opcode 0x0f 0xc9. */
7634FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7635{
7636 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7637 IEMOP_HLP_MIN_486();
7638 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7639}
7640
7641
7642/** Opcode 0x0f 0xca. */
7643FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7644{
7645 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7646 IEMOP_HLP_MIN_486();
7647 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7648}
7649
7650
7651/** Opcode 0x0f 0xcb. */
7652FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7653{
7654 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7655 IEMOP_HLP_MIN_486();
7656 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7657}
7658
7659
7660/** Opcode 0x0f 0xcc. */
7661FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7662{
7663 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7664 IEMOP_HLP_MIN_486();
7665 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7666}
7667
7668
7669/** Opcode 0x0f 0xcd. */
7670FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7671{
7672 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7673 IEMOP_HLP_MIN_486();
7674 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7675}
7676
7677
7678/** Opcode 0x0f 0xce. */
7679FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7680{
7681 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7682 IEMOP_HLP_MIN_486();
7683 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7684}
7685
7686
7687/** Opcode 0x0f 0xcf. */
7688FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7689{
7690 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7691 IEMOP_HLP_MIN_486();
7692 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7693}
7694
7695
7696/* Opcode 0x0f 0xd0 - invalid */
7697/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7698FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7699/* Opcode 0xf3 0x0f 0xd0 - invalid */
7700/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7701FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7702
7703/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7704FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7705/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
7706FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7707/* Opcode 0xf3 0x0f 0xd1 - invalid */
7708/* Opcode 0xf2 0x0f 0xd1 - invalid */
7709
7710/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7711FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7712/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7713FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7714/* Opcode 0xf3 0x0f 0xd2 - invalid */
7715/* Opcode 0xf2 0x0f 0xd2 - invalid */
7716
7717/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7718FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7719/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
7720FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
7721/* Opcode 0xf3 0x0f 0xd3 - invalid */
7722/* Opcode 0xf2 0x0f 0xd3 - invalid */
7723
7724/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7725FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7726/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
7727FNIEMOP_STUB(iemOp_paddq_Vx_W);
7728/* Opcode 0xf3 0x0f 0xd4 - invalid */
7729/* Opcode 0xf2 0x0f 0xd4 - invalid */
7730
7731/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7732FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7733/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
7734FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
7735/* Opcode 0xf3 0x0f 0xd5 - invalid */
7736/* Opcode 0xf2 0x0f 0xd5 - invalid */
7737
7738/* Opcode 0x0f 0xd6 - invalid */
7739
7740/**
7741 * @opcode 0xd6
7742 * @oppfx 0x66
7743 * @opcpuid sse2
7744 * @opgroup og_sse2_pcksclr_datamove
7745 * @opxcpttype none
7746 * @optest op1=-1 op2=2 -> op1=2
7747 * @optest op1=0 op2=-42 -> op1=-42
7748 */
7749FNIEMOP_DEF(iemOp_movq_Wq_Vq)
7750{
7751 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
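    /* Note: in the register form the 64-bit value is stored with zero extension
       to the full 128 bits of the destination XMM register (the WqZxReg operand
       form above); the memory form stores just the 64 bits. */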
7752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7753 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7754 {
7755 /*
7756 * Register, register.
7757 */
7758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7759 IEM_MC_BEGIN(0, 2);
7760 IEM_MC_LOCAL(uint64_t, uSrc);
7761
7762 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7763 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7764
7765 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7766 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7767
7768 IEM_MC_ADVANCE_RIP();
7769 IEM_MC_END();
7770 }
7771 else
7772 {
7773 /*
7774 * Memory, register.
7775 */
7776 IEM_MC_BEGIN(0, 2);
7777 IEM_MC_LOCAL(uint64_t, uSrc);
7778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7779
7780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7782 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7783 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7784
7785 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7786 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7787
7788 IEM_MC_ADVANCE_RIP();
7789 IEM_MC_END();
7790 }
7791 return VINF_SUCCESS;
7792}
7793
7794
7795/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7796FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7797/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7798FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7799#if 0
7800FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7801{
7802 /* The docs say register only. */
7803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7804
7805 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7806 {
7807 case IEM_OP_PRF_SIZE_OP: /* SSE */
7808 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7809 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7810 IEM_MC_BEGIN(2, 0);
7811 IEM_MC_ARG(uint64_t *, pDst, 0);
7812 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7813 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7814 IEM_MC_PREPARE_SSE_USAGE();
7815 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7816 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7817 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7818 IEM_MC_ADVANCE_RIP();
7819 IEM_MC_END();
7820 return VINF_SUCCESS;
7821
7822 case 0: /* MMX */
7823 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7824 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7825 IEM_MC_BEGIN(2, 0);
7826 IEM_MC_ARG(uint64_t *, pDst, 0);
7827 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7828 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7829 IEM_MC_PREPARE_FPU_USAGE();
7830 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7831 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7832 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7833 IEM_MC_ADVANCE_RIP();
7834 IEM_MC_END();
7835 return VINF_SUCCESS;
7836
7837 default:
7838 return IEMOP_RAISE_INVALID_OPCODE();
7839 }
7840}
7841#endif
7842
7843
7844/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7845FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7846{
7847 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7848 /** @todo testcase: Check that the instruction implicitly clears the high
7849 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
7850 * and opcode modifications are made to work with the whole width (not
7851 * just 128). */
7852 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7853 /* The docs say register only. */
7854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
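    /* PMOVMSKB gathers the most significant bit of each of the 8 bytes in the
       MMX source into bits 0..7 of the destination GPR. */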
7855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7856 {
7857 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7858 IEM_MC_BEGIN(2, 0);
7859 IEM_MC_ARG(uint64_t *, pDst, 0);
7860 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7861 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7862 IEM_MC_PREPARE_FPU_USAGE();
7863 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7864 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7865 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7866 IEM_MC_ADVANCE_RIP();
7867 IEM_MC_END();
7868 return VINF_SUCCESS;
7869 }
7870 return IEMOP_RAISE_INVALID_OPCODE();
7871}
7872
7873 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
7874FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
7875{
7876 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7877 /** @todo testcase: Check that the instruction implicitly clears the high
7878 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
7879 * and opcode modifications are made to work with the whole width (not
7880 * just 128). */
7881 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
7882 /* The docs say register only. */
7883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7885 {
7886 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7887 IEM_MC_BEGIN(2, 0);
7888 IEM_MC_ARG(uint64_t *, pDst, 0);
7889 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7891 IEM_MC_PREPARE_SSE_USAGE();
7892 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7893 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7894 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7895 IEM_MC_ADVANCE_RIP();
7896 IEM_MC_END();
7897 return VINF_SUCCESS;
7898 }
7899 return IEMOP_RAISE_INVALID_OPCODE();
7900}
7901
7902/* Opcode 0xf3 0x0f 0xd7 - invalid */
7903/* Opcode 0xf2 0x0f 0xd7 - invalid */
7904
7905
7906/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7907FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7908/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
7909FNIEMOP_STUB(iemOp_psubusb_Vx_W);
7910/* Opcode 0xf3 0x0f 0xd8 - invalid */
7911/* Opcode 0xf2 0x0f 0xd8 - invalid */
7912
7913/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7914FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7915/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
7916FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
7917/* Opcode 0xf3 0x0f 0xd9 - invalid */
7918/* Opcode 0xf2 0x0f 0xd9 - invalid */
7919
7920/** Opcode 0x0f 0xda - pminub Pq, Qq */
7921FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7922/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
7923FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
7924/* Opcode 0xf3 0x0f 0xda - invalid */
7925/* Opcode 0xf2 0x0f 0xda - invalid */
7926
7927/** Opcode 0x0f 0xdb - pand Pq, Qq */
7928FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7929/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
7930FNIEMOP_STUB(iemOp_pand_Vx_W);
7931/* Opcode 0xf3 0x0f 0xdb - invalid */
7932/* Opcode 0xf2 0x0f 0xdb - invalid */
7933
7934/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7935FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7936/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
7937FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
7938/* Opcode 0xf3 0x0f 0xdc - invalid */
7939/* Opcode 0xf2 0x0f 0xdc - invalid */
7940
7941/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7942FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7943/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
7944FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
7945/* Opcode 0xf3 0x0f 0xdd - invalid */
7946/* Opcode 0xf2 0x0f 0xdd - invalid */
7947
7948/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7949FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7950/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
7951FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
7952/* Opcode 0xf3 0x0f 0xde - invalid */
7953/* Opcode 0xf2 0x0f 0xde - invalid */
7954
7955/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7956FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7957/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
7958FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
7959/* Opcode 0xf3 0x0f 0xdf - invalid */
7960/* Opcode 0xf2 0x0f 0xdf - invalid */
7961
7962/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7963FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7964/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
7965FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
7966/* Opcode 0xf3 0x0f 0xe0 - invalid */
7967/* Opcode 0xf2 0x0f 0xe0 - invalid */
7968
7969/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7970FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7971/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
7972FNIEMOP_STUB(iemOp_psraw_Vx_W);
7973/* Opcode 0xf3 0x0f 0xe1 - invalid */
7974/* Opcode 0xf2 0x0f 0xe1 - invalid */
7975
7976/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7977FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7978/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
7979FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
7980/* Opcode 0xf3 0x0f 0xe2 - invalid */
7981/* Opcode 0xf2 0x0f 0xe2 - invalid */
7982
7983/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7984FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7985/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
7986FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
7987/* Opcode 0xf3 0x0f 0xe3 - invalid */
7988/* Opcode 0xf2 0x0f 0xe3 - invalid */
7989
7990/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7991FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7992/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
7993FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
7994/* Opcode 0xf3 0x0f 0xe4 - invalid */
7995/* Opcode 0xf2 0x0f 0xe4 - invalid */
7996
7997/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7998FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7999/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8000FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8001/* Opcode 0xf3 0x0f 0xe5 - invalid */
8002/* Opcode 0xf2 0x0f 0xe5 - invalid */
8003
8004/* Opcode 0x0f 0xe6 - invalid */
8005/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8006FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8007/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8008FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8009/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8010FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8011
8012
8013/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8014FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8015{
8016 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
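    /* MOVNTQ is a non-temporal (cache bypassing) 64-bit store; the non-temporal
       hint is not modelled here, the instruction is emulated as a plain MMX
       register-to-memory store. */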
8017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8018 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8019 {
8020 /* Register, memory. */
8021 IEM_MC_BEGIN(0, 2);
8022 IEM_MC_LOCAL(uint64_t, uSrc);
8023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8024
8025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8027 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8028 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8029
8030 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8031 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8032
8033 IEM_MC_ADVANCE_RIP();
8034 IEM_MC_END();
8035 return VINF_SUCCESS;
8036 }
8037 /* The register, register encoding is invalid. */
8038 return IEMOP_RAISE_INVALID_OPCODE();
8039}
8040
8041/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8042FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8043{
8044 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8045 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8046 {
8047 /* Register, memory. */
8048 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8049 IEM_MC_BEGIN(0, 2);
8050 IEM_MC_LOCAL(RTUINT128U, uSrc);
8051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8052
8053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8055 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8056 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8057
8058 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8059 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8060
8061 IEM_MC_ADVANCE_RIP();
8062 IEM_MC_END();
8063 return VINF_SUCCESS;
8064 }
8065
8066 /* The register, register encoding is invalid. */
8067 return IEMOP_RAISE_INVALID_OPCODE();
8068}
8069
8070/* Opcode 0xf3 0x0f 0xe7 - invalid */
8071/* Opcode 0xf2 0x0f 0xe7 - invalid */
8072
8073
8074/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8075FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8076/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8077FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8078/* Opcode 0xf3 0x0f 0xe8 - invalid */
8079/* Opcode 0xf2 0x0f 0xe8 - invalid */
8080
8081/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8082FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8083/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8084FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8085/* Opcode 0xf3 0x0f 0xe9 - invalid */
8086/* Opcode 0xf2 0x0f 0xe9 - invalid */
8087
8088/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8089FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8090/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8091FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8092/* Opcode 0xf3 0x0f 0xea - invalid */
8093/* Opcode 0xf2 0x0f 0xea - invalid */
8094
8095/** Opcode 0x0f 0xeb - por Pq, Qq */
8096FNIEMOP_STUB(iemOp_por_Pq_Qq);
8097/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8098FNIEMOP_STUB(iemOp_por_Vx_W);
8099/* Opcode 0xf3 0x0f 0xeb - invalid */
8100/* Opcode 0xf2 0x0f 0xeb - invalid */
8101
8102/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8103FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8104/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8105FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8106/* Opcode 0xf3 0x0f 0xec - invalid */
8107/* Opcode 0xf2 0x0f 0xec - invalid */
8108
8109/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8110FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8111/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8112FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8113/* Opcode 0xf3 0x0f 0xed - invalid */
8114/* Opcode 0xf2 0x0f 0xed - invalid */
8115
8116/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8117FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8118/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8119FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8120/* Opcode 0xf3 0x0f 0xee - invalid */
8121/* Opcode 0xf2 0x0f 0xee - invalid */
8122
8123
8124/** Opcode 0x0f 0xef - pxor Pq, Qq */
8125FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8126{
8127 IEMOP_MNEMONIC(pxor, "pxor");
8128 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8129}
8130
8131/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8132FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8133{
8134 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8135 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8136}
8137
8138/* Opcode 0xf3 0x0f 0xef - invalid */
8139/* Opcode 0xf2 0x0f 0xef - invalid */
8140
8141/* Opcode 0x0f 0xf0 - invalid */
8142/* Opcode 0x66 0x0f 0xf0 - invalid */
8143/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8144FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8145
8146/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8147FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8148/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8149FNIEMOP_STUB(iemOp_psllw_Vx_W);
8150/* Opcode 0xf2 0x0f 0xf1 - invalid */
8151
8152/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8153FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8154/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8155FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8156/* Opcode 0xf2 0x0f 0xf2 - invalid */
8157
8158/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8159FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8160/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8161FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8162/* Opcode 0xf2 0x0f 0xf3 - invalid */
8163
8164/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8165FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8166/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8167FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8168/* Opcode 0xf2 0x0f 0xf4 - invalid */
8169
8170/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8171FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8172/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8173FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8174/* Opcode 0xf2 0x0f 0xf5 - invalid */
8175
8176/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8177FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8178/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8179FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8180/* Opcode 0xf2 0x0f 0xf6 - invalid */
8181
8182/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8183FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8184/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8185FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8186/* Opcode 0xf2 0x0f 0xf7 - invalid */
8187
8188/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8189FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8190/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8191FNIEMOP_STUB(iemOp_psubb_Vx_W);
8192/* Opcode 0xf2 0x0f 0xf8 - invalid */
8193
8194/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8195FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8196/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8197FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8198/* Opcode 0xf2 0x0f 0xf9 - invalid */
8199
8200/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8201FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8202/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8203FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8204/* Opcode 0xf2 0x0f 0xfa - invalid */
8205
8206/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8207FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8208/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8209FNIEMOP_STUB(iemOp_psubq_Vx_W);
8210/* Opcode 0xf2 0x0f 0xfb - invalid */
8211
8212/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8213FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8214/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8215FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8216/* Opcode 0xf2 0x0f 0xfc - invalid */
8217
8218/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8219FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8220/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8221FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8222/* Opcode 0xf2 0x0f 0xfd - invalid */
8223
8224/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8225FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8226/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8227FNIEMOP_STUB(iemOp_paddd_Vx_W);
8228/* Opcode 0xf2 0x0f 0xfe - invalid */
8229
8230
8231/** Opcode **** 0x0f 0xff - UD0 */
8232FNIEMOP_DEF(iemOp_ud0)
8233{
8234 IEMOP_MNEMONIC(ud0, "ud0");
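    /* On Intel CPUs UD0 consumes a ModR/M byte (and any displacement), so the
       effective address is decoded, though never accessed, before #UD is
       raised; the non-Intel path raises #UD without eating further bytes. */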
8235 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8236 {
8237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8238#ifndef TST_IEM_CHECK_MC
8239 RTGCPTR GCPtrEff;
8240 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8241 if (rcStrict != VINF_SUCCESS)
8242 return rcStrict;
8243#endif
8244 IEMOP_HLP_DONE_DECODING();
8245 }
8246 return IEMOP_RAISE_INVALID_OPCODE();
8247}
8248
8249
8250
8251/**
8252 * Two byte opcode map, first byte 0x0f.
8253 *
8254 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8255 * check if it needs updating as well when making changes.
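 *
 * @remarks Presumably indexed as [bOpcode * 4 + idxPrefix], each row below
 *          having four columns for the none, 066h, 0f3h and 0f2h prefixes.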
8256 */
8257IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8258{
8259 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8260 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8261 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8262 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8263 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8264 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8265 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8266 /* 0x06 */ IEMOP_X4(iemOp_clts),
8267 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8268 /* 0x08 */ IEMOP_X4(iemOp_invd),
8269 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8270 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8271 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8272 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8273 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8274 /* 0x0e */ IEMOP_X4(iemOp_femms),
8275 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8276
8277 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vx_Wsd,
8278 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8279 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8280 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8281 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8282 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8283 /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
8284 /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8285 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8286 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8287 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8288 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8289 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8290 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8291 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8292 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8293
8294 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8295 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8296 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8297 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8298 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8299 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8300 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8301 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8302 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8303 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8304 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
8305 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8306 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
8307 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
8308 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8309 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8310
8311 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8312 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8313 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8314 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8315 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8316 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8317 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8318 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8319 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
8320 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8321 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
8322 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8323 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8324 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8325 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8326 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8327
8328 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8329 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8330 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8331 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8332 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8333 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8334 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8335 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8336 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8337 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8338 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8339 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8340 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8341 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8342 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8343 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8344
8345 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8346 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
8347 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
8348 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
8349 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8350 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8351 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8352 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8353 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
8354 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
8355 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
8356 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8357 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
8358 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
8359 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
8360 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
8361
8362 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8363 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8364 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8365 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8366 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8367 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8368 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8369 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8370 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8371 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8372 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8373 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8374 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8375 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8376 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8377 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,
8378
8379 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
8380 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8381 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8382 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8383 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8384 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8385 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8386 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8387
8388 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8389 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8390 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8391 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8392 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
8393 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
8394 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
8395 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
8396
    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe,               iemOp_InvalidNeedRM,      iemOp_popcnt_Gv_Ev,       iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev,          iemOp_bsf_Gv_Ev,          iemOp_tzcnt_Gv_Ev,        iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,          iemOp_bsr_Gv_Ev,          iemOp_lzcnt_Gv_Ev,        iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib,   iemOp_cmppd_Vpd_Wpd_Ib,   iemOp_cmpss_Vss_Wss_Ib,   iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib,  iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8,  iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib,    iemOp_pextrw_Gd_Udq_Ib,   iemOp_InvalidNeedRMImm8,  iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib,  iemOp_shufpd_Vpd_Wpd_Ib,  iemOp_InvalidNeedRMImm8,  iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM,      iemOp_addsubpd_Vpd_Wpd,   iemOp_InvalidNeedRM,      iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq,        iemOp_psrlw_Vx_W,         iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq,        iemOp_psrld_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq,        iemOp_psrlq_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq,        iemOp_paddq_Vx_W,         iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq,       iemOp_pmullw_Vx_Wx,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM,      iemOp_movq_Wq_Vq,         iemOp_movq2dq_Vdq_Nq,     iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq,     iemOp_pmovmskb_Gd_Ux,     iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq,      iemOp_psubusb_Vx_W,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq,      iemOp_psubusw_Vx_Wx,      iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq,       iemOp_pminub_Vx_Wx,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq,         iemOp_pand_Vx_W,          iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq,      iemOp_paddusb_Vx_Wx,      iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq,      iemOp_paddusw_Vx_Wx,      iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq,       iemOp_pmaxub_Vx_W,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq,        iemOp_pandn_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq,        iemOp_pavgb_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq,        iemOp_psraw_Vx_W,         iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq,        iemOp_psrad_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq,        iemOp_pavgw_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq,      iemOp_pmulhuw_Vx_W,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq,       iemOp_pmulhw_Vx_Wx,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM,      iemOp_cvttpd2dq_Vx_Wpd,   iemOp_cvtdq2pd_Vx_Wpd,    iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq,       iemOp_movntdq_Mx_Vx,      iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq,       iemOp_psubsb_Vx_W,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq,       iemOp_psubsw_Vx_Wx,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq,       iemOp_pminsw_Vx_Wx,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq,          iemOp_por_Vx_W,           iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq,       iemOp_paddsb_Vx_Wx,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq,       iemOp_paddsw_Vx_Wx,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq,       iemOp_pmaxsw_Vx_W,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq,         iemOp_pxor_Vx_Wx,         iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,      iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq,        iemOp_psllw_Vx_W,         iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq,        iemOp_pslld_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq,        iemOp_psllq_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq,      iemOp_pmuludq_Vx_W,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq,      iemOp_pmaddwd_Vx_Wx,      iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq,       iemOp_psadbw_Vx_Wx,       iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq,     iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq,        iemOp_psubb_Vx_W,         iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq,        iemOp_psubw_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq,        iemOp_psubd_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq,        iemOp_psubq_Vx_W,         iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq,        iemOp_paddb_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq,        iemOp_paddw_Vx_Wx,        iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq,        iemOp_paddd_Vx_W,         iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
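
/*
 * Illustrative sketch (an editorial addition, not part of the original file):
 * each opcode byte owns four consecutive slots in the table, selected by the
 * last SSE prefix seen -- slot 0 for no prefix, 1 for 0x66, 2 for 0xF3 and
 * 3 for 0xF2 -- which is exactly what the AssertCompile above pins down as
 * 256 * 4 = 1024 entries.  The dispatcher below is a hedged mock-up of how
 * such a table might be consumed; the function name and the way the prefix
 * index reaches it are assumptions, not the real decoder plumbing.
 */
#if 0 /* illustrative only, never compiled */
static VBOXSTRICTRC iemDispatchTwoByteSketch(PVMCPU pVCpu, uint8_t bOpcode, unsigned idxPrefix)
{
    Assert(idxPrefix < 4); /* 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2 (assumed ordering). */
    PFNIEMOP const pfnOp = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
    return pfnOp(pVCpu);
}
#endif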

/** @} */
