/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66785 2017-05-04 11:48:37Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

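/*
 * Decoding pattern used by practically every handler below: a ModR/M byte
 * with the mod field set to 3, tested as
 *      (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT),
 * selects the register form of an instruction, anything else the memory
 * form.  The 3-bit reg and r/m fields are widened to 4 bits by ORing in the
 * REX.R (pVCpu->iem.s.uRexReg) and REX.B (pVCpu->iem.s.uRexB) prefix bits
 * respectively.
 */
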
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
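
/* Example: for the byte sequence 0f 00 d8 the ModR/M byte is 0xd8 (mod=3,
   reg=3, rm=0), so the table above dispatches to iemOp_Grp6_ltr, which takes
   the register path and loads the selector from AX. */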


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


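/*
 * The AMD SVM instructions below (vmrun, vmmcall, vmload, vmsave, stgi, clgi,
 * invlpga and skinit) are only emulated when nested hardware virtualization
 * support is compiled in; otherwise the #else branch stubs them out as #UD.
 */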
#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
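                /* Mimic older CPUs reading the reserved MSW bits back as set:
                   the 386 ORs in 0xffe0 (bits 5 thru 15), the 286 0xfff0
                   (bits 4 thru 15).  The memory variant below does the same. */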
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored; everything is 16-bit and only
       the lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
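
/* Example: for the byte sequence 0f 01 f8 the ModR/M byte is 0xf8 (mod=3,
   reg=7, rm=0), so the switch above dispatches to iemOp_Grp7_swapgs. */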

/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits); remember that it needs IEMOP_HLP_MIN_486(). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
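
/* Note the asymmetry above: the register form only writes the low dword of
   the destination (upper bits preserved), while the memory form zero-extends
   to 128 bits - matching the movss definition.  The same applies to movsd
   (0xf2 prefix) below with qwords. */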


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZxReg, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


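/** Opcode 0x0f 0x12 (no prefix): movhlps for the register form, movlps for
 *  the memory form; see the doxygen blocks inside the function. */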
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic udf30f13
 * @opcode 0x13
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f13
 * @opcode 0x13
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */


/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic udf30f14
 * @opcode 0x14
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f14
 * @opcode 0x14
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic udf30f15
 * @opcode 0x15
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f15
 * @opcode 0x15
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

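/** Opcode 0x0f 0x16 (no prefix): movlhps for the register form, movhps for
 *  the memory form; see the doxygen blocks inside the function. */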
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
FNIEMOP_STUB(iemOp_movhpd_Vdq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT

/**
 * @opdone
 * @opmnemonic udf20f16
 * @opcode 0x16
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpd_Mq_Vq); //NEXT

/**
 * @opdone
 * @opmnemonic udf30f17
 * @opcode 0x17
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f17
 * @opcode 0x17
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

1918
1919/** Opcode 0x0f 0x18. */
1920FNIEMOP_DEF(iemOp_prefetch_Grp16)
1921{
1922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1923 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1924 {
1925 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1926 {
1927 case 4: /* Aliased to /0 for the time being according to AMD. */
1928 case 5: /* Aliased to /0 for the time being according to AMD. */
1929 case 6: /* Aliased to /0 for the time being according to AMD. */
1930 case 7: /* Aliased to /0 for the time being according to AMD. */
1931 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1932 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1933 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1934 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1935 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1936 }
1937
1938 IEM_MC_BEGIN(0, 1);
1939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1942 /* Currently a NOP. */
1943 NOREF(GCPtrEffSrc);
1944 IEM_MC_ADVANCE_RIP();
1945 IEM_MC_END();
1946 return VINF_SUCCESS;
1947 }
1948
1949 return IEMOP_RAISE_INVALID_OPCODE();
1950}
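

/**
 * @remarks The reg-field dispatch above can be checked by hand.  Hypothetical
 *          helper (illustration only) splitting a ModR/M byte the same way
 *          the X86_MODRM_* masks and shifts do:
 * @code
 *  // E.g. bRm=0x0d: mod=0, reg=1 (prefetchT0), rm=5 (disp32).
 *  static void sketchSplitModRm(uint8_t bRm, uint8_t *pMod, uint8_t *pReg, uint8_t *pRm)
 *  {
 *      *pMod = bRm >> 6;       // X86_MODRM_MOD_SHIFT
 *      *pReg = (bRm >> 3) & 7; // X86_MODRM_REG_SHIFT + X86_MODRM_REG_SMASK
 *      *pRm  = bRm & 7;        // X86_MODRM_RM_MASK
 *  }
 * @endcode
 */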
1951
1952
1953/** Opcode 0x0f 0x19..0x1f. */
1954FNIEMOP_DEF(iemOp_nop_Ev)
1955{
1956 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1957 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1958 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1959 {
1960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1961 IEM_MC_BEGIN(0, 0);
1962 IEM_MC_ADVANCE_RIP();
1963 IEM_MC_END();
1964 }
1965 else
1966 {
1967 IEM_MC_BEGIN(0, 1);
1968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1971 /* Currently a NOP. */
1972 NOREF(GCPtrEffSrc);
1973 IEM_MC_ADVANCE_RIP();
1974 IEM_MC_END();
1975 }
1976 return VINF_SUCCESS;
1977}
1978
1979
1980/** Opcode 0x0f 0x20. */
1981FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1982{
    /* mod is ignored, as are operand size overrides. */
1984 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1985 IEMOP_HLP_MIN_386();
1986 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1987 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1988 else
1989 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1990
1991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1992 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1993 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1994 {
1995 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1996 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1997 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1998 iCrReg |= 8;
1999 }
2000 switch (iCrReg)
2001 {
2002 case 0: case 2: case 3: case 4: case 8:
2003 break;
2004 default:
2005 return IEMOP_RAISE_INVALID_OPCODE();
2006 }
2007 IEMOP_HLP_DONE_DECODING();
2008
2009 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2010}
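

/**
 * @remarks Sketch of how the control register index is assembled above from
 *          the reg field, REX.R and the LOCK-prefix CR8 alias; the helper
 *          name is made up for illustration:
 * @code
 *  static uint8_t sketchCrIndex(uint8_t bRm, bool fRexR, bool fLockAlias)
 *  {
 *      uint8_t iCrReg = ((bRm >> 3) & 7) | (fRexR ? 8 : 0);
 *      if (fLockAlias)     // "lock mov eax,cr0" encodes cr8 on CPUs with the alias.
 *          iCrReg |= 8;
 *      return iCrReg;      // Only 0, 2, 3, 4 and 8 survive the switch above.
 *  }
 * @endcode
 */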
2011
2012
2013/** Opcode 0x0f 0x21. */
2014FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2015{
2016 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2017 IEMOP_HLP_MIN_386();
2018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2020 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2021 return IEMOP_RAISE_INVALID_OPCODE();
2022 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2023 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2024 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2025}
2026
2027
2028/** Opcode 0x0f 0x22. */
2029FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2030{
    /* mod is ignored, as are operand size overrides. */
2032 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2033 IEMOP_HLP_MIN_386();
2034 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2035 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2036 else
2037 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2038
2039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2040 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2041 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2042 {
2043 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2044 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2045 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2046 iCrReg |= 8;
2047 }
2048 switch (iCrReg)
2049 {
2050 case 0: case 2: case 3: case 4: case 8:
2051 break;
2052 default:
2053 return IEMOP_RAISE_INVALID_OPCODE();
2054 }
2055 IEMOP_HLP_DONE_DECODING();
2056
2057 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2058}
2059
2060
2061/** Opcode 0x0f 0x23. */
2062FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2063{
2064 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2065 IEMOP_HLP_MIN_386();
2066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2068 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2069 return IEMOP_RAISE_INVALID_OPCODE();
2070 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2071 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2072 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2073}
2074
2075
2076/** Opcode 0x0f 0x24. */
2077FNIEMOP_DEF(iemOp_mov_Rd_Td)
2078{
2079 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2080 /** @todo works on 386 and 486. */
2081 /* The RM byte is not considered, see testcase. */
2082 return IEMOP_RAISE_INVALID_OPCODE();
2083}
2084
2085
2086/** Opcode 0x0f 0x26. */
2087FNIEMOP_DEF(iemOp_mov_Td_Rd)
2088{
2089 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2090 /** @todo works on 386 and 486. */
2091 /* The RM byte is not considered, see testcase. */
2092 return IEMOP_RAISE_INVALID_OPCODE();
2093}
2094
2095
2096/** Opcode 0x0f 0x28 - movaps Vps, Wps */
2097FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2098{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps Vps,Wps");
2100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2101 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2102 {
2103 /*
2104 * Register, register.
2105 */
2106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2107 IEM_MC_BEGIN(0, 0);
2108 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2109 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2110 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2111 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2112 IEM_MC_ADVANCE_RIP();
2113 IEM_MC_END();
2114 }
2115 else
2116 {
2117 /*
2118 * Register, memory.
2119 */
2120 IEM_MC_BEGIN(0, 2);
2121 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2123
2124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2126 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2127 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2128
2129 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2130 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2131
2132 IEM_MC_ADVANCE_RIP();
2133 IEM_MC_END();
2134 }
2135 return VINF_SUCCESS;
2136}
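

/**
 * @remarks The _ALIGN_SSE fetch above is what gives movaps its architectural
 *          16-byte alignment requirement (#GP(0) on a misaligned memory
 *          operand).  Illustrative check only:
 * @code
 *  static bool sketchIsSseAligned(uint64_t GCPtrEff)
 *  {
 *      return (GCPtrEff & 15) == 0; // movaps/movapd insist on 16-byte alignment.
 *  }
 * @endcode
 */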
2137
2138/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
2139FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2140{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
2142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2143 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2144 {
2145 /*
2146 * Register, register.
2147 */
2148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2149 IEM_MC_BEGIN(0, 0);
2150 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2151 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2152 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2153 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2154 IEM_MC_ADVANCE_RIP();
2155 IEM_MC_END();
2156 }
2157 else
2158 {
2159 /*
2160 * Register, memory.
2161 */
2162 IEM_MC_BEGIN(0, 2);
2163 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2165
2166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2168 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2169 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2170
2171 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2172 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2173
2174 IEM_MC_ADVANCE_RIP();
2175 IEM_MC_END();
2176 }
2177 return VINF_SUCCESS;
2178}
2179
2180/* Opcode 0xf3 0x0f 0x28 - invalid */
2181/* Opcode 0xf2 0x0f 0x28 - invalid */
2182
2183/** Opcode 0x0f 0x29 - movaps Wps, Vps */
2184FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2185{
2186 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2188 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2189 {
2190 /*
2191 * Register, register.
2192 */
2193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2194 IEM_MC_BEGIN(0, 0);
2195 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2196 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2197 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2198 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2199 IEM_MC_ADVANCE_RIP();
2200 IEM_MC_END();
2201 }
2202 else
2203 {
2204 /*
2205 * Memory, register.
2206 */
2207 IEM_MC_BEGIN(0, 2);
2208 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2210
2211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2213 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2214 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2215
2216 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2217 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2218
2219 IEM_MC_ADVANCE_RIP();
2220 IEM_MC_END();
2221 }
2222 return VINF_SUCCESS;
2223}
2224
2225/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
2226FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2227{
2228 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2230 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2231 {
2232 /*
2233 * Register, register.
2234 */
2235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2236 IEM_MC_BEGIN(0, 0);
2237 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2238 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2239 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2240 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2241 IEM_MC_ADVANCE_RIP();
2242 IEM_MC_END();
2243 }
2244 else
2245 {
2246 /*
2247 * Memory, register.
2248 */
2249 IEM_MC_BEGIN(0, 2);
2250 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2252
2253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2255 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2256 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2257
2258 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2259 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2260
2261 IEM_MC_ADVANCE_RIP();
2262 IEM_MC_END();
2263 }
2264 return VINF_SUCCESS;
2265}
2266
2267/* Opcode 0xf3 0x0f 0x29 - invalid */
2268/* Opcode 0xf2 0x0f 0x29 - invalid */
2269
2270
2271/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2272FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2273/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2274FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2276FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2278FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2279
2280
/** Opcode 0x0f 0x2b - movntps Mps, Vps */
2282FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2283{
2284 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2286 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2287 {
2288 /*
2289 * memory, register.
2290 */
2291 IEM_MC_BEGIN(0, 2);
2292 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2294
2295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2297 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2298 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2299
2300 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2301 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2302
2303 IEM_MC_ADVANCE_RIP();
2304 IEM_MC_END();
2305 }
2306 /* The register, register encoding is invalid. */
2307 else
2308 return IEMOP_RAISE_INVALID_OPCODE();
2309 return VINF_SUCCESS;
2310}
2311
2312/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2313FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2314{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2317 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2318 {
2319 /*
2320 * memory, register.
2321 */
2322 IEM_MC_BEGIN(0, 2);
2323 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2328 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2330
2331 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2332 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2333
2334 IEM_MC_ADVANCE_RIP();
2335 IEM_MC_END();
2336 }
2337 /* The register, register encoding is invalid. */
2338 else
2339 return IEMOP_RAISE_INVALID_OPCODE();
2340 return VINF_SUCCESS;
2341}
2342/* Opcode 0xf3 0x0f 0x2b - invalid */
2343/* Opcode 0xf2 0x0f 0x2b - invalid */
2344
2345
2346/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2347FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2348/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2349FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2350/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2351FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2352/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2353FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2354
2355/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2356FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2357/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2358FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2359/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2360FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2361/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2362FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2363
2364/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2365FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2366/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2367FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2368/* Opcode 0xf3 0x0f 0x2e - invalid */
2369/* Opcode 0xf2 0x0f 0x2e - invalid */
2370
2371/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2372FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2373/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2374FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2375/* Opcode 0xf3 0x0f 0x2f - invalid */
2376/* Opcode 0xf2 0x0f 0x2f - invalid */
2377
2378/** Opcode 0x0f 0x30. */
2379FNIEMOP_DEF(iemOp_wrmsr)
2380{
2381 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2383 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2384}
2385
2386
2387/** Opcode 0x0f 0x31. */
2388FNIEMOP_DEF(iemOp_rdtsc)
2389{
2390 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2392 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2393}
2394
2395
/** Opcode 0x0f 0x32. */
2397FNIEMOP_DEF(iemOp_rdmsr)
2398{
2399 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2401 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2402}
2403
2404
/** Opcode 0x0f 0x33. */
2406FNIEMOP_DEF(iemOp_rdpmc)
2407{
2408 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2410 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2411}
2412
2413
2414/** Opcode 0x0f 0x34. */
2415FNIEMOP_STUB(iemOp_sysenter);
2416/** Opcode 0x0f 0x35. */
2417FNIEMOP_STUB(iemOp_sysexit);
2418/** Opcode 0x0f 0x37. */
2419FNIEMOP_STUB(iemOp_getsec);
2420
2421
2422/** Opcode 0x0f 0x38. */
2423FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2424{
2425#ifdef IEM_WITH_THREE_0F_38
2426 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2427 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2428#else
2429 IEMOP_BITCH_ABOUT_STUB();
2430 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2431#endif
2432}
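

/**
 * @remarks The three-byte tables are assumed to store four entries per opcode
 *          byte, one per mandatory prefix (none, 0x66, 0xf3, 0xf2, in
 *          idxPrefix order).  Sketch of the index math used above:
 * @code
 *  static uintptr_t sketchThreeByteIndex(uint8_t bOpcode, uint8_t idxPrefix)
 *  {
 *      return (uintptr_t)bOpcode * 4 + idxPrefix; // idxPrefix in [0..3].
 *  }
 * @endcode
 */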
2433
2434
2435/** Opcode 0x0f 0x3a. */
2436FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2437{
2438#ifdef IEM_WITH_THREE_0F_3A
2439 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2440 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2441#else
2442 IEMOP_BITCH_ABOUT_STUB();
2443 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2444#endif
2445}
2446
2447
2448/**
2449 * Implements a conditional move.
2450 *
 * Wish there were an obvious way to do this that allowed sharing code and
 * reducing the bloat.
2453 *
2454 * @param a_Cnd The conditional "microcode" operation.
2455 */
2456#define CMOV_X(a_Cnd) \
2457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2458 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2459 { \
2460 switch (pVCpu->iem.s.enmEffOpSize) \
2461 { \
2462 case IEMMODE_16BIT: \
2463 IEM_MC_BEGIN(0, 1); \
2464 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2465 a_Cnd { \
2466 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2467 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2468 } IEM_MC_ENDIF(); \
2469 IEM_MC_ADVANCE_RIP(); \
2470 IEM_MC_END(); \
2471 return VINF_SUCCESS; \
2472 \
2473 case IEMMODE_32BIT: \
2474 IEM_MC_BEGIN(0, 1); \
2475 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2476 a_Cnd { \
2477 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2478 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2479 } IEM_MC_ELSE() { \
2480 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2481 } IEM_MC_ENDIF(); \
2482 IEM_MC_ADVANCE_RIP(); \
2483 IEM_MC_END(); \
2484 return VINF_SUCCESS; \
2485 \
2486 case IEMMODE_64BIT: \
2487 IEM_MC_BEGIN(0, 1); \
2488 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2489 a_Cnd { \
2490 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2491 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2492 } IEM_MC_ENDIF(); \
2493 IEM_MC_ADVANCE_RIP(); \
2494 IEM_MC_END(); \
2495 return VINF_SUCCESS; \
2496 \
2497 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2498 } \
2499 } \
2500 else \
2501 { \
2502 switch (pVCpu->iem.s.enmEffOpSize) \
2503 { \
2504 case IEMMODE_16BIT: \
2505 IEM_MC_BEGIN(0, 2); \
2506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2507 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2509 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2510 a_Cnd { \
2511 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2512 } IEM_MC_ENDIF(); \
2513 IEM_MC_ADVANCE_RIP(); \
2514 IEM_MC_END(); \
2515 return VINF_SUCCESS; \
2516 \
2517 case IEMMODE_32BIT: \
2518 IEM_MC_BEGIN(0, 2); \
2519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2520 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2522 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2523 a_Cnd { \
2524 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2525 } IEM_MC_ELSE() { \
2526 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2527 } IEM_MC_ENDIF(); \
2528 IEM_MC_ADVANCE_RIP(); \
2529 IEM_MC_END(); \
2530 return VINF_SUCCESS; \
2531 \
2532 case IEMMODE_64BIT: \
2533 IEM_MC_BEGIN(0, 2); \
2534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2535 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2537 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2538 a_Cnd { \
2539 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2540 } IEM_MC_ENDIF(); \
2541 IEM_MC_ADVANCE_RIP(); \
2542 IEM_MC_END(); \
2543 return VINF_SUCCESS; \
2544 \
2545 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2546 } \
2547 } do {} while (0)
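

/**
 * @remarks CMOV_X is instantiated once per condition below.  For the 32-bit
 *          register form, the expansion behaves roughly like this plain C
 *          sketch (illustration only; note that the high half is cleared even
 *          when the condition is false, since 32-bit writes zero-extend):
 * @code
 *  static uint64_t sketchCmov32(uint64_t uDst, uint32_t uSrc, bool fCond)
 *  {
 *      if (fCond)
 *          return uSrc;          // Move and zero-extend.
 *      return uDst & UINT32_MAX; // No move, but still zero-extend.
 *  }
 * @endcode
 */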
2548
2549
2550
2551/** Opcode 0x0f 0x40. */
2552FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2553{
2554 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2555 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2556}
2557
2558
2559/** Opcode 0x0f 0x41. */
2560FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2561{
2562 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2563 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2564}
2565
2566
2567/** Opcode 0x0f 0x42. */
2568FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2569{
2570 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2571 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2572}
2573
2574
2575/** Opcode 0x0f 0x43. */
2576FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2577{
2578 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2579 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2580}
2581
2582
2583/** Opcode 0x0f 0x44. */
2584FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2585{
2586 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2587 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2588}
2589
2590
2591/** Opcode 0x0f 0x45. */
2592FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2593{
2594 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2595 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2596}
2597
2598
2599/** Opcode 0x0f 0x46. */
2600FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2601{
2602 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2603 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2604}
2605
2606
2607/** Opcode 0x0f 0x47. */
2608FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2609{
2610 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2611 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2612}
2613
2614
2615/** Opcode 0x0f 0x48. */
2616FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2617{
2618 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2619 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2620}
2621
2622
2623/** Opcode 0x0f 0x49. */
2624FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2625{
2626 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2627 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2628}
2629
2630
2631/** Opcode 0x0f 0x4a. */
2632FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2633{
2634 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2635 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2636}
2637
2638
2639/** Opcode 0x0f 0x4b. */
2640FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2641{
2642 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2643 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2644}
2645
2646
2647/** Opcode 0x0f 0x4c. */
2648FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2649{
2650 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2651 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2652}
2653
2654
2655/** Opcode 0x0f 0x4d. */
2656FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2657{
2658 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2659 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2660}
2661
2662
2663/** Opcode 0x0f 0x4e. */
2664FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2665{
2666 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2667 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2668}
2669
2670
2671/** Opcode 0x0f 0x4f. */
2672FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2673{
2674 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2675 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2676}
2677
2678#undef CMOV_X
2679
2680/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2681FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2682/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2683FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2684/* Opcode 0xf3 0x0f 0x50 - invalid */
2685/* Opcode 0xf2 0x0f 0x50 - invalid */
2686
2687/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2688FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2689/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2690FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2691/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2692FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2693/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2694FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2695
2696/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2697FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2698/* Opcode 0x66 0x0f 0x52 - invalid */
2699/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2700FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2701/* Opcode 0xf2 0x0f 0x52 - invalid */
2702
2703/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2704FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2705/* Opcode 0x66 0x0f 0x53 - invalid */
2706/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2707FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2708/* Opcode 0xf2 0x0f 0x53 - invalid */
2709
2710/** Opcode 0x0f 0x54 - andps Vps, Wps */
2711FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2712/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2713FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2714/* Opcode 0xf3 0x0f 0x54 - invalid */
2715/* Opcode 0xf2 0x0f 0x54 - invalid */
2716
2717/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2718FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2719/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2720FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2721/* Opcode 0xf3 0x0f 0x55 - invalid */
2722/* Opcode 0xf2 0x0f 0x55 - invalid */
2723
2724/** Opcode 0x0f 0x56 - orps Vps, Wps */
2725FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2726/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2727FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2728/* Opcode 0xf3 0x0f 0x56 - invalid */
2729/* Opcode 0xf2 0x0f 0x56 - invalid */
2730
2731/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2732FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2733/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2734FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2735/* Opcode 0xf3 0x0f 0x57 - invalid */
2736/* Opcode 0xf2 0x0f 0x57 - invalid */
2737
2738/** Opcode 0x0f 0x58 - addps Vps, Wps */
2739FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2740/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2741FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2742/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2743FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2744/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2745FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2746
2747/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2748FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2749/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2750FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2751/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2752FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2753/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2754FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2755
2756/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2757FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2758/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2759FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2760/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2761FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2762/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2763FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2764
2765/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2766FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2767/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2768FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2769/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2770FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2771/* Opcode 0xf2 0x0f 0x5b - invalid */
2772
2773/** Opcode 0x0f 0x5c - subps Vps, Wps */
2774FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2775/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2776FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2777/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2778FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2779/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2780FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2781
2782/** Opcode 0x0f 0x5d - minps Vps, Wps */
2783FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2784/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2785FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2786/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2787FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2788/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2789FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2790
2791/** Opcode 0x0f 0x5e - divps Vps, Wps */
2792FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2793/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2794FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2795/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2796FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2797/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2798FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2799
2800/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2801FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2802/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2803FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2804/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2805FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2806/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2807FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2808
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint32_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint32_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
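

/**
 * @remarks What a "LowLow" worker computes, using punpcklbw as the example:
 *          the low halves of the two operands are interleaved byte by byte.
 *          Hypothetical scalar sketch of the MMX form:
 * @code
 *  static void sketchPunpcklbwU64(uint64_t *puDst, uint32_t uSrc)
 *  {
 *      uint64_t uResult = 0;
 *      for (unsigned i = 0; i < 4; i++)
 *          uResult |= ((uint64_t)((*puDst >> i * 8) & 0xff) << (i * 16))
 *                   | ((uint64_t)((uSrc   >> i * 8) & 0xff) << (i * 16 + 8));
 *      *puDst = uResult;
 *  }
 * @endcode
 */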
2864
2865
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 64-bit memory access that must be 128-bit aligned (only the lower
 * 64 bits are used).
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,     pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2925
2926
2927/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2928FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2929{
2930 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2931 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2932}
2933
/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2940
2941/* Opcode 0xf3 0x0f 0x60 - invalid */
2942
2943
2944/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2945FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2946{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
2948 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2949}
2950
2951/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2952FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2953{
    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2955 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2956}
2957
2958/* Opcode 0xf3 0x0f 0x61 - invalid */
2959
2960
2961/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2962FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2963{
2964 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2965 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2966}
2967
2968/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2969FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2970{
2971 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2972 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2973}
2974
2975/* Opcode 0xf3 0x0f 0x62 - invalid */
2976
2977
2978
2979/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2980FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2981/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2982FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2983/* Opcode 0xf3 0x0f 0x63 - invalid */
2984
2985/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2986FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2987/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2988FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2989/* Opcode 0xf3 0x0f 0x64 - invalid */
2990
2991/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2992FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2993/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2994FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2995/* Opcode 0xf3 0x0f 0x65 - invalid */
2996
2997/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2998FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2999/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3000FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3001/* Opcode 0xf3 0x0f 0x66 - invalid */
3002
3003/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3004FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3006FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3007/* Opcode 0xf3 0x0f 0x67 - invalid */
3008
3009
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access.
 *
 * Exceptions type 4.
 */
3020FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3021{
3022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3023 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3024 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3025 {
3026 /*
3027 * Register, register.
3028 */
3029 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3030 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3032 IEM_MC_BEGIN(2, 0);
3033 IEM_MC_ARG(uint64_t *, pDst, 0);
3034 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3035 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3036 IEM_MC_PREPARE_FPU_USAGE();
3037 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3038 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3039 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3040 IEM_MC_ADVANCE_RIP();
3041 IEM_MC_END();
3042 }
3043 else
3044 {
3045 /*
3046 * Register, memory.
3047 */
3048 IEM_MC_BEGIN(2, 2);
3049 IEM_MC_ARG(uint64_t *, pDst, 0);
3050 IEM_MC_LOCAL(uint64_t, uSrc);
3051 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3053
3054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3056 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3057 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3058
3059 IEM_MC_PREPARE_FPU_USAGE();
3060 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3061 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3062
3063 IEM_MC_ADVANCE_RIP();
3064 IEM_MC_END();
3065 }
3066 return VINF_SUCCESS;
3067}
3068
3069
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 128-bit aligned access where the CPU may read the full 128 bits or
 * only the upper 64 bits.
 *
 * Exceptions type 4.
 */
3080FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3081{
3082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3083 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3084 {
3085 /*
3086 * Register, register.
3087 */
3088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3089 IEM_MC_BEGIN(2, 0);
3090 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3091 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3092 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3093 IEM_MC_PREPARE_SSE_USAGE();
3094 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3095 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3096 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3097 IEM_MC_ADVANCE_RIP();
3098 IEM_MC_END();
3099 }
3100 else
3101 {
3102 /*
3103 * Register, memory.
3104 */
3105 IEM_MC_BEGIN(2, 2);
3106 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3107 IEM_MC_LOCAL(RTUINT128U, uSrc);
3108 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3110
3111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3113 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3115
3116 IEM_MC_PREPARE_SSE_USAGE();
3117 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3118 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3119
3120 IEM_MC_ADVANCE_RIP();
3121 IEM_MC_END();
3122 }
3123 return VINF_SUCCESS;
3124}
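

/**
 * @remarks What a "HighHigh" worker computes, using punpckhbw as the example:
 *          the high halves of the two operands are interleaved byte by byte.
 *          Hypothetical scalar sketch of one 64-bit lane:
 * @code
 *  static uint64_t sketchPunpckhbwU64(uint64_t uDst, uint64_t uSrc)
 *  {
 *      uint64_t const uHiDst = uDst >> 32, uHiSrc = uSrc >> 32;
 *      uint64_t       uResult = 0;
 *      for (unsigned i = 0; i < 4; i++)
 *          uResult |= (((uHiDst >> i * 8) & 0xff) << (i * 16))
 *                   | (((uHiSrc >> i * 8) & 0xff) << (i * 16 + 8));
 *      return uResult;
 *  }
 * @endcode
 */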
3125
3126
3127/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3128FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3129{
3130 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3131 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3132}
3133
3134/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3135FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3136{
    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3138 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3139}
3140/* Opcode 0xf3 0x0f 0x68 - invalid */
3141
3142
3143/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3144FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3145{
3146 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3147 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3148}
3149
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
3157/* Opcode 0xf3 0x0f 0x69 - invalid */
3158
3159
3160/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3161FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3162{
3163 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3164 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3165}
3166
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhdq_Vx_Wx, "punpckhdq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
3173/* Opcode 0xf3 0x0f 0x6a - invalid */
3174
3175
3176/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3177FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3178/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3179FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3180/* Opcode 0xf3 0x0f 0x6b - invalid */
3181
3182
3183/* Opcode 0x0f 0x6c - invalid */
3184
3185/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3186FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3187{
3188 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3189 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3190}
3191
3192/* Opcode 0xf3 0x0f 0x6c - invalid */
3193/* Opcode 0xf2 0x0f 0x6c - invalid */
3194
3195
3196/* Opcode 0x0f 0x6d - invalid */
3197
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhqdq_Vx_Wx, "punpckhqdq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
3204
3205/* Opcode 0xf3 0x0f 0x6d - invalid */
3206
3207
3208/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3209FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3210{
3211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3212 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3213 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3214 else
3215 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3216 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3217 {
3218 /* MMX, greg */
3219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3220 IEM_MC_BEGIN(0, 1);
3221 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3222 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3223 IEM_MC_LOCAL(uint64_t, u64Tmp);
3224 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3225 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3226 else
3227 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3228 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3229 IEM_MC_ADVANCE_RIP();
3230 IEM_MC_END();
3231 }
3232 else
3233 {
3234 /* MMX, [mem] */
3235 IEM_MC_BEGIN(0, 2);
3236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3237 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3240 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3241 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3242 {
3243 IEM_MC_LOCAL(uint64_t, u64Tmp);
3244 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3245 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3246 }
3247 else
3248 {
3249 IEM_MC_LOCAL(uint32_t, u32Tmp);
3250 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3251 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3252 }
3253 IEM_MC_ADVANCE_RIP();
3254 IEM_MC_END();
3255 }
3256 return VINF_SUCCESS;
3257}
3258
3259/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3260FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3261{
3262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3263 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
    else
        IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
3267 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3268 {
3269 /* XMM, greg*/
3270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3271 IEM_MC_BEGIN(0, 1);
3272 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3273 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3274 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3275 {
3276 IEM_MC_LOCAL(uint64_t, u64Tmp);
3277 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3278 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3279 }
3280 else
3281 {
3282 IEM_MC_LOCAL(uint32_t, u32Tmp);
3283 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3284 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3285 }
3286 IEM_MC_ADVANCE_RIP();
3287 IEM_MC_END();
3288 }
3289 else
3290 {
3291 /* XMM, [mem] */
3292 IEM_MC_BEGIN(0, 2);
3293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3294 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3297 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3298 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3299 {
3300 IEM_MC_LOCAL(uint64_t, u64Tmp);
3301 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3302 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3303 }
3304 else
3305 {
3306 IEM_MC_LOCAL(uint32_t, u32Tmp);
3307 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3308 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3309 }
3310 IEM_MC_ADVANCE_RIP();
3311 IEM_MC_END();
3312 }
3313 return VINF_SUCCESS;
3314}
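

/**
 * @remarks REX.W is what selects between the movd and movq forms decoded
 *          above; both clear the rest of the XMM register.  Scalar sketch,
 *          illustration only:
 * @code
 *  static void sketchMovdMovqToXmm(uint64_t au64Xmm[2], uint64_t uSrc, bool fRexW)
 *  {
 *      au64Xmm[0] = fRexW ? uSrc : (uint32_t)uSrc; // movd zero-extends 32 -> 64.
 *      au64Xmm[1] = 0;                             // High qword is always zeroed.
 *  }
 * @endcode
 */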
3315
3316/* Opcode 0xf3 0x0f 0x6e - invalid */
3317
3318
3319/** Opcode 0x0f 0x6f - movq Pq, Qq */
3320FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3321{
3322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3323 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3324 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3325 {
3326 /*
3327 * Register, register.
3328 */
3329 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3330 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3332 IEM_MC_BEGIN(0, 1);
3333 IEM_MC_LOCAL(uint64_t, u64Tmp);
3334 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3335 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3336 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3337 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3338 IEM_MC_ADVANCE_RIP();
3339 IEM_MC_END();
3340 }
3341 else
3342 {
3343 /*
3344 * Register, memory.
3345 */
3346 IEM_MC_BEGIN(0, 2);
3347 IEM_MC_LOCAL(uint64_t, u64Tmp);
3348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3349
3350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3352 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3353 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3354 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3355 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3356
3357 IEM_MC_ADVANCE_RIP();
3358 IEM_MC_END();
3359 }
3360 return VINF_SUCCESS;
3361}
3362
3363/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3364FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3365{
3366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3367 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3368 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3369 {
3370 /*
3371 * Register, register.
3372 */
3373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3374 IEM_MC_BEGIN(0, 0);
3375 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3376 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3377 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3378 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3379 IEM_MC_ADVANCE_RIP();
3380 IEM_MC_END();
3381 }
3382 else
3383 {
3384 /*
3385 * Register, memory.
3386 */
3387 IEM_MC_BEGIN(0, 2);
3388 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3389 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3390
3391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3393 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3394 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3395 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3396 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3397
3398 IEM_MC_ADVANCE_RIP();
3399 IEM_MC_END();
3400 }
3401 return VINF_SUCCESS;
3402}
3403
3404/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3405FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3406{
3407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3408 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3409 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3410 {
3411 /*
3412 * Register, register.
3413 */
3414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3415 IEM_MC_BEGIN(0, 0);
3416 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3417 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3418 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3419 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3420 IEM_MC_ADVANCE_RIP();
3421 IEM_MC_END();
3422 }
3423 else
3424 {
3425 /*
3426 * Register, memory.
3427 */
3428 IEM_MC_BEGIN(0, 2);
3429 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3431
3432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3434 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3436 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3437 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3438
3439 IEM_MC_ADVANCE_RIP();
3440 IEM_MC_END();
3441 }
3442 return VINF_SUCCESS;
3443}
3444
3445
3446/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3447FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3448{
3449 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3451 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3452 {
3453 /*
3454 * Register, register.
3455 */
3456 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3458
3459 IEM_MC_BEGIN(3, 0);
3460 IEM_MC_ARG(uint64_t *, pDst, 0);
3461 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3462 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3463 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3464 IEM_MC_PREPARE_FPU_USAGE();
3465 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3466 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3467 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3468 IEM_MC_ADVANCE_RIP();
3469 IEM_MC_END();
3470 }
3471 else
3472 {
3473 /*
3474 * Register, memory.
3475 */
3476 IEM_MC_BEGIN(3, 2);
3477 IEM_MC_ARG(uint64_t *, pDst, 0);
3478 IEM_MC_LOCAL(uint64_t, uSrc);
3479 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3481
3482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3483 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3484 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3486 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3487
3488 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3489 IEM_MC_PREPARE_FPU_USAGE();
3490 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3491 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3492
3493 IEM_MC_ADVANCE_RIP();
3494 IEM_MC_END();
3495 }
3496 return VINF_SUCCESS;
3497}
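

/**
 * @remarks The immediate selects one source word per destination word, two
 *          bits each.  Sketch of the pshufw datapath (illustration only):
 * @code
 *  static uint64_t sketchPshufW(uint64_t uSrc, uint8_t bOrder)
 *  {
 *      uint64_t uResult = 0;
 *      for (unsigned iWord = 0; iWord < 4; iWord++)
 *      {
 *          unsigned const iSel = (bOrder >> (iWord * 2)) & 3;
 *          uResult |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
 *      }
 *      return uResult; // E.g. bOrder=0x1b reverses the four words.
 *  }
 * @endcode
 */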
3498
3499/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3500FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3501{
3502 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3505 {
3506 /*
3507 * Register, register.
3508 */
3509 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3511
3512 IEM_MC_BEGIN(3, 0);
3513 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3514 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3515 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3516 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3517 IEM_MC_PREPARE_SSE_USAGE();
3518 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3519 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3520 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3521 IEM_MC_ADVANCE_RIP();
3522 IEM_MC_END();
3523 }
3524 else
3525 {
3526 /*
3527 * Register, memory.
3528 */
3529 IEM_MC_BEGIN(3, 2);
3530 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3531 IEM_MC_LOCAL(RTUINT128U, uSrc);
3532 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3534
3535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3536 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3537 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3539 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3540
3541 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3542 IEM_MC_PREPARE_SSE_USAGE();
3543 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3544 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3545
3546 IEM_MC_ADVANCE_RIP();
3547 IEM_MC_END();
3548 }
3549 return VINF_SUCCESS;
3550}
3551
3552/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3553FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3554{
3555 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3557 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3558 {
3559 /*
3560 * Register, register.
3561 */
3562 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3564
3565 IEM_MC_BEGIN(3, 0);
3566 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3567 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3568 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3569 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3570 IEM_MC_PREPARE_SSE_USAGE();
3571 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3572 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3573 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3574 IEM_MC_ADVANCE_RIP();
3575 IEM_MC_END();
3576 }
3577 else
3578 {
3579 /*
3580 * Register, memory.
3581 */
3582 IEM_MC_BEGIN(3, 2);
3583 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3584 IEM_MC_LOCAL(RTUINT128U, uSrc);
3585 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3587
3588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3589 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3590 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3592 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3593
3594 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3595 IEM_MC_PREPARE_SSE_USAGE();
3596 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3597 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3598
3599 IEM_MC_ADVANCE_RIP();
3600 IEM_MC_END();
3601 }
3602 return VINF_SUCCESS;
3603}
3604
3605/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3606FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3607{
3608 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3610 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3611 {
3612 /*
3613 * Register, register.
3614 */
3615 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3617
3618 IEM_MC_BEGIN(3, 0);
3619 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3620 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3621 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3622 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3623 IEM_MC_PREPARE_SSE_USAGE();
3624 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3625 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3626 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3627 IEM_MC_ADVANCE_RIP();
3628 IEM_MC_END();
3629 }
3630 else
3631 {
3632 /*
3633 * Register, memory.
3634 */
3635 IEM_MC_BEGIN(3, 2);
3636 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3637 IEM_MC_LOCAL(RTUINT128U, uSrc);
3638 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3640
3641        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3642 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3643 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3645 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3646
3647 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3648 IEM_MC_PREPARE_SSE_USAGE();
3649 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3650 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3651
3652 IEM_MC_ADVANCE_RIP();
3653 IEM_MC_END();
3654 }
3655 return VINF_SUCCESS;
3656}
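
/*
 * Sidebar: the bEvil immediate in the shuffles above is the shuffle control
 * byte; each 2-bit field selects the source word for the corresponding
 * destination word.  pshuflw shuffles the four low words and passes the high
 * quadword through unchanged, pshufhw does the opposite, and pshufd works on
 * dwords.  A plain-C sketch of the expected pshuflw semantics (illustration
 * only; the real iemAImpl_pshuflw worker is declared elsewhere, takes
 * additional FPU-state arguments, and is typically implemented in assembly):
 */
#if 0
static void pshuflwRef(RTUINT128U *puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U const uSrc = *puSrc;   /* copy first: puDst may alias puSrc */
    for (unsigned i = 0; i < 4; i++)
        puDst->au16[i] = uSrc.au16[(bImm >> (i * 2)) & 3];
    puDst->au64[1] = uSrc.au64[1];    /* the high quadword is untouched */
}
#endif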
3657
3658
3659/** Opcode 0x0f 0x71 11/2. */
3660FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3661
3662/** Opcode 0x66 0x0f 0x71 11/2. */
3663FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3664
3665/** Opcode 0x0f 0x71 11/4. */
3666FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3667
3668/** Opcode 0x66 0x0f 0x71 11/4. */
3669FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3670
3671/** Opcode 0x0f 0x71 11/6. */
3672FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3673
3674/** Opcode 0x66 0x0f 0x71 11/6. */
3675FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3676
3677
3678/**
3679 * Group 12 jump table for register variant.
3680 */
3681IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3682{
3683 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3684 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3685 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3686 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3687 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3688 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3689 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3690 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3691};
3692AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
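
/*
 * Layout note: these register-variant jump tables (groups 12, 13 and 14) are
 * 8 rows -- the ModR/M reg field -- by 4 columns, indexed by
 * pVCpu->iem.s.idxPrefix.  Judging from the row comments, column 0 is the
 * unprefixed (MMX) form, column 1 the 0x66 (SSE) form, and columns 2 and 3
 * the 0xF3/0xF2 forms, hence the reg * 4 + idxPrefix lookup in the
 * dispatchers and the 8*4 AssertCompile checks.  The memory forms of all
 * three groups are invalid and go straight to iemOp_InvalidWithRMNeedImm8.
 */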
3693
3694
3695/** Opcode 0x0f 0x71. */
3696FNIEMOP_DEF(iemOp_Grp12)
3697{
3698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3699 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3700 /* register, register */
3701 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3702 + pVCpu->iem.s.idxPrefix], bRm);
3703 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3704}
3705
3706
3707/** Opcode 0x0f 0x72 11/2. */
3708FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3709
3710/** Opcode 0x66 0x0f 0x72 11/2. */
3711FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3712
3713/** Opcode 0x0f 0x72 11/4. */
3714FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3715
3716/** Opcode 0x66 0x0f 0x72 11/4. */
3717FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3718
3719/** Opcode 0x0f 0x72 11/6. */
3720FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3721
3722/** Opcode 0x66 0x0f 0x72 11/6. */
3723FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3724
3725
3726/**
3727 * Group 13 jump table for register variant.
3728 */
3729IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3730{
3731 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3732 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3733 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3734 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3735 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3736 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3737 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3738 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3739};
3740AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3741
3742/** Opcode 0x0f 0x72. */
3743FNIEMOP_DEF(iemOp_Grp13)
3744{
3745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3746 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3747 /* register, register */
3748 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3749 + pVCpu->iem.s.idxPrefix], bRm);
3750 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3751}
3752
3753
3754/** Opcode 0x0f 0x73 11/2. */
3755FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3756
3757/** Opcode 0x66 0x0f 0x73 11/2. */
3758FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3759
3760/** Opcode 0x66 0x0f 0x73 11/3. */
3761FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3762
3763/** Opcode 0x0f 0x73 11/6. */
3764FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3765
3766/** Opcode 0x66 0x0f 0x73 11/6. */
3767FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3768
3769/** Opcode 0x66 0x0f 0x73 11/7. */
3770FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3771
3772/**
3773 * Group 14 jump table for register variant.
3774 */
3775IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3776{
3777 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3778 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3779 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3780 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3781 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3782 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3783 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3784 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3785};
3786AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3787
3788
3789/** Opcode 0x0f 0x73. */
3790FNIEMOP_DEF(iemOp_Grp14)
3791{
3792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3793 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3794 /* register, register */
3795 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3796 + pVCpu->iem.s.idxPrefix], bRm);
3797 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3798}
3799
3800
3801/**
3802 * Common worker for MMX instructions of the form:
3803 * pxxx mm1, mm2/mem64
3804 */
3805FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3806{
3807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3808 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3809 {
3810 /*
3811 * Register, register.
3812 */
3813 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3814 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3816 IEM_MC_BEGIN(2, 0);
3817 IEM_MC_ARG(uint64_t *, pDst, 0);
3818 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3819 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3820 IEM_MC_PREPARE_FPU_USAGE();
3821 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3822 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3823 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3824 IEM_MC_ADVANCE_RIP();
3825 IEM_MC_END();
3826 }
3827 else
3828 {
3829 /*
3830 * Register, memory.
3831 */
3832 IEM_MC_BEGIN(2, 2);
3833 IEM_MC_ARG(uint64_t *, pDst, 0);
3834 IEM_MC_LOCAL(uint64_t, uSrc);
3835 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3837
3838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3841 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3842
3843 IEM_MC_PREPARE_FPU_USAGE();
3844 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3845 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3846
3847 IEM_MC_ADVANCE_RIP();
3848 IEM_MC_END();
3849 }
3850 return VINF_SUCCESS;
3851}
3852
3853
3854/**
3855 * Common worker for SSE2 instructions of the form:
3856 * pxxx xmm1, xmm2/mem128
3857 *
3858 * Proper alignment of the 128-bit operand is enforced.
3859 * Exception type 4. SSE2 CPUID checks.
3860 */
3861FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3862{
3863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3865 {
3866 /*
3867 * Register, register.
3868 */
3869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3870 IEM_MC_BEGIN(2, 0);
3871 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3872 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3873 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3874 IEM_MC_PREPARE_SSE_USAGE();
3875 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3876 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3877 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3878 IEM_MC_ADVANCE_RIP();
3879 IEM_MC_END();
3880 }
3881 else
3882 {
3883 /*
3884 * Register, memory.
3885 */
3886 IEM_MC_BEGIN(2, 2);
3887 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3888 IEM_MC_LOCAL(RTUINT128U, uSrc);
3889 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3891
3892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3894 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3895 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3896
3897 IEM_MC_PREPARE_SSE_USAGE();
3898 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3899 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3900
3901 IEM_MC_ADVANCE_RIP();
3902 IEM_MC_END();
3903 }
3904 return VINF_SUCCESS;
3905}
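
/*
 * The pcmpeqb/pcmpeqw/pcmpeqd pairs below feed the two common workers above
 * via IEMOPMEDIAF2 structures (g_iemAImpl_pcmpeqb and friends) that supply
 * both the 64-bit MMX worker (pfnU64) and the 128-bit SSE worker (pfnU128).
 * As an illustration of what such a worker computes, a plain-C sketch of the
 * 64-bit pcmpeqb operation (illustration only; the real iemAImpl_* workers
 * are declared elsewhere, take additional FPU-state arguments, and are
 * typically implemented in assembly):
 */
#if 0
static void pcmpeqbU64Ref(uint64_t *puDst, uint64_t const *puSrc)
{
    RTUINT64U uDst, uSrc;
    uDst.u = *puDst;
    uSrc.u = *puSrc;
    for (unsigned i = 0; i < 8; i++)  /* all ones on match, all zero otherwise */
        uDst.au8[i] = uDst.au8[i] == uSrc.au8[i] ? 0xff : 0x00;
    *puDst = uDst.u;
}
#endif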
3906
3907
3908/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3909FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3910{
3911 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3912 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3913}
3914
3915/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3916FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3917{
3918    IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3919 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3920}
3921
3922/* Opcode 0xf3 0x0f 0x74 - invalid */
3923/* Opcode 0xf2 0x0f 0x74 - invalid */
3924
3925
3926/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3927FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3928{
3929 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3930 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3931}
3932
3933/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3934FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3935{
3936 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3937 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3938}
3939
3940/* Opcode 0xf3 0x0f 0x75 - invalid */
3941/* Opcode 0xf2 0x0f 0x75 - invalid */
3942
3943
3944/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3945FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3946{
3947 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3948 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3949}
3950
3951/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3952FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3953{
3954    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
3955 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3956}
3957
3958/* Opcode 0xf3 0x0f 0x76 - invalid */
3959/* Opcode 0xf2 0x0f 0x76 - invalid */
3960
3961
3962/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3963FNIEMOP_STUB(iemOp_emms);
3964/* Opcode 0x66 0x0f 0x77 - invalid */
3965/* Opcode 0xf3 0x0f 0x77 - invalid */
3966/* Opcode 0xf2 0x0f 0x77 - invalid */
3967
3968/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3969FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3970/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3971FNIEMOP_STUB(iemOp_AmdGrp17);
3972/* Opcode 0xf3 0x0f 0x78 - invalid */
3973/* Opcode 0xf2 0x0f 0x78 - invalid */
3974
3975/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3976FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3977/* Opcode 0x66 0x0f 0x79 - invalid */
3978/* Opcode 0xf3 0x0f 0x79 - invalid */
3979/* Opcode 0xf2 0x0f 0x79 - invalid */
3980
3981/* Opcode 0x0f 0x7a - invalid */
3982/* Opcode 0x66 0x0f 0x7a - invalid */
3983/* Opcode 0xf3 0x0f 0x7a - invalid */
3984/* Opcode 0xf2 0x0f 0x7a - invalid */
3985
3986/* Opcode 0x0f 0x7b - invalid */
3987/* Opcode 0x66 0x0f 0x7b - invalid */
3988/* Opcode 0xf3 0x0f 0x7b - invalid */
3989/* Opcode 0xf2 0x0f 0x7b - invalid */
3990
3991/* Opcode 0x0f 0x7c - invalid */
3992/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3993FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3994/* Opcode 0xf3 0x0f 0x7c - invalid */
3995/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3996FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3997
3998/* Opcode 0x0f 0x7d - invalid */
3999/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4000FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4001/* Opcode 0xf3 0x0f 0x7d - invalid */
4002/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4003FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4004
4005
4006/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4007FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4008{
4009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4010 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4011 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4012 else
4013 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4015 {
4016 /* greg, MMX */
4017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4018 IEM_MC_BEGIN(0, 1);
4019 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4020 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4021 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4022 {
4023 IEM_MC_LOCAL(uint64_t, u64Tmp);
4024 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4025 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4026 }
4027 else
4028 {
4029 IEM_MC_LOCAL(uint32_t, u32Tmp);
4030 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4031 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4032 }
4033 IEM_MC_ADVANCE_RIP();
4034 IEM_MC_END();
4035 }
4036 else
4037 {
4038 /* [mem], MMX */
4039 IEM_MC_BEGIN(0, 2);
4040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4041 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4042        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4044 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4045 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4046 {
4047 IEM_MC_LOCAL(uint64_t, u64Tmp);
4048 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4049 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4050 }
4051 else
4052 {
4053 IEM_MC_LOCAL(uint32_t, u32Tmp);
4054 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4055 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4056 }
4057 IEM_MC_ADVANCE_RIP();
4058 IEM_MC_END();
4059 }
4060 return VINF_SUCCESS;
4061}
4062
4063/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4064FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4065{
4066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4067 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4068 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4069 else
4070 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4072 {
4073 /* greg, XMM */
4074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4075 IEM_MC_BEGIN(0, 1);
4076 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4077 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4078 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4079 {
4080 IEM_MC_LOCAL(uint64_t, u64Tmp);
4081 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4082 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4083 }
4084 else
4085 {
4086 IEM_MC_LOCAL(uint32_t, u32Tmp);
4087 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4088 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4089 }
4090 IEM_MC_ADVANCE_RIP();
4091 IEM_MC_END();
4092 }
4093 else
4094 {
4095 /* [mem], XMM */
4096 IEM_MC_BEGIN(0, 2);
4097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4098 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4099        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4101 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4102 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4103 {
4104 IEM_MC_LOCAL(uint64_t, u64Tmp);
4105 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4106 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4107 }
4108 else
4109 {
4110 IEM_MC_LOCAL(uint32_t, u32Tmp);
4111 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4112 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4113 }
4114 IEM_MC_ADVANCE_RIP();
4115 IEM_MC_END();
4116 }
4117 return VINF_SUCCESS;
4118}
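
/*
 * Note for the two store variants above: REX.W selects between the 32-bit
 * movd and the 64-bit movq form, which is why both the mnemonic and the
 * fetch/store width are picked from IEM_OP_PRF_SIZE_REX_W at decode time.
 */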
4119
4120/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
4121FNIEMOP_STUB(iemOp_movq_Vq_Wq);
4122/* Opcode 0xf2 0x0f 0x7e - invalid */
4123
4124
4125/** Opcode 0x0f 0x7f - movq Qq, Pq */
4126FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4127{
4128 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4130 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4131 {
4132 /*
4133 * Register, register.
4134 */
4135 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4136 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4138 IEM_MC_BEGIN(0, 1);
4139 IEM_MC_LOCAL(uint64_t, u64Tmp);
4140 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4141 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4142 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4143 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4144 IEM_MC_ADVANCE_RIP();
4145 IEM_MC_END();
4146 }
4147 else
4148 {
4149 /*
4150 * Register, memory.
4151 */
4152 IEM_MC_BEGIN(0, 2);
4153 IEM_MC_LOCAL(uint64_t, u64Tmp);
4154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4155
4156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4158 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4159 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4160
4161 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4162 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4163
4164 IEM_MC_ADVANCE_RIP();
4165 IEM_MC_END();
4166 }
4167 return VINF_SUCCESS;
4168}
4169
4170/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4171FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4172{
4173 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4175 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4176 {
4177 /*
4178 * Register, register.
4179 */
4180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4181 IEM_MC_BEGIN(0, 0);
4182 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4183 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4184 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4185 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4186 IEM_MC_ADVANCE_RIP();
4187 IEM_MC_END();
4188 }
4189 else
4190 {
4191 /*
4192 * Register, memory.
4193 */
4194 IEM_MC_BEGIN(0, 2);
4195 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4197
4198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4200 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4201 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4202
4203 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4204 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4205
4206 IEM_MC_ADVANCE_RIP();
4207 IEM_MC_END();
4208 }
4209 return VINF_SUCCESS;
4210}
4211
4212/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4213FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4214{
4215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4216 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4218 {
4219 /*
4220 * Register, register.
4221 */
4222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4223 IEM_MC_BEGIN(0, 0);
4224 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4225 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4226 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4227 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4228 IEM_MC_ADVANCE_RIP();
4229 IEM_MC_END();
4230 }
4231 else
4232 {
4233 /*
4234 * Register, memory.
4235 */
4236 IEM_MC_BEGIN(0, 2);
4237 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4239
4240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4242 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4243 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4244
4245 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4246 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4247
4248 IEM_MC_ADVANCE_RIP();
4249 IEM_MC_END();
4250 }
4251 return VINF_SUCCESS;
4252}
4253
4254/* Opcode 0xf2 0x0f 0x7f - invalid */
4255
4256
4257
4258/** Opcode 0x0f 0x80. */
4259FNIEMOP_DEF(iemOp_jo_Jv)
4260{
4261 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4262 IEMOP_HLP_MIN_386();
4263 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4264 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4265 {
4266 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4268
4269 IEM_MC_BEGIN(0, 0);
4270 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4271 IEM_MC_REL_JMP_S16(i16Imm);
4272 } IEM_MC_ELSE() {
4273 IEM_MC_ADVANCE_RIP();
4274 } IEM_MC_ENDIF();
4275 IEM_MC_END();
4276 }
4277 else
4278 {
4279 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4281
4282 IEM_MC_BEGIN(0, 0);
4283 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4284 IEM_MC_REL_JMP_S32(i32Imm);
4285 } IEM_MC_ELSE() {
4286 IEM_MC_ADVANCE_RIP();
4287 } IEM_MC_ENDIF();
4288 IEM_MC_END();
4289 }
4290 return VINF_SUCCESS;
4291}
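
/*
 * The remaining Jcc opcodes (0x0f 0x81 thru 0x8f) below follow the same
 * template as jo above: a rel16 immediate when the effective operand size is
 * 16-bit, otherwise rel32 (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE makes the 64-bit
 * operand size the default in long mode, where the rel32 is sign-extended);
 * only the EFLAGS condition tested differs between them.
 */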
4292
4293
4294/** Opcode 0x0f 0x81. */
4295FNIEMOP_DEF(iemOp_jno_Jv)
4296{
4297 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4298 IEMOP_HLP_MIN_386();
4299 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4300 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4301 {
4302 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4304
4305 IEM_MC_BEGIN(0, 0);
4306 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4307 IEM_MC_ADVANCE_RIP();
4308 } IEM_MC_ELSE() {
4309 IEM_MC_REL_JMP_S16(i16Imm);
4310 } IEM_MC_ENDIF();
4311 IEM_MC_END();
4312 }
4313 else
4314 {
4315 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4317
4318 IEM_MC_BEGIN(0, 0);
4319 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4320 IEM_MC_ADVANCE_RIP();
4321 } IEM_MC_ELSE() {
4322 IEM_MC_REL_JMP_S32(i32Imm);
4323 } IEM_MC_ENDIF();
4324 IEM_MC_END();
4325 }
4326 return VINF_SUCCESS;
4327}
4328
4329
4330/** Opcode 0x0f 0x82. */
4331FNIEMOP_DEF(iemOp_jc_Jv)
4332{
4333 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4334 IEMOP_HLP_MIN_386();
4335 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4336 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4337 {
4338 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4340
4341 IEM_MC_BEGIN(0, 0);
4342 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4343 IEM_MC_REL_JMP_S16(i16Imm);
4344 } IEM_MC_ELSE() {
4345 IEM_MC_ADVANCE_RIP();
4346 } IEM_MC_ENDIF();
4347 IEM_MC_END();
4348 }
4349 else
4350 {
4351 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4353
4354 IEM_MC_BEGIN(0, 0);
4355 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4356 IEM_MC_REL_JMP_S32(i32Imm);
4357 } IEM_MC_ELSE() {
4358 IEM_MC_ADVANCE_RIP();
4359 } IEM_MC_ENDIF();
4360 IEM_MC_END();
4361 }
4362 return VINF_SUCCESS;
4363}
4364
4365
4366/** Opcode 0x0f 0x83. */
4367FNIEMOP_DEF(iemOp_jnc_Jv)
4368{
4369 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4370 IEMOP_HLP_MIN_386();
4371 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4372 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4373 {
4374 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4376
4377 IEM_MC_BEGIN(0, 0);
4378 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4379 IEM_MC_ADVANCE_RIP();
4380 } IEM_MC_ELSE() {
4381 IEM_MC_REL_JMP_S16(i16Imm);
4382 } IEM_MC_ENDIF();
4383 IEM_MC_END();
4384 }
4385 else
4386 {
4387 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4389
4390 IEM_MC_BEGIN(0, 0);
4391 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4392 IEM_MC_ADVANCE_RIP();
4393 } IEM_MC_ELSE() {
4394 IEM_MC_REL_JMP_S32(i32Imm);
4395 } IEM_MC_ENDIF();
4396 IEM_MC_END();
4397 }
4398 return VINF_SUCCESS;
4399}
4400
4401
4402/** Opcode 0x0f 0x84. */
4403FNIEMOP_DEF(iemOp_je_Jv)
4404{
4405 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4406 IEMOP_HLP_MIN_386();
4407 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4408 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4409 {
4410 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4412
4413 IEM_MC_BEGIN(0, 0);
4414 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4415 IEM_MC_REL_JMP_S16(i16Imm);
4416 } IEM_MC_ELSE() {
4417 IEM_MC_ADVANCE_RIP();
4418 } IEM_MC_ENDIF();
4419 IEM_MC_END();
4420 }
4421 else
4422 {
4423 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4425
4426 IEM_MC_BEGIN(0, 0);
4427 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4428 IEM_MC_REL_JMP_S32(i32Imm);
4429 } IEM_MC_ELSE() {
4430 IEM_MC_ADVANCE_RIP();
4431 } IEM_MC_ENDIF();
4432 IEM_MC_END();
4433 }
4434 return VINF_SUCCESS;
4435}
4436
4437
4438/** Opcode 0x0f 0x85. */
4439FNIEMOP_DEF(iemOp_jne_Jv)
4440{
4441 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4442 IEMOP_HLP_MIN_386();
4443 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4444 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4445 {
4446 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4448
4449 IEM_MC_BEGIN(0, 0);
4450 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4451 IEM_MC_ADVANCE_RIP();
4452 } IEM_MC_ELSE() {
4453 IEM_MC_REL_JMP_S16(i16Imm);
4454 } IEM_MC_ENDIF();
4455 IEM_MC_END();
4456 }
4457 else
4458 {
4459 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4461
4462 IEM_MC_BEGIN(0, 0);
4463 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4464 IEM_MC_ADVANCE_RIP();
4465 } IEM_MC_ELSE() {
4466 IEM_MC_REL_JMP_S32(i32Imm);
4467 } IEM_MC_ENDIF();
4468 IEM_MC_END();
4469 }
4470 return VINF_SUCCESS;
4471}
4472
4473
4474/** Opcode 0x0f 0x86. */
4475FNIEMOP_DEF(iemOp_jbe_Jv)
4476{
4477 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4478 IEMOP_HLP_MIN_386();
4479 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4480 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4481 {
4482 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4484
4485 IEM_MC_BEGIN(0, 0);
4486 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4487 IEM_MC_REL_JMP_S16(i16Imm);
4488 } IEM_MC_ELSE() {
4489 IEM_MC_ADVANCE_RIP();
4490 } IEM_MC_ENDIF();
4491 IEM_MC_END();
4492 }
4493 else
4494 {
4495 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4497
4498 IEM_MC_BEGIN(0, 0);
4499 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4500 IEM_MC_REL_JMP_S32(i32Imm);
4501 } IEM_MC_ELSE() {
4502 IEM_MC_ADVANCE_RIP();
4503 } IEM_MC_ENDIF();
4504 IEM_MC_END();
4505 }
4506 return VINF_SUCCESS;
4507}
4508
4509
4510/** Opcode 0x0f 0x87. */
4511FNIEMOP_DEF(iemOp_jnbe_Jv)
4512{
4513 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4514 IEMOP_HLP_MIN_386();
4515 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4516 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4517 {
4518 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4520
4521 IEM_MC_BEGIN(0, 0);
4522 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4523 IEM_MC_ADVANCE_RIP();
4524 } IEM_MC_ELSE() {
4525 IEM_MC_REL_JMP_S16(i16Imm);
4526 } IEM_MC_ENDIF();
4527 IEM_MC_END();
4528 }
4529 else
4530 {
4531 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4533
4534 IEM_MC_BEGIN(0, 0);
4535 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4536 IEM_MC_ADVANCE_RIP();
4537 } IEM_MC_ELSE() {
4538 IEM_MC_REL_JMP_S32(i32Imm);
4539 } IEM_MC_ENDIF();
4540 IEM_MC_END();
4541 }
4542 return VINF_SUCCESS;
4543}
4544
4545
4546/** Opcode 0x0f 0x88. */
4547FNIEMOP_DEF(iemOp_js_Jv)
4548{
4549 IEMOP_MNEMONIC(js_Jv, "js Jv");
4550 IEMOP_HLP_MIN_386();
4551 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4552 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4553 {
4554 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4556
4557 IEM_MC_BEGIN(0, 0);
4558 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4559 IEM_MC_REL_JMP_S16(i16Imm);
4560 } IEM_MC_ELSE() {
4561 IEM_MC_ADVANCE_RIP();
4562 } IEM_MC_ENDIF();
4563 IEM_MC_END();
4564 }
4565 else
4566 {
4567 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4569
4570 IEM_MC_BEGIN(0, 0);
4571 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4572 IEM_MC_REL_JMP_S32(i32Imm);
4573 } IEM_MC_ELSE() {
4574 IEM_MC_ADVANCE_RIP();
4575 } IEM_MC_ENDIF();
4576 IEM_MC_END();
4577 }
4578 return VINF_SUCCESS;
4579}
4580
4581
4582/** Opcode 0x0f 0x89. */
4583FNIEMOP_DEF(iemOp_jns_Jv)
4584{
4585 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4586 IEMOP_HLP_MIN_386();
4587 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4588 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4589 {
4590 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4592
4593 IEM_MC_BEGIN(0, 0);
4594 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4595 IEM_MC_ADVANCE_RIP();
4596 } IEM_MC_ELSE() {
4597 IEM_MC_REL_JMP_S16(i16Imm);
4598 } IEM_MC_ENDIF();
4599 IEM_MC_END();
4600 }
4601 else
4602 {
4603 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4605
4606 IEM_MC_BEGIN(0, 0);
4607 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4608 IEM_MC_ADVANCE_RIP();
4609 } IEM_MC_ELSE() {
4610 IEM_MC_REL_JMP_S32(i32Imm);
4611 } IEM_MC_ENDIF();
4612 IEM_MC_END();
4613 }
4614 return VINF_SUCCESS;
4615}
4616
4617
4618/** Opcode 0x0f 0x8a. */
4619FNIEMOP_DEF(iemOp_jp_Jv)
4620{
4621 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4622 IEMOP_HLP_MIN_386();
4623 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4624 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4625 {
4626 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4628
4629 IEM_MC_BEGIN(0, 0);
4630 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4631 IEM_MC_REL_JMP_S16(i16Imm);
4632 } IEM_MC_ELSE() {
4633 IEM_MC_ADVANCE_RIP();
4634 } IEM_MC_ENDIF();
4635 IEM_MC_END();
4636 }
4637 else
4638 {
4639 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4641
4642 IEM_MC_BEGIN(0, 0);
4643 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4644 IEM_MC_REL_JMP_S32(i32Imm);
4645 } IEM_MC_ELSE() {
4646 IEM_MC_ADVANCE_RIP();
4647 } IEM_MC_ENDIF();
4648 IEM_MC_END();
4649 }
4650 return VINF_SUCCESS;
4651}
4652
4653
4654/** Opcode 0x0f 0x8b. */
4655FNIEMOP_DEF(iemOp_jnp_Jv)
4656{
4657 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4658 IEMOP_HLP_MIN_386();
4659 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4660 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4661 {
4662 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4664
4665 IEM_MC_BEGIN(0, 0);
4666 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4667 IEM_MC_ADVANCE_RIP();
4668 } IEM_MC_ELSE() {
4669 IEM_MC_REL_JMP_S16(i16Imm);
4670 } IEM_MC_ENDIF();
4671 IEM_MC_END();
4672 }
4673 else
4674 {
4675 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4677
4678 IEM_MC_BEGIN(0, 0);
4679 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4680 IEM_MC_ADVANCE_RIP();
4681 } IEM_MC_ELSE() {
4682 IEM_MC_REL_JMP_S32(i32Imm);
4683 } IEM_MC_ENDIF();
4684 IEM_MC_END();
4685 }
4686 return VINF_SUCCESS;
4687}
4688
4689
4690/** Opcode 0x0f 0x8c. */
4691FNIEMOP_DEF(iemOp_jl_Jv)
4692{
4693 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4694 IEMOP_HLP_MIN_386();
4695 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4696 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4697 {
4698 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4700
4701 IEM_MC_BEGIN(0, 0);
4702 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4703 IEM_MC_REL_JMP_S16(i16Imm);
4704 } IEM_MC_ELSE() {
4705 IEM_MC_ADVANCE_RIP();
4706 } IEM_MC_ENDIF();
4707 IEM_MC_END();
4708 }
4709 else
4710 {
4711 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4713
4714 IEM_MC_BEGIN(0, 0);
4715 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4716 IEM_MC_REL_JMP_S32(i32Imm);
4717 } IEM_MC_ELSE() {
4718 IEM_MC_ADVANCE_RIP();
4719 } IEM_MC_ENDIF();
4720 IEM_MC_END();
4721 }
4722 return VINF_SUCCESS;
4723}
4724
4725
4726/** Opcode 0x0f 0x8d. */
4727FNIEMOP_DEF(iemOp_jnl_Jv)
4728{
4729 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4730 IEMOP_HLP_MIN_386();
4731 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4732 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4733 {
4734 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4736
4737 IEM_MC_BEGIN(0, 0);
4738 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4739 IEM_MC_ADVANCE_RIP();
4740 } IEM_MC_ELSE() {
4741 IEM_MC_REL_JMP_S16(i16Imm);
4742 } IEM_MC_ENDIF();
4743 IEM_MC_END();
4744 }
4745 else
4746 {
4747 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4749
4750 IEM_MC_BEGIN(0, 0);
4751 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4752 IEM_MC_ADVANCE_RIP();
4753 } IEM_MC_ELSE() {
4754 IEM_MC_REL_JMP_S32(i32Imm);
4755 } IEM_MC_ENDIF();
4756 IEM_MC_END();
4757 }
4758 return VINF_SUCCESS;
4759}
4760
4761
4762/** Opcode 0x0f 0x8e. */
4763FNIEMOP_DEF(iemOp_jle_Jv)
4764{
4765 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4766 IEMOP_HLP_MIN_386();
4767 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4768 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4769 {
4770 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4772
4773 IEM_MC_BEGIN(0, 0);
4774 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4775 IEM_MC_REL_JMP_S16(i16Imm);
4776 } IEM_MC_ELSE() {
4777 IEM_MC_ADVANCE_RIP();
4778 } IEM_MC_ENDIF();
4779 IEM_MC_END();
4780 }
4781 else
4782 {
4783 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4785
4786 IEM_MC_BEGIN(0, 0);
4787 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4788 IEM_MC_REL_JMP_S32(i32Imm);
4789 } IEM_MC_ELSE() {
4790 IEM_MC_ADVANCE_RIP();
4791 } IEM_MC_ENDIF();
4792 IEM_MC_END();
4793 }
4794 return VINF_SUCCESS;
4795}
4796
4797
4798/** Opcode 0x0f 0x8f. */
4799FNIEMOP_DEF(iemOp_jnle_Jv)
4800{
4801 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4802 IEMOP_HLP_MIN_386();
4803 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4804 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4805 {
4806 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4808
4809 IEM_MC_BEGIN(0, 0);
4810 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4811 IEM_MC_ADVANCE_RIP();
4812 } IEM_MC_ELSE() {
4813 IEM_MC_REL_JMP_S16(i16Imm);
4814 } IEM_MC_ENDIF();
4815 IEM_MC_END();
4816 }
4817 else
4818 {
4819 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4821
4822 IEM_MC_BEGIN(0, 0);
4823 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4824 IEM_MC_ADVANCE_RIP();
4825 } IEM_MC_ELSE() {
4826 IEM_MC_REL_JMP_S32(i32Imm);
4827 } IEM_MC_ENDIF();
4828 IEM_MC_END();
4829 }
4830 return VINF_SUCCESS;
4831}
4832
4833
4834/** Opcode 0x0f 0x90. */
4835FNIEMOP_DEF(iemOp_seto_Eb)
4836{
4837 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4838 IEMOP_HLP_MIN_386();
4839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4840
4841 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4842 * any way. AMD says it's "unused", whatever that means. We're
4843 * ignoring for now. */
4844 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4845 {
4846 /* register target */
4847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4848 IEM_MC_BEGIN(0, 0);
4849 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4850 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4851 } IEM_MC_ELSE() {
4852 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4853 } IEM_MC_ENDIF();
4854 IEM_MC_ADVANCE_RIP();
4855 IEM_MC_END();
4856 }
4857 else
4858 {
4859 /* memory target */
4860 IEM_MC_BEGIN(0, 1);
4861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4864 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4865 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4866 } IEM_MC_ELSE() {
4867 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4868 } IEM_MC_ENDIF();
4869 IEM_MC_ADVANCE_RIP();
4870 IEM_MC_END();
4871 }
4872 return VINF_SUCCESS;
4873}
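
/*
 * The SETcc opcodes (0x0f 0x90 thru 0x9f) below all follow this template:
 * the ModR/M reg field is ignored (see the @todo above), and the byte
 * destination is either a general register (mod == 3) or memory; only the
 * EFLAGS condition tested differs between them.
 */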
4874
4875
4876/** Opcode 0x0f 0x91. */
4877FNIEMOP_DEF(iemOp_setno_Eb)
4878{
4879 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4880 IEMOP_HLP_MIN_386();
4881 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4882
4883 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4884 * any way. AMD says it's "unused", whatever that means. We're
4885 * ignoring for now. */
4886 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4887 {
4888 /* register target */
4889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4890 IEM_MC_BEGIN(0, 0);
4891 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4892 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4893 } IEM_MC_ELSE() {
4894 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4895 } IEM_MC_ENDIF();
4896 IEM_MC_ADVANCE_RIP();
4897 IEM_MC_END();
4898 }
4899 else
4900 {
4901 /* memory target */
4902 IEM_MC_BEGIN(0, 1);
4903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4906 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4907 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4908 } IEM_MC_ELSE() {
4909 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4910 } IEM_MC_ENDIF();
4911 IEM_MC_ADVANCE_RIP();
4912 IEM_MC_END();
4913 }
4914 return VINF_SUCCESS;
4915}
4916
4917
4918/** Opcode 0x0f 0x92. */
4919FNIEMOP_DEF(iemOp_setc_Eb)
4920{
4921 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4922 IEMOP_HLP_MIN_386();
4923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4924
4925 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4926 * any way. AMD says it's "unused", whatever that means. We're
4927 * ignoring for now. */
4928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4929 {
4930 /* register target */
4931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4932 IEM_MC_BEGIN(0, 0);
4933 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4934 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4935 } IEM_MC_ELSE() {
4936 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4937 } IEM_MC_ENDIF();
4938 IEM_MC_ADVANCE_RIP();
4939 IEM_MC_END();
4940 }
4941 else
4942 {
4943 /* memory target */
4944 IEM_MC_BEGIN(0, 1);
4945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4948 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4949 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4950 } IEM_MC_ELSE() {
4951 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4952 } IEM_MC_ENDIF();
4953 IEM_MC_ADVANCE_RIP();
4954 IEM_MC_END();
4955 }
4956 return VINF_SUCCESS;
4957}
4958
4959
4960/** Opcode 0x0f 0x93. */
4961FNIEMOP_DEF(iemOp_setnc_Eb)
4962{
4963 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4964 IEMOP_HLP_MIN_386();
4965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4966
4967 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4968 * any way. AMD says it's "unused", whatever that means. We're
4969 * ignoring for now. */
4970 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4971 {
4972 /* register target */
4973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4974 IEM_MC_BEGIN(0, 0);
4975 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4976 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4977 } IEM_MC_ELSE() {
4978 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4979 } IEM_MC_ENDIF();
4980 IEM_MC_ADVANCE_RIP();
4981 IEM_MC_END();
4982 }
4983 else
4984 {
4985 /* memory target */
4986 IEM_MC_BEGIN(0, 1);
4987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4990 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4991 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4992 } IEM_MC_ELSE() {
4993 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4994 } IEM_MC_ENDIF();
4995 IEM_MC_ADVANCE_RIP();
4996 IEM_MC_END();
4997 }
4998 return VINF_SUCCESS;
4999}
5000
5001
5002/** Opcode 0x0f 0x94. */
5003FNIEMOP_DEF(iemOp_sete_Eb)
5004{
5005 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5006 IEMOP_HLP_MIN_386();
5007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5008
5009 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5010 * any way. AMD says it's "unused", whatever that means. We're
5011 * ignoring for now. */
5012 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5013 {
5014 /* register target */
5015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5016 IEM_MC_BEGIN(0, 0);
5017 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5018 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5019 } IEM_MC_ELSE() {
5020 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5021 } IEM_MC_ENDIF();
5022 IEM_MC_ADVANCE_RIP();
5023 IEM_MC_END();
5024 }
5025 else
5026 {
5027 /* memory target */
5028 IEM_MC_BEGIN(0, 1);
5029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5032 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5033 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5034 } IEM_MC_ELSE() {
5035 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5036 } IEM_MC_ENDIF();
5037 IEM_MC_ADVANCE_RIP();
5038 IEM_MC_END();
5039 }
5040 return VINF_SUCCESS;
5041}
5042
5043
5044/** Opcode 0x0f 0x95. */
5045FNIEMOP_DEF(iemOp_setne_Eb)
5046{
5047 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5048 IEMOP_HLP_MIN_386();
5049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5050
5051 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5052 * any way. AMD says it's "unused", whatever that means. We're
5053 * ignoring for now. */
5054 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5055 {
5056 /* register target */
5057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5058 IEM_MC_BEGIN(0, 0);
5059 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5060 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5061 } IEM_MC_ELSE() {
5062 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5063 } IEM_MC_ENDIF();
5064 IEM_MC_ADVANCE_RIP();
5065 IEM_MC_END();
5066 }
5067 else
5068 {
5069 /* memory target */
5070 IEM_MC_BEGIN(0, 1);
5071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5074 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5075 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5076 } IEM_MC_ELSE() {
5077 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5078 } IEM_MC_ENDIF();
5079 IEM_MC_ADVANCE_RIP();
5080 IEM_MC_END();
5081 }
5082 return VINF_SUCCESS;
5083}
5084
5085
5086/** Opcode 0x0f 0x96. */
5087FNIEMOP_DEF(iemOp_setbe_Eb)
5088{
5089 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5090 IEMOP_HLP_MIN_386();
5091 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5092
5093 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5094 * any way. AMD says it's "unused", whatever that means. We're
5095 * ignoring for now. */
5096 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5097 {
5098 /* register target */
5099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5100 IEM_MC_BEGIN(0, 0);
5101 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5102 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5103 } IEM_MC_ELSE() {
5104 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5105 } IEM_MC_ENDIF();
5106 IEM_MC_ADVANCE_RIP();
5107 IEM_MC_END();
5108 }
5109 else
5110 {
5111 /* memory target */
5112 IEM_MC_BEGIN(0, 1);
5113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5116 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5117 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5118 } IEM_MC_ELSE() {
5119 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5120 } IEM_MC_ENDIF();
5121 IEM_MC_ADVANCE_RIP();
5122 IEM_MC_END();
5123 }
5124 return VINF_SUCCESS;
5125}
5126
5127
5128/** Opcode 0x0f 0x97. */
5129FNIEMOP_DEF(iemOp_setnbe_Eb)
5130{
5131 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5132 IEMOP_HLP_MIN_386();
5133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5134
5135 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5136 * any way. AMD says it's "unused", whatever that means. We're
5137 * ignoring for now. */
5138 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5139 {
5140 /* register target */
5141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5142 IEM_MC_BEGIN(0, 0);
5143 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5144 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5145 } IEM_MC_ELSE() {
5146 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5147 } IEM_MC_ENDIF();
5148 IEM_MC_ADVANCE_RIP();
5149 IEM_MC_END();
5150 }
5151 else
5152 {
5153 /* memory target */
5154 IEM_MC_BEGIN(0, 1);
5155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5158 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5159 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5160 } IEM_MC_ELSE() {
5161 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5162 } IEM_MC_ENDIF();
5163 IEM_MC_ADVANCE_RIP();
5164 IEM_MC_END();
5165 }
5166 return VINF_SUCCESS;
5167}
5168
5169
5170/** Opcode 0x0f 0x98. */
5171FNIEMOP_DEF(iemOp_sets_Eb)
5172{
5173 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5174 IEMOP_HLP_MIN_386();
5175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5176
5177 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5178 * any way. AMD says it's "unused", whatever that means. We're
5179 * ignoring for now. */
5180 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5181 {
5182 /* register target */
5183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5184 IEM_MC_BEGIN(0, 0);
5185 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5186 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5187 } IEM_MC_ELSE() {
5188 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5189 } IEM_MC_ENDIF();
5190 IEM_MC_ADVANCE_RIP();
5191 IEM_MC_END();
5192 }
5193 else
5194 {
5195 /* memory target */
5196 IEM_MC_BEGIN(0, 1);
5197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5200 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5201 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5202 } IEM_MC_ELSE() {
5203 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5204 } IEM_MC_ENDIF();
5205 IEM_MC_ADVANCE_RIP();
5206 IEM_MC_END();
5207 }
5208 return VINF_SUCCESS;
5209}
5210
5211
5212/** Opcode 0x0f 0x99. */
5213FNIEMOP_DEF(iemOp_setns_Eb)
5214{
5215 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5216 IEMOP_HLP_MIN_386();
5217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5218
5219 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5220 * any way. AMD says it's "unused", whatever that means. We're
5221 * ignoring for now. */
5222 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5223 {
5224 /* register target */
5225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5226 IEM_MC_BEGIN(0, 0);
5227 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5228 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5229 } IEM_MC_ELSE() {
5230 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5231 } IEM_MC_ENDIF();
5232 IEM_MC_ADVANCE_RIP();
5233 IEM_MC_END();
5234 }
5235 else
5236 {
5237 /* memory target */
5238 IEM_MC_BEGIN(0, 1);
5239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5242 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5243 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5244 } IEM_MC_ELSE() {
5245 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5246 } IEM_MC_ENDIF();
5247 IEM_MC_ADVANCE_RIP();
5248 IEM_MC_END();
5249 }
5250 return VINF_SUCCESS;
5251}
5252
5253
5254/** Opcode 0x0f 0x9a. */
5255FNIEMOP_DEF(iemOp_setp_Eb)
5256{
5257 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5258 IEMOP_HLP_MIN_386();
5259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5260
5261 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5262 * any way. AMD says it's "unused", whatever that means. We're
5263 * ignoring for now. */
5264 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5265 {
5266 /* register target */
5267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5268 IEM_MC_BEGIN(0, 0);
5269 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5270 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5271 } IEM_MC_ELSE() {
5272 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5273 } IEM_MC_ENDIF();
5274 IEM_MC_ADVANCE_RIP();
5275 IEM_MC_END();
5276 }
5277 else
5278 {
5279 /* memory target */
5280 IEM_MC_BEGIN(0, 1);
5281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5284 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5285 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5286 } IEM_MC_ELSE() {
5287 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5288 } IEM_MC_ENDIF();
5289 IEM_MC_ADVANCE_RIP();
5290 IEM_MC_END();
5291 }
5292 return VINF_SUCCESS;
5293}
5294
5295
5296/** Opcode 0x0f 0x9b. */
5297FNIEMOP_DEF(iemOp_setnp_Eb)
5298{
5299 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5300 IEMOP_HLP_MIN_386();
5301 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5302
5303 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5304 * any way. AMD says it's "unused", whatever that means. We're
5305 * ignoring for now. */
5306 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5307 {
5308 /* register target */
5309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5310 IEM_MC_BEGIN(0, 0);
5311 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5312 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5313 } IEM_MC_ELSE() {
5314 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5315 } IEM_MC_ENDIF();
5316 IEM_MC_ADVANCE_RIP();
5317 IEM_MC_END();
5318 }
5319 else
5320 {
5321 /* memory target */
5322 IEM_MC_BEGIN(0, 1);
5323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5326 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5327 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5328 } IEM_MC_ELSE() {
5329 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5330 } IEM_MC_ENDIF();
5331 IEM_MC_ADVANCE_RIP();
5332 IEM_MC_END();
5333 }
5334 return VINF_SUCCESS;
5335}
5336
5337
5338/** Opcode 0x0f 0x9c. */
5339FNIEMOP_DEF(iemOp_setl_Eb)
5340{
5341 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5342 IEMOP_HLP_MIN_386();
5343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5344
5345 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5346 * any way. AMD says it's "unused", whatever that means. We're
5347 * ignoring for now. */
5348 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5349 {
5350 /* register target */
5351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5352 IEM_MC_BEGIN(0, 0);
5353 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5354 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5355 } IEM_MC_ELSE() {
5356 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5357 } IEM_MC_ENDIF();
5358 IEM_MC_ADVANCE_RIP();
5359 IEM_MC_END();
5360 }
5361 else
5362 {
5363 /* memory target */
5364 IEM_MC_BEGIN(0, 1);
5365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5368 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5369 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5370 } IEM_MC_ELSE() {
5371 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5372 } IEM_MC_ENDIF();
5373 IEM_MC_ADVANCE_RIP();
5374 IEM_MC_END();
5375 }
5376 return VINF_SUCCESS;
5377}
5378
5379
5380/** Opcode 0x0f 0x9d. */
5381FNIEMOP_DEF(iemOp_setnl_Eb)
5382{
5383 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5384 IEMOP_HLP_MIN_386();
5385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5386
5387 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5388 * any way. AMD says it's "unused", whatever that means. We're
5389 * ignoring for now. */
5390 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5391 {
5392 /* register target */
5393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5394 IEM_MC_BEGIN(0, 0);
5395 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5396 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5397 } IEM_MC_ELSE() {
5398 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5399 } IEM_MC_ENDIF();
5400 IEM_MC_ADVANCE_RIP();
5401 IEM_MC_END();
5402 }
5403 else
5404 {
5405 /* memory target */
5406 IEM_MC_BEGIN(0, 1);
5407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5410 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5411 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5412 } IEM_MC_ELSE() {
5413 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5414 } IEM_MC_ENDIF();
5415 IEM_MC_ADVANCE_RIP();
5416 IEM_MC_END();
5417 }
5418 return VINF_SUCCESS;
5419}
5420
5421
5422/** Opcode 0x0f 0x9e. */
5423FNIEMOP_DEF(iemOp_setle_Eb)
5424{
5425 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5426 IEMOP_HLP_MIN_386();
5427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5428
5429 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5430 * any way. AMD says it's "unused", whatever that means. We're
5431 * ignoring for now. */
5432 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5433 {
5434 /* register target */
5435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5436 IEM_MC_BEGIN(0, 0);
5437 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5438 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5439 } IEM_MC_ELSE() {
5440 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5441 } IEM_MC_ENDIF();
5442 IEM_MC_ADVANCE_RIP();
5443 IEM_MC_END();
5444 }
5445 else
5446 {
5447 /* memory target */
5448 IEM_MC_BEGIN(0, 1);
5449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5452 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5453 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5454 } IEM_MC_ELSE() {
5455 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5456 } IEM_MC_ENDIF();
5457 IEM_MC_ADVANCE_RIP();
5458 IEM_MC_END();
5459 }
5460 return VINF_SUCCESS;
5461}
5462
5463
5464/** Opcode 0x0f 0x9f. */
5465FNIEMOP_DEF(iemOp_setnle_Eb)
5466{
5467 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5468 IEMOP_HLP_MIN_386();
5469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5470
5471 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5472 * any way. AMD says it's "unused", whatever that means. We're
5473 * ignoring it for now. */
5474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5475 {
5476 /* register target */
5477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5478 IEM_MC_BEGIN(0, 0);
5479 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5480 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5481 } IEM_MC_ELSE() {
5482 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5483 } IEM_MC_ENDIF();
5484 IEM_MC_ADVANCE_RIP();
5485 IEM_MC_END();
5486 }
5487 else
5488 {
5489 /* memory target */
5490 IEM_MC_BEGIN(0, 1);
5491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5494 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5495 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5496 } IEM_MC_ELSE() {
5497 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5498 } IEM_MC_ENDIF();
5499 IEM_MC_ADVANCE_RIP();
5500 IEM_MC_END();
5501 }
5502 return VINF_SUCCESS;
5503}
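
/*
 * A reference model of the signed SETcc conditions tested above (an
 * illustrative helper only, not called by the decoder; the real checks are
 * the IEM_MC_IF_EFL_* macros):
 *
 *     // setl  / setnge:  SF != OF
 *     // setnl / setge:   SF == OF
 *     // setle / setng:   ZF || SF != OF
 *     // setnle/ setg:    !ZF && SF == OF
 *     static bool iemExampleCondSetG(uint32_t fEFlags)
 *     {
 *         bool const fSf = RT_BOOL(fEFlags & X86_EFL_SF);
 *         bool const fOf = RT_BOOL(fEFlags & X86_EFL_OF);
 *         return !(fEFlags & X86_EFL_ZF) && fSf == fOf;
 *     }
 */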
5504
5505
5506/**
5507 * Common 'push segment-register' helper.
5508 */
5509FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5510{
5511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5512 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5513 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5514
5515 switch (pVCpu->iem.s.enmEffOpSize)
5516 {
5517 case IEMMODE_16BIT:
5518 IEM_MC_BEGIN(0, 1);
5519 IEM_MC_LOCAL(uint16_t, u16Value);
5520 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5521 IEM_MC_PUSH_U16(u16Value);
5522 IEM_MC_ADVANCE_RIP();
5523 IEM_MC_END();
5524 break;
5525
5526 case IEMMODE_32BIT:
5527 IEM_MC_BEGIN(0, 1);
5528 IEM_MC_LOCAL(uint32_t, u32Value);
5529 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5530 IEM_MC_PUSH_U32_SREG(u32Value);
5531 IEM_MC_ADVANCE_RIP();
5532 IEM_MC_END();
5533 break;
5534
5535 case IEMMODE_64BIT:
5536 IEM_MC_BEGIN(0, 1);
5537 IEM_MC_LOCAL(uint64_t, u64Value);
5538 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5539 IEM_MC_PUSH_U64(u64Value);
5540 IEM_MC_ADVANCE_RIP();
5541 IEM_MC_END();
5542 break;
5543 }
5544
5545 return VINF_SUCCESS;
5546}
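
/*
 * Note on the 32-bit case above: IEM_MC_PUSH_U32_SREG is used rather than
 * IEM_MC_PUSH_U32 because real CPUs may perform only a 16-bit write when a
 * segment register is pushed with a 32-bit operand size, leaving bits 31:16
 * of the stack slot unchanged. Sketch of that effect (illustrative only):
 *
 *     // pu32Slot = the stack slot ESP points at after the push:
 *     *(uint16_t *)pu32Slot = (uint16_t)u32Value;  // low word = selector
 *     // bits 31:16 of *pu32Slot are implementation specific
 */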
5547
5548
5549/** Opcode 0x0f 0xa0. */
5550FNIEMOP_DEF(iemOp_push_fs)
5551{
5552 IEMOP_MNEMONIC(push_fs, "push fs");
5553 IEMOP_HLP_MIN_386();
5554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5555 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5556}
5557
5558
5559/** Opcode 0x0f 0xa1. */
5560FNIEMOP_DEF(iemOp_pop_fs)
5561{
5562 IEMOP_MNEMONIC(pop_fs, "pop fs");
5563 IEMOP_HLP_MIN_386();
5564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5565 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5566}
5567
5568
5569/** Opcode 0x0f 0xa2. */
5570FNIEMOP_DEF(iemOp_cpuid)
5571{
5572 IEMOP_MNEMONIC(cpuid, "cpuid");
5573 IEMOP_HLP_MIN_486(); /* not all 486es. */
5574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5575 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5576}
5577
5578
5579/**
5580 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5581 * iemOp_bts_Ev_Gv.
5582 */
5583FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5584{
5585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5586 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5587
5588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5589 {
5590 /* register destination. */
5591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5592 switch (pVCpu->iem.s.enmEffOpSize)
5593 {
5594 case IEMMODE_16BIT:
5595 IEM_MC_BEGIN(3, 0);
5596 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5597 IEM_MC_ARG(uint16_t, u16Src, 1);
5598 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5599
5600 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5601 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5602 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5603 IEM_MC_REF_EFLAGS(pEFlags);
5604 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5605
5606 IEM_MC_ADVANCE_RIP();
5607 IEM_MC_END();
5608 return VINF_SUCCESS;
5609
5610 case IEMMODE_32BIT:
5611 IEM_MC_BEGIN(3, 0);
5612 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5613 IEM_MC_ARG(uint32_t, u32Src, 1);
5614 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5615
5616 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5617 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5618 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5619 IEM_MC_REF_EFLAGS(pEFlags);
5620 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5621
5622 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5623 IEM_MC_ADVANCE_RIP();
5624 IEM_MC_END();
5625 return VINF_SUCCESS;
5626
5627 case IEMMODE_64BIT:
5628 IEM_MC_BEGIN(3, 0);
5629 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5630 IEM_MC_ARG(uint64_t, u64Src, 1);
5631 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5632
5633 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5634 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5635 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5636 IEM_MC_REF_EFLAGS(pEFlags);
5637 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5638
5639 IEM_MC_ADVANCE_RIP();
5640 IEM_MC_END();
5641 return VINF_SUCCESS;
5642
5643 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5644 }
5645 }
5646 else
5647 {
5648 /* memory destination. */
5649
5650 uint32_t fAccess;
5651 if (pImpl->pfnLockedU16)
5652 fAccess = IEM_ACCESS_DATA_RW;
5653 else /* BT */
5654 fAccess = IEM_ACCESS_DATA_R;
5655
5656 /** @todo test negative bit offsets! */
5657 switch (pVCpu->iem.s.enmEffOpSize)
5658 {
5659 case IEMMODE_16BIT:
5660 IEM_MC_BEGIN(3, 2);
5661 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5662 IEM_MC_ARG(uint16_t, u16Src, 1);
5663 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5665 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5666
5667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5668 if (pImpl->pfnLockedU16)
5669 IEMOP_HLP_DONE_DECODING();
5670 else
5671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5672 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5673 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5674 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5675 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5676 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5677 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5678 IEM_MC_FETCH_EFLAGS(EFlags);
5679
5680 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5681 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5682 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5683 else
5684 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5685 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5686
5687 IEM_MC_COMMIT_EFLAGS(EFlags);
5688 IEM_MC_ADVANCE_RIP();
5689 IEM_MC_END();
5690 return VINF_SUCCESS;
5691
5692 case IEMMODE_32BIT:
5693 IEM_MC_BEGIN(3, 2);
5694 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5695 IEM_MC_ARG(uint32_t, u32Src, 1);
5696 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5698 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5699
5700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5701 if (pImpl->pfnLockedU16)
5702 IEMOP_HLP_DONE_DECODING();
5703 else
5704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5705 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5706 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5707 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5708 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5709 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5710 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5711 IEM_MC_FETCH_EFLAGS(EFlags);
5712
5713 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5714 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5715 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5716 else
5717 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5718 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5719
5720 IEM_MC_COMMIT_EFLAGS(EFlags);
5721 IEM_MC_ADVANCE_RIP();
5722 IEM_MC_END();
5723 return VINF_SUCCESS;
5724
5725 case IEMMODE_64BIT:
5726 IEM_MC_BEGIN(3, 2);
5727 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5728 IEM_MC_ARG(uint64_t, u64Src, 1);
5729 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5731 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5732
5733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5734 if (pImpl->pfnLockedU16)
5735 IEMOP_HLP_DONE_DECODING();
5736 else
5737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5738 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5739 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5740 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5741 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5742 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5743 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5744 IEM_MC_FETCH_EFLAGS(EFlags);
5745
5746 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5747 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5748 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5749 else
5750 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5751 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5752
5753 IEM_MC_COMMIT_EFLAGS(EFlags);
5754 IEM_MC_ADVANCE_RIP();
5755 IEM_MC_END();
5756 return VINF_SUCCESS;
5757
5758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5759 }
5760 }
5761}
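
/*
 * Worked example of the bit-offset-to-address adjustment done above
 * (illustrative values): for 'bt word [rbx], ax' with AX=35 the 16-bit
 * path computes
 *
 *     i16AddrAdj  = 35;           // copy of the source operand
 *     u16Src      = 35 & 0x0f;    //  = 3, the bit to test within the word
 *     i16AddrAdj >>= 4;           //  = 2, signed word index (IEM_MC_SAR_LOCAL_S16)
 *     i16AddrAdj <<= 1;           //  = 4, byte displacement (IEM_MC_SHL_LOCAL_S16)
 *     GCPtrEffDst += i16AddrAdj;  // rbx + 4, i.e. the third word
 *
 * A negative offset such as -1 yields u16Src = 15 and a displacement of -2
 * bytes, which is why an arithmetic right shift is required.
 */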
5762
5763
5764/** Opcode 0x0f 0xa3. */
5765FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5766{
5767 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5768 IEMOP_HLP_MIN_386();
5769 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5770}
5771
5772
5773/**
5774 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5775 */
5776FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5777{
5778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5779 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5780
5781 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5782 {
5783 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5785
5786 switch (pVCpu->iem.s.enmEffOpSize)
5787 {
5788 case IEMMODE_16BIT:
5789 IEM_MC_BEGIN(4, 0);
5790 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5791 IEM_MC_ARG(uint16_t, u16Src, 1);
5792 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5793 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5794
5795 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5796 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5797 IEM_MC_REF_EFLAGS(pEFlags);
5798 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5799
5800 IEM_MC_ADVANCE_RIP();
5801 IEM_MC_END();
5802 return VINF_SUCCESS;
5803
5804 case IEMMODE_32BIT:
5805 IEM_MC_BEGIN(4, 0);
5806 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5807 IEM_MC_ARG(uint32_t, u32Src, 1);
5808 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5809 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5810
5811 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5812 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5813 IEM_MC_REF_EFLAGS(pEFlags);
5814 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5815
5816 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5817 IEM_MC_ADVANCE_RIP();
5818 IEM_MC_END();
5819 return VINF_SUCCESS;
5820
5821 case IEMMODE_64BIT:
5822 IEM_MC_BEGIN(4, 0);
5823 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5824 IEM_MC_ARG(uint64_t, u64Src, 1);
5825 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5826 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5827
5828 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5829 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5830 IEM_MC_REF_EFLAGS(pEFlags);
5831 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5832
5833 IEM_MC_ADVANCE_RIP();
5834 IEM_MC_END();
5835 return VINF_SUCCESS;
5836
5837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5838 }
5839 }
5840 else
5841 {
5842 switch (pVCpu->iem.s.enmEffOpSize)
5843 {
5844 case IEMMODE_16BIT:
5845 IEM_MC_BEGIN(4, 2);
5846 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5847 IEM_MC_ARG(uint16_t, u16Src, 1);
5848 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5849 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5851
5852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5853 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5854 IEM_MC_ASSIGN(cShiftArg, cShift);
5855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5856 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5857 IEM_MC_FETCH_EFLAGS(EFlags);
5858 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5859 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5860
5861 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5862 IEM_MC_COMMIT_EFLAGS(EFlags);
5863 IEM_MC_ADVANCE_RIP();
5864 IEM_MC_END();
5865 return VINF_SUCCESS;
5866
5867 case IEMMODE_32BIT:
5868 IEM_MC_BEGIN(4, 2);
5869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5870 IEM_MC_ARG(uint32_t, u32Src, 1);
5871 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5872 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5874
5875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5876 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5877 IEM_MC_ASSIGN(cShiftArg, cShift);
5878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5879 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5880 IEM_MC_FETCH_EFLAGS(EFlags);
5881 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5882 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5883
5884 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5885 IEM_MC_COMMIT_EFLAGS(EFlags);
5886 IEM_MC_ADVANCE_RIP();
5887 IEM_MC_END();
5888 return VINF_SUCCESS;
5889
5890 case IEMMODE_64BIT:
5891 IEM_MC_BEGIN(4, 2);
5892 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5893 IEM_MC_ARG(uint64_t, u64Src, 1);
5894 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5895 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5897
5898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5899 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5900 IEM_MC_ASSIGN(cShiftArg, cShift);
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5902 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5903 IEM_MC_FETCH_EFLAGS(EFlags);
5904 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5905 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5906
5907 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5908 IEM_MC_COMMIT_EFLAGS(EFlags);
5909 IEM_MC_ADVANCE_RIP();
5910 IEM_MC_END();
5911 return VINF_SUCCESS;
5912
5913 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5914 }
5915 }
5916}
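
/*
 * Reference semantics of the double precision shifts driven by this worker
 * and the CL variant below (a 32-bit SHLD sketch; the flag updates and the
 * undefined count > 31 cases are omitted, and the helper name is
 * illustrative - the real implementations are g_iemAImpl_shld/shrd):
 *
 *     static uint32_t iemExampleShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *     {
 *         cShift &= 31;                   // the CPU masks the count mod 32
 *         if (!cShift)
 *             return uDst;                // count 0: no change, no flag updates
 *         return (uDst << cShift) | (uSrc >> (32 - cShift));
 *     }
 */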
5917
5918
5919/**
5920 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5921 */
5922FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5923{
5924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5925 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5926
5927 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5928 {
5929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5930
5931 switch (pVCpu->iem.s.enmEffOpSize)
5932 {
5933 case IEMMODE_16BIT:
5934 IEM_MC_BEGIN(4, 0);
5935 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5936 IEM_MC_ARG(uint16_t, u16Src, 1);
5937 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5938 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5939
5940 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5941 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5942 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5943 IEM_MC_REF_EFLAGS(pEFlags);
5944 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5945
5946 IEM_MC_ADVANCE_RIP();
5947 IEM_MC_END();
5948 return VINF_SUCCESS;
5949
5950 case IEMMODE_32BIT:
5951 IEM_MC_BEGIN(4, 0);
5952 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5953 IEM_MC_ARG(uint32_t, u32Src, 1);
5954 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5955 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5956
5957 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5958 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5959 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5960 IEM_MC_REF_EFLAGS(pEFlags);
5961 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5962
5963 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5964 IEM_MC_ADVANCE_RIP();
5965 IEM_MC_END();
5966 return VINF_SUCCESS;
5967
5968 case IEMMODE_64BIT:
5969 IEM_MC_BEGIN(4, 0);
5970 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5971 IEM_MC_ARG(uint64_t, u64Src, 1);
5972 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5973 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5974
5975 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5976 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5977 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5978 IEM_MC_REF_EFLAGS(pEFlags);
5979 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5980
5981 IEM_MC_ADVANCE_RIP();
5982 IEM_MC_END();
5983 return VINF_SUCCESS;
5984
5985 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5986 }
5987 }
5988 else
5989 {
5990 switch (pVCpu->iem.s.enmEffOpSize)
5991 {
5992 case IEMMODE_16BIT:
5993 IEM_MC_BEGIN(4, 2);
5994 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5995 IEM_MC_ARG(uint16_t, u16Src, 1);
5996 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5997 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5999
6000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6002 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6003 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6004 IEM_MC_FETCH_EFLAGS(EFlags);
6005 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6006 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6007
6008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6009 IEM_MC_COMMIT_EFLAGS(EFlags);
6010 IEM_MC_ADVANCE_RIP();
6011 IEM_MC_END();
6012 return VINF_SUCCESS;
6013
6014 case IEMMODE_32BIT:
6015 IEM_MC_BEGIN(4, 2);
6016 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6017 IEM_MC_ARG(uint32_t, u32Src, 1);
6018 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6019 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6021
6022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6024 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6025 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6026 IEM_MC_FETCH_EFLAGS(EFlags);
6027 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6028 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6029
6030 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6031 IEM_MC_COMMIT_EFLAGS(EFlags);
6032 IEM_MC_ADVANCE_RIP();
6033 IEM_MC_END();
6034 return VINF_SUCCESS;
6035
6036 case IEMMODE_64BIT:
6037 IEM_MC_BEGIN(4, 2);
6038 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6039 IEM_MC_ARG(uint64_t, u64Src, 1);
6040 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6041 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6043
6044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6046 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6047 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6048 IEM_MC_FETCH_EFLAGS(EFlags);
6049 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6050 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6051
6052 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6053 IEM_MC_COMMIT_EFLAGS(EFlags);
6054 IEM_MC_ADVANCE_RIP();
6055 IEM_MC_END();
6056 return VINF_SUCCESS;
6057
6058 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6059 }
6060 }
6061}
6062
6063
6064
6065/** Opcode 0x0f 0xa4. */
6066FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6067{
6068 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6069 IEMOP_HLP_MIN_386();
6070 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6071}
6072
6073
6074/** Opcode 0x0f 0xa5. */
6075FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6076{
6077 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6078 IEMOP_HLP_MIN_386();
6079 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6080}
6081
6082
6083/** Opcode 0x0f 0xa8. */
6084FNIEMOP_DEF(iemOp_push_gs)
6085{
6086 IEMOP_MNEMONIC(push_gs, "push gs");
6087 IEMOP_HLP_MIN_386();
6088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6089 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6090}
6091
6092
6093/** Opcode 0x0f 0xa9. */
6094FNIEMOP_DEF(iemOp_pop_gs)
6095{
6096 IEMOP_MNEMONIC(pop_gs, "pop gs");
6097 IEMOP_HLP_MIN_386();
6098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6099 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6100}
6101
6102
6103/** Opcode 0x0f 0xaa. */
6104FNIEMOP_DEF(iemOp_rsm)
6105{
6106 IEMOP_MNEMONIC(rsm, "rsm");
6107 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6108 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6109 * intercept). */
6110 IEMOP_BITCH_ABOUT_STUB();
6111 return IEMOP_RAISE_INVALID_OPCODE();
6112}
6113
6115
6116
6117/** Opcode 0x0f 0xab. */
6118FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6119{
6120 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6121 IEMOP_HLP_MIN_386();
6122 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6123}
6124
6125
6126/** Opcode 0x0f 0xac. */
6127FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6128{
6129 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6130 IEMOP_HLP_MIN_386();
6131 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6132}
6133
6134
6135/** Opcode 0x0f 0xad. */
6136FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6137{
6138 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6139 IEMOP_HLP_MIN_386();
6140 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6141}
6142
6143
6144/** Opcode 0x0f 0xae mem/0. */
6145FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6146{
6147 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6148 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6149 return IEMOP_RAISE_INVALID_OPCODE();
6150
6151 IEM_MC_BEGIN(3, 1);
6152 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6153 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6154 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6157 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6158 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6159 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6160 IEM_MC_END();
6161 return VINF_SUCCESS;
6162}
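
/*
 * Note: FXSAVE and FXRSTOR (below) operate on a 512 byte area that must be
 * 16 byte aligned; the alignment check and the CR0.TS/CR0.EM checks live in
 * iemCImpl_fxsave / iemCImpl_fxrstor rather than in the decoder functions
 * here. Conceptually:
 *
 *     if (GCPtrEff & 15)
 *         return iemRaiseGeneralProtectionFault0(pVCpu);
 */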
6163
6164
6165/** Opcode 0x0f 0xae mem/1. */
6166FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6167{
6168 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6169 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6170 return IEMOP_RAISE_INVALID_OPCODE();
6171
6172 IEM_MC_BEGIN(3, 1);
6173 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6174 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6175 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6178 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6179 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6180 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6181 IEM_MC_END();
6182 return VINF_SUCCESS;
6183}
6184
6185
6186/**
6187 * @opmaps grp15
6188 * @opcode !11/2
6189 * @oppfx none
6190 * @opcpuid sse
6191 * @opgroup og_sse_mxcsrsm
6192 * @opxcpttype 5
6193 * @optest op1=0 -> mxcsr=0
6194 * @optest op1=0x2083 -> mxcsr=0x2083
6195 * @optest op1=0xfffffffe -> value.xcpt=0xd
6196 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6197 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6198 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6199 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6200 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6201 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6202 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6203 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6204 */
6205FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6206{
6207 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6208 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6209 return IEMOP_RAISE_INVALID_OPCODE();
6210
6211 IEM_MC_BEGIN(2, 0);
6212 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6213 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6216 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr writes MXCSR */
6217 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6218 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6219 IEM_MC_END();
6220 return VINF_SUCCESS;
6221}
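
/*
 * The '@optest op1=0xfffffffe -> value.xcpt=0xd' line above documents that
 * setting reserved MXCSR bits raises #GP(0); that check is done by
 * iemCImpl_ldmxcsr. Conceptually (fMxCsrMask being the CPU's valid-bits
 * mask, typically 0xffff):
 *
 *     if (uNewMxCsr & ~fMxCsrMask)
 *         return iemRaiseGeneralProtectionFault0(pVCpu);
 */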
6222
6223
6224/**
6225 * @opmaps grp15
6226 * @opcode !11/3
6227 * @oppfx none
6228 * @opcpuid sse
6229 * @opgroup og_sse_mxcsrsm
6230 * @opxcpttype 5
6231 * @optest mxcsr=0 -> op1=0
6232 * @optest mxcsr=0x2083 -> op1=0x2083
6233 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6234 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6235 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6236 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6237 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6238 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6239 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6240 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6241 */
6242FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6243{
6244 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6245 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6246 return IEMOP_RAISE_INVALID_OPCODE();
6247
6248 IEM_MC_BEGIN(2, 0);
6249 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6250 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6253 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6254 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6255 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6256 IEM_MC_END();
6257 return VINF_SUCCESS;
6258}
6259
6260
6261/**
6262 * @opmaps grp15
6263 * @opcode !11/4
6264 * @oppfx none
6265 * @opcpuid xsave
6266 * @opgroup og_system
6267 * @opxcpttype none
6268 */
6269FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6270{
6271 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
6272 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6273 return IEMOP_RAISE_INVALID_OPCODE();
6274
6275 IEM_MC_BEGIN(3, 0);
6276 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6277 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6278 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6281 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6282 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6283 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6284 IEM_MC_END();
6285 return VINF_SUCCESS;
6286}
6287
6288
6289/**
6290 * @opmaps grp15
6291 * @opcode !11/5
6292 * @oppfx none
6293 * @opcpuid xsave
6294 * @opgroup og_system
6295 * @opxcpttype none
6296 */
6297FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6298{
6299 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6300 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6301 return IEMOP_RAISE_INVALID_OPCODE();
6302
6303 IEM_MC_BEGIN(3, 0);
6304 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6305 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6306 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6309 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor loads the state */
6310 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6311 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6312 IEM_MC_END();
6313 return VINF_SUCCESS;
6314}
6315
6316/** Opcode 0x0f 0xae mem/6. */
6317FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6318
6319/**
6320 * @opmaps grp15
6321 * @opcode !11/7
6322 * @oppfx none
6323 * @opcpuid clfsh
6324 * @opgroup og_cachectl
6325 * @optest op1=1 ->
6326 */
6327FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6328{
6329 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6330 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6331 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6332
6333 IEM_MC_BEGIN(2, 0);
6334 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6335 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6338 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6339 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6340 IEM_MC_END();
6341 return VINF_SUCCESS;
6342}
6343
6344/**
6345 * @opmaps grp15
6346 * @opcode !11/7
6347 * @oppfx 0x66
6348 * @opcpuid clflushopt
6349 * @opgroup og_cachectl
6350 * @optest op1=1 ->
6351 */
6352FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6353{
6354 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6355 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6356 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6357
6358 IEM_MC_BEGIN(2, 0);
6359 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6360 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6363 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6364 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6365 IEM_MC_END();
6366 return VINF_SUCCESS;
6367}
6368
6369
6370/** Opcode 0x0f 0xae 11b/5. */
6371FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6372{
6373 RT_NOREF_PV(bRm);
6374 IEMOP_MNEMONIC(lfence, "lfence");
6375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6376 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6377 return IEMOP_RAISE_INVALID_OPCODE();
6378
6379 IEM_MC_BEGIN(0, 0);
6380 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6381 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6382 else
6383 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6384 IEM_MC_ADVANCE_RIP();
6385 IEM_MC_END();
6386 return VINF_SUCCESS;
6387}
6388
6389
6390/** Opcode 0x0f 0xae 11b/6. */
6391FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6392{
6393 RT_NOREF_PV(bRm);
6394 IEMOP_MNEMONIC(mfence, "mfence");
6395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6396 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6397 return IEMOP_RAISE_INVALID_OPCODE();
6398
6399 IEM_MC_BEGIN(0, 0);
6400 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6401 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6402 else
6403 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6404 IEM_MC_ADVANCE_RIP();
6405 IEM_MC_END();
6406 return VINF_SUCCESS;
6407}
6408
6409
6410/** Opcode 0x0f 0xae 11b/7. */
6411FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6412{
6413 RT_NOREF_PV(bRm);
6414 IEMOP_MNEMONIC(sfence, "sfence");
6415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6416 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6417 return IEMOP_RAISE_INVALID_OPCODE();
6418
6419 IEM_MC_BEGIN(0, 0);
6420 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6421 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6422 else
6423 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6424 IEM_MC_ADVANCE_RIP();
6425 IEM_MC_END();
6426 return VINF_SUCCESS;
6427}
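
/*
 * All three fences above share the same fallback: when the host CPU lacks
 * SSE2 the dedicated LFENCE/MFENCE/SFENCE helpers cannot be used, so
 * iemAImpl_alt_mem_fence supplies a full fence instead (presumably a locked
 * read-modify-write along the lines of 'lock add dword [xSP], 0'; see the
 * assembly implementation). A full fence is a safe over-approximation of
 * all three.
 */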
6428
6429
6430/** Opcode 0xf3 0x0f 0xae 11b/0. */
6431FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6432
6433/** Opcode 0xf3 0x0f 0xae 11b/1. */
6434FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6435
6436/** Opcode 0xf3 0x0f 0xae 11b/2. */
6437FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6438
6439/** Opcode 0xf3 0x0f 0xae 11b/3. */
6440FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6441
6442
6443/**
6444 * Group 15 jump table for register variant.
6445 */
6446IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6447{ /* pfx: none, 066h, 0f3h, 0f2h */
6448 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6449 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6450 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6451 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6452 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6453 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6454 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6455 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6456};
6457AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6458
6459
6460/**
6461 * Group 15 jump table for memory variant.
6462 */
6463IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6464{ /* pfx: none, 066h, 0f3h, 0f2h */
6465 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6466 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6467 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6468 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6469 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6470 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6471 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6472 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6473};
6474AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6475
6476
6477/** Opcode 0x0f 0xae. */
6478FNIEMOP_DEF(iemOp_Grp15)
6479{
6480 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6482 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6483 /* register, register */
6484 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6485 + pVCpu->iem.s.idxPrefix], bRm);
6486 /* memory, register */
6487 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6488 + pVCpu->iem.s.idxPrefix], bRm);
6489}
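
/*
 * The group 15 tables above are indexed by reg * 4 + idxPrefix, where
 * idxPrefix is 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for 0xf2.
 * Worked example (illustrative): for the encoding 0f ae e8 we get mod=3,
 * reg=5, rm=0 and no prefix, so the lookup becomes
 *
 *     g_apfnGroup15RegReg[5 * 4 + 0]   // => iemOp_Grp15_lfence
 */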
6490
6491
6492/** Opcode 0x0f 0xaf. */
6493FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6494{
6495 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6496 IEMOP_HLP_MIN_386();
6497 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6498 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6499}
6500
6501
6502/** Opcode 0x0f 0xb0. */
6503FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6504{
6505 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6506 IEMOP_HLP_MIN_486();
6507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6508
6509 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6510 {
6511 IEMOP_HLP_DONE_DECODING();
6512 IEM_MC_BEGIN(4, 0);
6513 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6514 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6515 IEM_MC_ARG(uint8_t, u8Src, 2);
6516 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6517
6518 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6519 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6520 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6521 IEM_MC_REF_EFLAGS(pEFlags);
6522 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6523 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6524 else
6525 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6526
6527 IEM_MC_ADVANCE_RIP();
6528 IEM_MC_END();
6529 }
6530 else
6531 {
6532 IEM_MC_BEGIN(4, 3);
6533 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6534 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6535 IEM_MC_ARG(uint8_t, u8Src, 2);
6536 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6538 IEM_MC_LOCAL(uint8_t, u8Al);
6539
6540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6541 IEMOP_HLP_DONE_DECODING();
6542 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6543 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6544 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6545 IEM_MC_FETCH_EFLAGS(EFlags);
6546 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6547 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6548 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6549 else
6550 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6551
6552 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6553 IEM_MC_COMMIT_EFLAGS(EFlags);
6554 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6555 IEM_MC_ADVANCE_RIP();
6556 IEM_MC_END();
6557 }
6558 return VINF_SUCCESS;
6559}
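
/*
 * Reference semantics of the cmpxchg helpers called above and below (a
 * sketch of the 8-bit case; the real iemAImpl_cmpxchg_u8 is in assembly and
 * also updates the other arithmetic flags from the comparison, only ZF is
 * shown here, and the helper name is illustrative):
 *
 *     static void iemExampleCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
 *     {
 *         if (*puDst == *puAl)
 *         {
 *             *pfEFlags |= X86_EFL_ZF;    // equal: write the source to the destination
 *             *puDst     = uSrc;
 *         }
 *         else
 *         {
 *             *pfEFlags &= ~X86_EFL_ZF;   // not equal: load AL from the destination
 *             *puAl      = *puDst;
 *         }
 *     }
 */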
6560
6561/** Opcode 0x0f 0xb1. */
6562FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6563{
6564 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6565 IEMOP_HLP_MIN_486();
6566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6567
6568 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6569 {
6570 IEMOP_HLP_DONE_DECODING();
6571 switch (pVCpu->iem.s.enmEffOpSize)
6572 {
6573 case IEMMODE_16BIT:
6574 IEM_MC_BEGIN(4, 0);
6575 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6576 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6577 IEM_MC_ARG(uint16_t, u16Src, 2);
6578 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6579
6580 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6581 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6582 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6583 IEM_MC_REF_EFLAGS(pEFlags);
6584 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6585 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6586 else
6587 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6588
6589 IEM_MC_ADVANCE_RIP();
6590 IEM_MC_END();
6591 return VINF_SUCCESS;
6592
6593 case IEMMODE_32BIT:
6594 IEM_MC_BEGIN(4, 0);
6595 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6596 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6597 IEM_MC_ARG(uint32_t, u32Src, 2);
6598 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6599
6600 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6601 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6602 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6603 IEM_MC_REF_EFLAGS(pEFlags);
6604 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6605 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6606 else
6607 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6608
6609 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6610 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6611 IEM_MC_ADVANCE_RIP();
6612 IEM_MC_END();
6613 return VINF_SUCCESS;
6614
6615 case IEMMODE_64BIT:
6616 IEM_MC_BEGIN(4, 0);
6617 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6618 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6619#ifdef RT_ARCH_X86
6620 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6621#else
6622 IEM_MC_ARG(uint64_t, u64Src, 2);
6623#endif
6624 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6625
6626 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6627 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6628 IEM_MC_REF_EFLAGS(pEFlags);
6629#ifdef RT_ARCH_X86
6630 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6631 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6632 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6633 else
6634 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6635#else
6636 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6637 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6638 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6639 else
6640 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6641#endif
6642
6643 IEM_MC_ADVANCE_RIP();
6644 IEM_MC_END();
6645 return VINF_SUCCESS;
6646
6647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6648 }
6649 }
6650 else
6651 {
6652 switch (pVCpu->iem.s.enmEffOpSize)
6653 {
6654 case IEMMODE_16BIT:
6655 IEM_MC_BEGIN(4, 3);
6656 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6657 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6658 IEM_MC_ARG(uint16_t, u16Src, 2);
6659 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6661 IEM_MC_LOCAL(uint16_t, u16Ax);
6662
6663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6664 IEMOP_HLP_DONE_DECODING();
6665 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6666 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6667 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6668 IEM_MC_FETCH_EFLAGS(EFlags);
6669 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6670 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6671 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6672 else
6673 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6674
6675 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6676 IEM_MC_COMMIT_EFLAGS(EFlags);
6677 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6678 IEM_MC_ADVANCE_RIP();
6679 IEM_MC_END();
6680 return VINF_SUCCESS;
6681
6682 case IEMMODE_32BIT:
6683 IEM_MC_BEGIN(4, 3);
6684 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6685 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6686 IEM_MC_ARG(uint32_t, u32Src, 2);
6687 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6689 IEM_MC_LOCAL(uint32_t, u32Eax);
6690
6691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6692 IEMOP_HLP_DONE_DECODING();
6693 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6694 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6695 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6696 IEM_MC_FETCH_EFLAGS(EFlags);
6697 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6698 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6699 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6700 else
6701 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6702
6703 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6704 IEM_MC_COMMIT_EFLAGS(EFlags);
6705 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6706 IEM_MC_ADVANCE_RIP();
6707 IEM_MC_END();
6708 return VINF_SUCCESS;
6709
6710 case IEMMODE_64BIT:
6711 IEM_MC_BEGIN(4, 3);
6712 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6713 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6714#ifdef RT_ARCH_X86
6715 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6716#else
6717 IEM_MC_ARG(uint64_t, u64Src, 2);
6718#endif
6719 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6721 IEM_MC_LOCAL(uint64_t, u64Rax);
6722
6723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6724 IEMOP_HLP_DONE_DECODING();
6725 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6726 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6727 IEM_MC_FETCH_EFLAGS(EFlags);
6728 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6729#ifdef RT_ARCH_X86
6730 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6731 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6732 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6733 else
6734 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6735#else
6736 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6737 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6738 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6739 else
6740 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6741#endif
6742
6743 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6744 IEM_MC_COMMIT_EFLAGS(EFlags);
6745 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6746 IEM_MC_ADVANCE_RIP();
6747 IEM_MC_END();
6748 return VINF_SUCCESS;
6749
6750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6751 }
6752 }
6753}
6754
6755
6756FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6757{
6758 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6759 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6760
6761 switch (pVCpu->iem.s.enmEffOpSize)
6762 {
6763 case IEMMODE_16BIT:
6764 IEM_MC_BEGIN(5, 1);
6765 IEM_MC_ARG(uint16_t, uSel, 0);
6766 IEM_MC_ARG(uint16_t, offSeg, 1);
6767 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6768 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6769 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6770 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6773 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6774 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6775 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6776 IEM_MC_END();
6777 return VINF_SUCCESS;
6778
6779 case IEMMODE_32BIT:
6780 IEM_MC_BEGIN(5, 1);
6781 IEM_MC_ARG(uint16_t, uSel, 0);
6782 IEM_MC_ARG(uint32_t, offSeg, 1);
6783 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6784 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6785 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6786 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6789 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6790 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6791 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6792 IEM_MC_END();
6793 return VINF_SUCCESS;
6794
6795 case IEMMODE_64BIT:
6796 IEM_MC_BEGIN(5, 1);
6797 IEM_MC_ARG(uint16_t, uSel, 0);
6798 IEM_MC_ARG(uint64_t, offSeg, 1);
6799 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6800 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6801 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6802 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6805 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6806 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6807 else
6808 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6809 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6810 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6811 IEM_MC_END();
6812 return VINF_SUCCESS;
6813
6814 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6815 }
6816}
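
/*
 * Memory layout consumed by the worker above: a far pointer is stored as
 * the offset followed by the 16-bit selector. Illustrative picture for
 * 'lss esp, [mem]' with a 32-bit operand size:
 *
 *     mem+0: uint32_t offSeg   // -> ESP
 *     mem+4: uint16_t uSel     // -> SS
 *
 * which is what the IEM_MC_FETCH_MEM_U32 / IEM_MC_FETCH_MEM_U16_DISP(.., 4)
 * pair in the 32-bit case reads.
 */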
6817
6818
6819/** Opcode 0x0f 0xb2. */
6820FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6821{
6822 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6823 IEMOP_HLP_MIN_386();
6824 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6825 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6826 return IEMOP_RAISE_INVALID_OPCODE();
6827 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6828}
6829
6830
6831/** Opcode 0x0f 0xb3. */
6832FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6833{
6834 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6835 IEMOP_HLP_MIN_386();
6836 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6837}
6838
6839
6840/** Opcode 0x0f 0xb4. */
6841FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6842{
6843 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6844 IEMOP_HLP_MIN_386();
6845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6847 return IEMOP_RAISE_INVALID_OPCODE();
6848 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6849}
6850
6851
6852/** Opcode 0x0f 0xb5. */
6853FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6854{
6855 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6856 IEMOP_HLP_MIN_386();
6857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6858 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6859 return IEMOP_RAISE_INVALID_OPCODE();
6860 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6861}
6862
6863
6864/** Opcode 0x0f 0xb6. */
6865FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6866{
6867 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6868 IEMOP_HLP_MIN_386();
6869
6870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6871
6872 /*
6873 * If rm is denoting a register, no more instruction bytes.
6874 */
6875 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6876 {
6877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6878 switch (pVCpu->iem.s.enmEffOpSize)
6879 {
6880 case IEMMODE_16BIT:
6881 IEM_MC_BEGIN(0, 1);
6882 IEM_MC_LOCAL(uint16_t, u16Value);
6883 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6884 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6885 IEM_MC_ADVANCE_RIP();
6886 IEM_MC_END();
6887 return VINF_SUCCESS;
6888
6889 case IEMMODE_32BIT:
6890 IEM_MC_BEGIN(0, 1);
6891 IEM_MC_LOCAL(uint32_t, u32Value);
6892 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6893 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6894 IEM_MC_ADVANCE_RIP();
6895 IEM_MC_END();
6896 return VINF_SUCCESS;
6897
6898 case IEMMODE_64BIT:
6899 IEM_MC_BEGIN(0, 1);
6900 IEM_MC_LOCAL(uint64_t, u64Value);
6901 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6902 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6903 IEM_MC_ADVANCE_RIP();
6904 IEM_MC_END();
6905 return VINF_SUCCESS;
6906
6907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6908 }
6909 }
6910 else
6911 {
6912 /*
6913 * We're loading a register from memory.
6914 */
6915 switch (pVCpu->iem.s.enmEffOpSize)
6916 {
6917 case IEMMODE_16BIT:
6918 IEM_MC_BEGIN(0, 2);
6919 IEM_MC_LOCAL(uint16_t, u16Value);
6920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6923 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6924 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6925 IEM_MC_ADVANCE_RIP();
6926 IEM_MC_END();
6927 return VINF_SUCCESS;
6928
6929 case IEMMODE_32BIT:
6930 IEM_MC_BEGIN(0, 2);
6931 IEM_MC_LOCAL(uint32_t, u32Value);
6932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6935 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6936 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6937 IEM_MC_ADVANCE_RIP();
6938 IEM_MC_END();
6939 return VINF_SUCCESS;
6940
6941 case IEMMODE_64BIT:
6942 IEM_MC_BEGIN(0, 2);
6943 IEM_MC_LOCAL(uint64_t, u64Value);
6944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6947 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6948 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6949 IEM_MC_ADVANCE_RIP();
6950 IEM_MC_END();
6951 return VINF_SUCCESS;
6952
6953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6954 }
6955 }
6956}
6957
6958
6959/** Opcode 0x0f 0xb7. */
6960FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6961{
6962 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6963 IEMOP_HLP_MIN_386();
6964
6965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6966
6967 /** @todo Not entirely sure how the operand size prefix is handled here,
6968 * assuming that it will be ignored. Would be nice to have a few
6969 * tests for this. */
6970 /*
6971 * If rm is denoting a register, no more instruction bytes.
6972 */
6973 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6974 {
6975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6976 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6977 {
6978 IEM_MC_BEGIN(0, 1);
6979 IEM_MC_LOCAL(uint32_t, u32Value);
6980 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6981 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6982 IEM_MC_ADVANCE_RIP();
6983 IEM_MC_END();
6984 }
6985 else
6986 {
6987 IEM_MC_BEGIN(0, 1);
6988 IEM_MC_LOCAL(uint64_t, u64Value);
6989 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6990 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6991 IEM_MC_ADVANCE_RIP();
6992 IEM_MC_END();
6993 }
6994 }
6995 else
6996 {
6997 /*
6998 * We're loading a register from memory.
6999 */
7000 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7001 {
7002 IEM_MC_BEGIN(0, 2);
7003 IEM_MC_LOCAL(uint32_t, u32Value);
7004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7007 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7008 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7009 IEM_MC_ADVANCE_RIP();
7010 IEM_MC_END();
7011 }
7012 else
7013 {
7014 IEM_MC_BEGIN(0, 2);
7015 IEM_MC_LOCAL(uint64_t, u64Value);
7016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7019 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7020 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7021 IEM_MC_ADVANCE_RIP();
7022 IEM_MC_END();
7023 }
7024 }
7025 return VINF_SUCCESS;
7026}
7027
7028
7029/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7030FNIEMOP_UD_STUB(iemOp_jmpe);
7031/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7032FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7033
7034
7035/**
7036 * @opcode 0xb9
7037 * @opinvalid intel-modrm
7038 * @optest ->
7039 */
7040FNIEMOP_DEF(iemOp_Grp10)
7041{
7042 /*
7043 * AMD does not decode beyond the 0xb9 byte, whereas Intel decodes the
7044 * modr/m byte too. See bs3-cpu-decoder-1.c32. So we can forward to iemOp_InvalidNeedRM.
7045 */
7046 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7047 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
7048 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7049}
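
/*
 * For illustration of the vendor difference described above: how many opcode
 * bytes UD1 consumes before raising #UD. A rough sketch using a hypothetical
 * helper (not part of IEM; 32-bit addressing assumed):
 */
#if 0
static unsigned ud1ConsumedBytes(bool fIntel, uint8_t bModRm)
{
    unsigned cbConsumed = 2;                        /* 0f b9 */
    if (fIntel)
    {
        cbConsumed += 1;                            /* Intel also eats the modr/m byte... */
        unsigned const iMod = bModRm >> 6;
        unsigned const iRm  = bModRm & 7;
        if (iMod != 3 && iRm == 4)
            cbConsumed += 1;                        /* ...plus any SIB byte... */
        if (iMod == 1)
            cbConsumed += 1;                        /* ...and disp8... */
        else if (iMod == 2 || (iMod == 0 && iRm == 5))
            cbConsumed += 4;                        /* ...or disp32. */
    }
    return cbConsumed;                              /* AMD stops at the two opcode bytes. */
}
#endif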
7050
7051
7052/** Opcode 0x0f 0xba. */
7053FNIEMOP_DEF(iemOp_Grp8)
7054{
7055 IEMOP_HLP_MIN_386();
7056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7057 PCIEMOPBINSIZES pImpl;
7058 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7059 {
7060 case 0: case 1: case 2: case 3:
7061 /* Both AMD and Intel want full modr/m decoding and imm8. */
7062 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7063 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7064 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7065 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7066 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7067 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7068 }
7069 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7070
7071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7072 {
7073 /* register destination. */
7074 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7076
7077 switch (pVCpu->iem.s.enmEffOpSize)
7078 {
7079 case IEMMODE_16BIT:
7080 IEM_MC_BEGIN(3, 0);
7081 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7082 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7083 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7084
7085 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7086 IEM_MC_REF_EFLAGS(pEFlags);
7087 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7088
7089 IEM_MC_ADVANCE_RIP();
7090 IEM_MC_END();
7091 return VINF_SUCCESS;
7092
7093 case IEMMODE_32BIT:
7094 IEM_MC_BEGIN(3, 0);
7095 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7096 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7097 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7098
7099 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7100 IEM_MC_REF_EFLAGS(pEFlags);
7101 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7102
7103 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7104 IEM_MC_ADVANCE_RIP();
7105 IEM_MC_END();
7106 return VINF_SUCCESS;
7107
7108 case IEMMODE_64BIT:
7109 IEM_MC_BEGIN(3, 0);
7110 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7111 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7112 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7113
7114 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7115 IEM_MC_REF_EFLAGS(pEFlags);
7116 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7117
7118 IEM_MC_ADVANCE_RIP();
7119 IEM_MC_END();
7120 return VINF_SUCCESS;
7121
7122 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7123 }
7124 }
7125 else
7126 {
7127 /* memory destination. */
7128
7129 uint32_t fAccess;
7130 if (pImpl->pfnLockedU16)
7131 fAccess = IEM_ACCESS_DATA_RW;
7132 else /* BT */
7133 fAccess = IEM_ACCESS_DATA_R;
7134
7135 /** @todo test negative bit offsets! */
7136 switch (pVCpu->iem.s.enmEffOpSize)
7137 {
7138 case IEMMODE_16BIT:
7139 IEM_MC_BEGIN(3, 1);
7140 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7141 IEM_MC_ARG(uint16_t, u16Src, 1);
7142 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7144
7145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7146 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7147 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7148 if (pImpl->pfnLockedU16)
7149 IEMOP_HLP_DONE_DECODING();
7150 else
7151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7152 IEM_MC_FETCH_EFLAGS(EFlags);
7153 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7155 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7156 else
7157 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7158 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7159
7160 IEM_MC_COMMIT_EFLAGS(EFlags);
7161 IEM_MC_ADVANCE_RIP();
7162 IEM_MC_END();
7163 return VINF_SUCCESS;
7164
7165 case IEMMODE_32BIT:
7166 IEM_MC_BEGIN(3, 1);
7167 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7168 IEM_MC_ARG(uint32_t, u32Src, 1);
7169 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7171
7172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7173 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7174 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7175 if (pImpl->pfnLockedU16)
7176 IEMOP_HLP_DONE_DECODING();
7177 else
7178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7179 IEM_MC_FETCH_EFLAGS(EFlags);
7180 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7181 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7182 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7183 else
7184 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7185 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7186
7187 IEM_MC_COMMIT_EFLAGS(EFlags);
7188 IEM_MC_ADVANCE_RIP();
7189 IEM_MC_END();
7190 return VINF_SUCCESS;
7191
7192 case IEMMODE_64BIT:
7193 IEM_MC_BEGIN(3, 1);
7194 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7195 IEM_MC_ARG(uint64_t, u64Src, 1);
7196 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7198
7199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7200 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7201 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7202 if (pImpl->pfnLockedU16)
7203 IEMOP_HLP_DONE_DECODING();
7204 else
7205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7206 IEM_MC_FETCH_EFLAGS(EFlags);
7207 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7210 else
7211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7212 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7213
7214 IEM_MC_COMMIT_EFLAGS(EFlags);
7215 IEM_MC_ADVANCE_RIP();
7216 IEM_MC_END();
7217 return VINF_SUCCESS;
7218
7219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7220 }
7221 }
7222}
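
/*
 * A minimal reference for the imm8 bit test operations above (hypothetical
 * helper, not used by IEM; EFLAGS other than CF omitted). Note how the imm8
 * offset is masked to the operand width, matching the u8Bit & 0x1f above:
 */
#if 0
static bool btRef32(uint32_t *puDst, uint8_t u8Bit, unsigned iOp /* 4=bt, 5=bts, 6=btr, 7=btc */)
{
    uint32_t const fMask  = UINT32_C(1) << (u8Bit & 0x1f);
    bool const     fCarry = (*puDst & fMask) != 0;  /* the selected bit goes to CF */
    switch (iOp)
    {
        case 5: *puDst |=  fMask; break;            /* bts: set it */
        case 6: *puDst &= ~fMask; break;            /* btr: clear it */
        case 7: *puDst ^=  fMask; break;            /* btc: toggle it */
        default: break;                             /* bt: read only */
    }
    return fCarry;
}
#endif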
7223
7224
7225/** Opcode 0x0f 0xbb. */
7226FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7227{
7228 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7229 IEMOP_HLP_MIN_386();
7230 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7231}
7232
7233
7234/** Opcode 0x0f 0xbc. */
7235FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7236{
7237 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7238 IEMOP_HLP_MIN_386();
7239 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7240 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7241}
7242
7243
7244/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7245FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7246
7247
7248/** Opcode 0x0f 0xbd. */
7249FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7250{
7251 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7252 IEMOP_HLP_MIN_386();
7253 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7254 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7255}
7256
7257
7258/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7259FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7260
7261
7262/** Opcode 0x0f 0xbe. */
7263FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7264{
7265 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7266 IEMOP_HLP_MIN_386();
7267
7268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7269
7270 /*
7271 * If rm is denoting a register, no more instruction bytes.
7272 */
7273 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7274 {
7275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7276 switch (pVCpu->iem.s.enmEffOpSize)
7277 {
7278 case IEMMODE_16BIT:
7279 IEM_MC_BEGIN(0, 1);
7280 IEM_MC_LOCAL(uint16_t, u16Value);
7281 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7282 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7283 IEM_MC_ADVANCE_RIP();
7284 IEM_MC_END();
7285 return VINF_SUCCESS;
7286
7287 case IEMMODE_32BIT:
7288 IEM_MC_BEGIN(0, 1);
7289 IEM_MC_LOCAL(uint32_t, u32Value);
7290 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7291 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7292 IEM_MC_ADVANCE_RIP();
7293 IEM_MC_END();
7294 return VINF_SUCCESS;
7295
7296 case IEMMODE_64BIT:
7297 IEM_MC_BEGIN(0, 1);
7298 IEM_MC_LOCAL(uint64_t, u64Value);
7299 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7300 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7301 IEM_MC_ADVANCE_RIP();
7302 IEM_MC_END();
7303 return VINF_SUCCESS;
7304
7305 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7306 }
7307 }
7308 else
7309 {
7310 /*
7311 * We're loading a register from memory.
7312 */
7313 switch (pVCpu->iem.s.enmEffOpSize)
7314 {
7315 case IEMMODE_16BIT:
7316 IEM_MC_BEGIN(0, 2);
7317 IEM_MC_LOCAL(uint16_t, u16Value);
7318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7321 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7322 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7323 IEM_MC_ADVANCE_RIP();
7324 IEM_MC_END();
7325 return VINF_SUCCESS;
7326
7327 case IEMMODE_32BIT:
7328 IEM_MC_BEGIN(0, 2);
7329 IEM_MC_LOCAL(uint32_t, u32Value);
7330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7333 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7334 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7335 IEM_MC_ADVANCE_RIP();
7336 IEM_MC_END();
7337 return VINF_SUCCESS;
7338
7339 case IEMMODE_64BIT:
7340 IEM_MC_BEGIN(0, 2);
7341 IEM_MC_LOCAL(uint64_t, u64Value);
7342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7345 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7346 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7347 IEM_MC_ADVANCE_RIP();
7348 IEM_MC_END();
7349 return VINF_SUCCESS;
7350
7351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7352 }
7353 }
7354}
7355
7356
7357/** Opcode 0x0f 0xbf. */
7358FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7359{
7360 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7361 IEMOP_HLP_MIN_386();
7362
7363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7364
7365 /** @todo Not entirely sure how the operand size prefix is handled here,
7366 * assuming that it will be ignored. Would be nice to have a few
 7367 * tests for this. */
7368 /*
7369 * If rm is denoting a register, no more instruction bytes.
7370 */
7371 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7372 {
7373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7374 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7375 {
7376 IEM_MC_BEGIN(0, 1);
7377 IEM_MC_LOCAL(uint32_t, u32Value);
7378 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7379 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7380 IEM_MC_ADVANCE_RIP();
7381 IEM_MC_END();
7382 }
7383 else
7384 {
7385 IEM_MC_BEGIN(0, 1);
7386 IEM_MC_LOCAL(uint64_t, u64Value);
7387 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7388 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7389 IEM_MC_ADVANCE_RIP();
7390 IEM_MC_END();
7391 }
7392 }
7393 else
7394 {
7395 /*
7396 * We're loading a register from memory.
7397 */
7398 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7399 {
7400 IEM_MC_BEGIN(0, 2);
7401 IEM_MC_LOCAL(uint32_t, u32Value);
7402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7405 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7406 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7407 IEM_MC_ADVANCE_RIP();
7408 IEM_MC_END();
7409 }
7410 else
7411 {
7412 IEM_MC_BEGIN(0, 2);
7413 IEM_MC_LOCAL(uint64_t, u64Value);
7414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7417 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7418 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7419 IEM_MC_ADVANCE_RIP();
7420 IEM_MC_END();
7421 }
7422 }
7423 return VINF_SUCCESS;
7424}
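
/*
 * Illustration of the extension difference between movzx (0f b7) above and
 * movsx (0f bf) here, for a 16-bit source and a 64-bit destination (sketch
 * only, hypothetical helpers):
 */
#if 0
static uint64_t movzx16To64(uint16_t uSrc)  /* movzx: 0x8000 -> 0x0000000000008000 */
{
    return uSrc;
}

static uint64_t movsx16To64(uint16_t uSrc)  /* movsx: 0x8000 -> 0xffffffffffff8000 */
{
    return (uint64_t)(int64_t)(int16_t)uSrc;
}
#endif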
7425
7426
7427/** Opcode 0x0f 0xc0. */
7428FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7429{
7430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7431 IEMOP_HLP_MIN_486();
7432 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7433
7434 /*
7435 * If rm is denoting a register, no more instruction bytes.
7436 */
7437 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7438 {
7439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7440
7441 IEM_MC_BEGIN(3, 0);
7442 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7443 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7444 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7445
7446 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7447 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7448 IEM_MC_REF_EFLAGS(pEFlags);
7449 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7450
7451 IEM_MC_ADVANCE_RIP();
7452 IEM_MC_END();
7453 }
7454 else
7455 {
7456 /*
7457 * We're accessing memory.
7458 */
7459 IEM_MC_BEGIN(3, 3);
7460 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7461 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7462 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7463 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7465
7466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7467 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7468 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7469 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7470 IEM_MC_FETCH_EFLAGS(EFlags);
7471 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7472 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7473 else
7474 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7475
7476 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7477 IEM_MC_COMMIT_EFLAGS(EFlags);
7478 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7479 IEM_MC_ADVANCE_RIP();
7480 IEM_MC_END();
7481 return VINF_SUCCESS;
7482 }
7483 return VINF_SUCCESS;
7484}
7485
7486
7487/** Opcode 0x0f 0xc1. */
7488FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7489{
7490 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7491 IEMOP_HLP_MIN_486();
7492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7493
7494 /*
7495 * If rm is denoting a register, no more instruction bytes.
7496 */
7497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7498 {
7499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7500
7501 switch (pVCpu->iem.s.enmEffOpSize)
7502 {
7503 case IEMMODE_16BIT:
7504 IEM_MC_BEGIN(3, 0);
7505 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7506 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7507 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7508
7509 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7510 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7511 IEM_MC_REF_EFLAGS(pEFlags);
7512 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7513
7514 IEM_MC_ADVANCE_RIP();
7515 IEM_MC_END();
7516 return VINF_SUCCESS;
7517
7518 case IEMMODE_32BIT:
7519 IEM_MC_BEGIN(3, 0);
7520 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7521 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7522 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7523
7524 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7525 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7526 IEM_MC_REF_EFLAGS(pEFlags);
7527 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7528
7529 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7530 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7531 IEM_MC_ADVANCE_RIP();
7532 IEM_MC_END();
7533 return VINF_SUCCESS;
7534
7535 case IEMMODE_64BIT:
7536 IEM_MC_BEGIN(3, 0);
7537 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7538 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7539 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7540
7541 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7542 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7543 IEM_MC_REF_EFLAGS(pEFlags);
7544 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7545
7546 IEM_MC_ADVANCE_RIP();
7547 IEM_MC_END();
7548 return VINF_SUCCESS;
7549
7550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7551 }
7552 }
7553 else
7554 {
7555 /*
7556 * We're accessing memory.
7557 */
7558 switch (pVCpu->iem.s.enmEffOpSize)
7559 {
7560 case IEMMODE_16BIT:
7561 IEM_MC_BEGIN(3, 3);
7562 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7563 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7564 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7565 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7567
7568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7569 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7570 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7571 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7572 IEM_MC_FETCH_EFLAGS(EFlags);
7573 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7574 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7575 else
7576 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7577
7578 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7579 IEM_MC_COMMIT_EFLAGS(EFlags);
7580 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7581 IEM_MC_ADVANCE_RIP();
7582 IEM_MC_END();
7583 return VINF_SUCCESS;
7584
7585 case IEMMODE_32BIT:
7586 IEM_MC_BEGIN(3, 3);
7587 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7588 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7589 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7590 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7592
7593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7594 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7595 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7596 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7597 IEM_MC_FETCH_EFLAGS(EFlags);
7598 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7599 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7600 else
7601 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7602
7603 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7604 IEM_MC_COMMIT_EFLAGS(EFlags);
7605 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7606 IEM_MC_ADVANCE_RIP();
7607 IEM_MC_END();
7608 return VINF_SUCCESS;
7609
7610 case IEMMODE_64BIT:
7611 IEM_MC_BEGIN(3, 3);
7612 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7613 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7614 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7615 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7617
7618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7619 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7620 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7621 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7622 IEM_MC_FETCH_EFLAGS(EFlags);
7623 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7624 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7625 else
7626 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7627
7628 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7629 IEM_MC_COMMIT_EFLAGS(EFlags);
7630 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7631 IEM_MC_ADVANCE_RIP();
7632 IEM_MC_END();
7633 return VINF_SUCCESS;
7634
7635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7636 }
7637 }
7638}
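
/*
 * Sketch of the xadd operation performed by iemAImpl_xadd_u32 and friends
 * (hypothetical helper; EFLAGS, which are those of ADD, omitted). The
 * destination receives the sum while the register receives the old
 * destination value, cf. the u32RegCopy handling above:
 */
#if 0
static void xaddRef32(uint32_t *puDst, uint32_t *puReg)
{
    uint32_t const uOldDst = *puDst;
    *puDst = uOldDst + *puReg;
    *puReg = uOldDst;
}
#endif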
7639
7640
7641/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7642FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7643/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7644FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7645/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7646FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7647/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7648FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7649
7650
7651/** Opcode 0x0f 0xc3. */
7652FNIEMOP_DEF(iemOp_movnti_My_Gy)
7653{
7654 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7655
7656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7657
7658 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7659 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7660 {
7661 switch (pVCpu->iem.s.enmEffOpSize)
7662 {
7663 case IEMMODE_32BIT:
7664 IEM_MC_BEGIN(0, 2);
7665 IEM_MC_LOCAL(uint32_t, u32Value);
7666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7667
7668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7670 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7671 return IEMOP_RAISE_INVALID_OPCODE();
7672
7673 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7674 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7675 IEM_MC_ADVANCE_RIP();
7676 IEM_MC_END();
7677 break;
7678
7679 case IEMMODE_64BIT:
7680 IEM_MC_BEGIN(0, 2);
7681 IEM_MC_LOCAL(uint64_t, u64Value);
7682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7683
7684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7686 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7687 return IEMOP_RAISE_INVALID_OPCODE();
7688
7689 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7690 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7691 IEM_MC_ADVANCE_RIP();
7692 IEM_MC_END();
7693 break;
7694
7695 case IEMMODE_16BIT:
7696 /** @todo check this form. */
7697 return IEMOP_RAISE_INVALID_OPCODE();
7698 }
7699 }
7700 else
7701 return IEMOP_RAISE_INVALID_OPCODE();
7702 return VINF_SUCCESS;
7703}
7704/* Opcode 0x66 0x0f 0xc3 - invalid */
7705/* Opcode 0xf3 0x0f 0xc3 - invalid */
7706/* Opcode 0xf2 0x0f 0xc3 - invalid */
7707
7708/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7709FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7710/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7711FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7712/* Opcode 0xf3 0x0f 0xc4 - invalid */
7713/* Opcode 0xf2 0x0f 0xc4 - invalid */
7714
7715/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7716FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7717/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7718FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7719/* Opcode 0xf3 0x0f 0xc5 - invalid */
7720/* Opcode 0xf2 0x0f 0xc5 - invalid */
7721
7722/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7723FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7724/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7725FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7726/* Opcode 0xf3 0x0f 0xc6 - invalid */
7727/* Opcode 0xf2 0x0f 0xc6 - invalid */
7728
7729
7730/** Opcode 0x0f 0xc7 !11/1. */
7731FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7732{
7733 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7734
7735 IEM_MC_BEGIN(4, 3);
7736 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7737 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7738 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7739 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7740 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7741 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7743
7744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7745 IEMOP_HLP_DONE_DECODING();
7746 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7747
7748 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7749 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7750 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7751
7752 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7753 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7754 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7755
7756 IEM_MC_FETCH_EFLAGS(EFlags);
7757 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7758 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7759 else
7760 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7761
7762 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7763 IEM_MC_COMMIT_EFLAGS(EFlags);
7764 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7765 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7766 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7767 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7768 IEM_MC_ENDIF();
7769 IEM_MC_ADVANCE_RIP();
7770
7771 IEM_MC_END();
7772 return VINF_SUCCESS;
7773}
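
/*
 * Illustrative only (atomicity and the EFLAGS handling beyond ZF omitted):
 * the compare-and-exchange performed on the mapped quadword above.
 */
#if 0
static void cmpxchg8bRef(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx, bool *pfZf)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx;       /* equal: store ECX:EBX and set ZF... */
        *pfZf    = true;
    }
    else
    {
        *pu64EaxEdx = *pu64Mem;     /* ...otherwise load EDX:EAX from memory and clear ZF. */
        *pfZf       = false;
    }
}
#endif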
7774
7775
7776/** Opcode REX.W 0x0f 0xc7 !11/1. */
7777FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7778{
7779 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7780 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7781 {
7782#if 0
7783 RT_NOREF(bRm);
7784 IEMOP_BITCH_ABOUT_STUB();
7785 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7786#else
7787 IEM_MC_BEGIN(4, 3);
7788 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7789 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7790 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7791 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7792 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7793 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7795
7796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7797 IEMOP_HLP_DONE_DECODING();
7798 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7799 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7800
7801 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7802 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7803 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7804
7805 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7806 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7807 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7808
7809 IEM_MC_FETCH_EFLAGS(EFlags);
7810# ifdef RT_ARCH_AMD64
7811 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7812 {
7813 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7814 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7815 else
7816 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7817 }
7818 else
7819# endif
7820 {
 7821 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
 7822 accesses and is not atomic, which works fine in a uni-CPU guest
7823 configuration (ignoring DMA). If guest SMP is active we have no choice
7824 but to use a rendezvous callback here. Sigh. */
7825 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7826 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7827 else
7828 {
7829 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7830 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7831 }
7832 }
7833
7834 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7835 IEM_MC_COMMIT_EFLAGS(EFlags);
7836 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7837 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7838 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7839 IEM_MC_ENDIF();
7840 IEM_MC_ADVANCE_RIP();
7841
7842 IEM_MC_END();
7843 return VINF_SUCCESS;
7844#endif
7845 }
7846 Log(("cmpxchg16b -> #UD\n"));
7847 return IEMOP_RAISE_INVALID_OPCODE();
7848}
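
/*
 * Rough sketch of the non-atomic fallback idea referred to in the note above:
 * compare and swap the two halves with ordinary loads and stores. This is only
 * safe while no other vCPU (or DMA) can touch the memory, hence the rendezvous
 * on SMP guests. Hypothetical helper, not the actual fallback code:
 */
#if 0
static void cmpxchg16bFallbackRef(uint64_t *pau64Mem /*[2]*/, uint64_t *pau64RaxRdx /*[2]*/,
                                  uint64_t const *pau64RbxRcx /*[2]*/, bool *pfZf)
{
    if (   pau64Mem[0] == pau64RaxRdx[0]
        && pau64Mem[1] == pau64RaxRdx[1])
    {
        pau64Mem[0] = pau64RbxRcx[0];
        pau64Mem[1] = pau64RbxRcx[1];
        *pfZf = true;
    }
    else
    {
        pau64RaxRdx[0] = pau64Mem[0];
        pau64RaxRdx[1] = pau64Mem[1];
        *pfZf = false;
    }
}
#endif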
7849
7850FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7851{
7852 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7853 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7854 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7855}
7856
7857/** Opcode 0x0f 0xc7 11/6. */
7858FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7859
7860/** Opcode 0x0f 0xc7 !11/6. */
7861FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7862
7863/** Opcode 0x66 0x0f 0xc7 !11/6. */
7864FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7865
7866/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7867FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7868
7869/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7870FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7871
7872/** Opcode 0x0f 0xc7 11/7. */
7873FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7874
7875
7876/**
7877 * Group 9 jump table for register variant.
7878 */
7879IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7880{ /* pfx: none, 066h, 0f3h, 0f2h */
7881 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7882 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7883 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7884 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7885 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7886 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7887 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7888 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7889};
7890AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7891
7892
7893/**
7894 * Group 9 jump table for memory variant.
7895 */
7896IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7897{ /* pfx: none, 066h, 0f3h, 0f2h */
7898 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7899 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7900 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7901 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7902 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7903 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7904 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7905 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7906};
7907AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7908
7909
7910/** Opcode 0x0f 0xc7. */
7911FNIEMOP_DEF(iemOp_Grp9)
7912{
7913 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7914 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7915 /* register, register */
7916 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7917 + pVCpu->iem.s.idxPrefix], bRm);
7918 /* memory, register */
7919 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7920 + pVCpu->iem.s.idxPrefix], bRm);
7921}
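
/*
 * Worked example of the table indexing above, assuming idxPrefix follows the
 * none/066h/0f3h/0f2h = 0/1/2/3 numbering from the table comments:
 */
#if 0
/* f3 0f c7 /6 with a memory operand is vmxon Mq: */
unsigned const   iReg      = 6;                                         /* modr/m /6 */
unsigned const   idxPrefix = 2;                                         /* 0f3h */
PFNIEMOPRM const pfn       = g_apfnGroup9MemReg[iReg * 4 + idxPrefix];  /* == iemOp_Grp9_vmxon_Mq */
#endif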
7922
7923
7924/**
7925 * Common 'bswap register' helper.
7926 */
7927FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7928{
7929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7930 switch (pVCpu->iem.s.enmEffOpSize)
7931 {
7932 case IEMMODE_16BIT:
7933 IEM_MC_BEGIN(1, 0);
7934 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7935 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7936 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7937 IEM_MC_ADVANCE_RIP();
7938 IEM_MC_END();
7939 return VINF_SUCCESS;
7940
7941 case IEMMODE_32BIT:
7942 IEM_MC_BEGIN(1, 0);
7943 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7944 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7945 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7946 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7947 IEM_MC_ADVANCE_RIP();
7948 IEM_MC_END();
7949 return VINF_SUCCESS;
7950
7951 case IEMMODE_64BIT:
7952 IEM_MC_BEGIN(1, 0);
7953 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7954 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7955 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7956 IEM_MC_ADVANCE_RIP();
7957 IEM_MC_END();
7958 return VINF_SUCCESS;
7959
7960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7961 }
7962}
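
/*
 * Reference byte swap for the 32-bit case (illustrative only, not the actual
 * iemAImpl_bswap_u32 assembly helper). The 16-bit form is undefined by the
 * manuals, which is why the separate iemAImpl_bswap_u16 helper above must not
 * touch the high dword:
 */
#if 0
static uint32_t bswapRef32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif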
7963
7964
7965/** Opcode 0x0f 0xc8. */
7966FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7967{
7968 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7969 /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
7970 prefix. REX.B is the correct prefix it appears. For a parallel
7971 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7972 IEMOP_HLP_MIN_486();
7973 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7974}
7975
7976
7977/** Opcode 0x0f 0xc9. */
7978FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7979{
7980 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7981 IEMOP_HLP_MIN_486();
7982 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7983}
7984
7985
7986/** Opcode 0x0f 0xca. */
7987FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7988{
 7989 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7990 IEMOP_HLP_MIN_486();
7991 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7992}
7993
7994
7995/** Opcode 0x0f 0xcb. */
7996FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7997{
 7998 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7999 IEMOP_HLP_MIN_486();
8000 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8001}
8002
8003
8004/** Opcode 0x0f 0xcc. */
8005FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8006{
8007 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8008 IEMOP_HLP_MIN_486();
8009 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8010}
8011
8012
8013/** Opcode 0x0f 0xcd. */
8014FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8015{
8016 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8017 IEMOP_HLP_MIN_486();
8018 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8019}
8020
8021
8022/** Opcode 0x0f 0xce. */
8023FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8024{
8025 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8026 IEMOP_HLP_MIN_486();
8027 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8028}
8029
8030
8031/** Opcode 0x0f 0xcf. */
8032FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8033{
8034 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8035 IEMOP_HLP_MIN_486();
8036 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8037}
8038
8039
8040/* Opcode 0x0f 0xd0 - invalid */
8041/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8042FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8043/* Opcode 0xf3 0x0f 0xd0 - invalid */
8044/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8045FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8046
8047/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8048FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8049/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8050FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8051/* Opcode 0xf3 0x0f 0xd1 - invalid */
8052/* Opcode 0xf2 0x0f 0xd1 - invalid */
8053
8054/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8055FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8056/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8057FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8058/* Opcode 0xf3 0x0f 0xd2 - invalid */
8059/* Opcode 0xf2 0x0f 0xd2 - invalid */
8060
8061/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8062FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8063/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8064FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8065/* Opcode 0xf3 0x0f 0xd3 - invalid */
8066/* Opcode 0xf2 0x0f 0xd3 - invalid */
8067
8068/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8069FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8070/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8071FNIEMOP_STUB(iemOp_paddq_Vx_W);
8072/* Opcode 0xf3 0x0f 0xd4 - invalid */
8073/* Opcode 0xf2 0x0f 0xd4 - invalid */
8074
8075/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8076FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8077/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8078FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8079/* Opcode 0xf3 0x0f 0xd5 - invalid */
8080/* Opcode 0xf2 0x0f 0xd5 - invalid */
8081
8082/* Opcode 0x0f 0xd6 - invalid */
8083
8084/**
8085 * @opcode 0xd6
8086 * @oppfx 0x66
8087 * @opcpuid sse2
8088 * @opgroup og_sse2_pcksclr_datamove
8089 * @opxcpttype none
8090 * @optest op1=-1 op2=2 -> op1=2
8091 * @optest op1=0 op2=-42 -> op1=-42
8092 */
8093FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8094{
8095 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8097 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8098 {
8099 /*
8100 * Register, register.
8101 */
8102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8103 IEM_MC_BEGIN(0, 2);
8104 IEM_MC_LOCAL(uint64_t, uSrc);
8105
8106 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8107 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8108
8109 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8110 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8111
8112 IEM_MC_ADVANCE_RIP();
8113 IEM_MC_END();
8114 }
8115 else
8116 {
8117 /*
8118 * Memory, register.
8119 */
8120 IEM_MC_BEGIN(0, 2);
8121 IEM_MC_LOCAL(uint64_t, uSrc);
8122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8123
8124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8126 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8127 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8128
8129 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8130 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8131
8132 IEM_MC_ADVANCE_RIP();
8133 IEM_MC_END();
8134 }
8135 return VINF_SUCCESS;
8136}
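
/*
 * Sketch of the register form decoded above, matching the @optest lines: the
 * low quadword is copied and the high quadword of the destination is zeroed
 * (hypothetical helper viewing an xmm register as two quadwords):
 */
#if 0
static void movqWqVqRegRef(uint64_t *pau64Dst /*[2]*/, uint64_t const *pau64Src /*[2]*/)
{
    pau64Dst[0] = pau64Src[0];  /* IEM_MC_FETCH_XREG_U64 ... */
    pau64Dst[1] = 0;            /* ... IEM_MC_STORE_XREG_U64_ZX_U128 */
}
#endif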
8137
8138
8139/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
8140FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
8141/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
8142FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
8143#if 0
8144FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
8145{
 8146 /* Docs say register only. */
8147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8148
8149 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8150 {
8151 case IEM_OP_PRF_SIZE_OP: /* SSE */
 8152 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
8153 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8154 IEM_MC_BEGIN(2, 0);
8155 IEM_MC_ARG(uint64_t *, pDst, 0);
8156 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8157 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8158 IEM_MC_PREPARE_SSE_USAGE();
8159 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8160 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8161 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8162 IEM_MC_ADVANCE_RIP();
8163 IEM_MC_END();
8164 return VINF_SUCCESS;
8165
8166 case 0: /* MMX */
 8167 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
8168 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8169 IEM_MC_BEGIN(2, 0);
8170 IEM_MC_ARG(uint64_t *, pDst, 0);
8171 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8172 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8173 IEM_MC_PREPARE_FPU_USAGE();
8174 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8175 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8176 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8177 IEM_MC_ADVANCE_RIP();
8178 IEM_MC_END();
8179 return VINF_SUCCESS;
8180
8181 default:
8182 return IEMOP_RAISE_INVALID_OPCODE();
8183 }
8184}
8185#endif
8186
8187
8188/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8189FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8190{
 8191 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
 8192 /** @todo testcase: Check that the instruction implicitly clears the high
 8193 * bits in 64-bit mode. REX.W first becomes necessary when VLMAX > 256
8194 * and opcode modifications are made to work with the whole width (not
8195 * just 128). */
 8196 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
 8197 /* Docs say register only. */
8198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8199 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8200 {
8201 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8202 IEM_MC_BEGIN(2, 0);
8203 IEM_MC_ARG(uint64_t *, pDst, 0);
8204 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8205 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8206 IEM_MC_PREPARE_FPU_USAGE();
8207 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8208 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8209 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8210 IEM_MC_ADVANCE_RIP();
8211 IEM_MC_END();
8212 return VINF_SUCCESS;
8213 }
8214 return IEMOP_RAISE_INVALID_OPCODE();
8215}
8216
 8217 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8218FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8219{
 8220 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
 8221 /** @todo testcase: Check that the instruction implicitly clears the high
 8222 * bits in 64-bit mode. REX.W first becomes necessary when VLMAX > 256
 8223 * and opcode modifications are made to work with the whole width (not
 8224 * just 128). */
 8225 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
 8226 /* Docs say register only. */
8227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8228 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8229 {
8230 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8231 IEM_MC_BEGIN(2, 0);
8232 IEM_MC_ARG(uint64_t *, pDst, 0);
8233 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8234 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8235 IEM_MC_PREPARE_SSE_USAGE();
8236 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8237 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8238 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8239 IEM_MC_ADVANCE_RIP();
8240 IEM_MC_END();
8241 return VINF_SUCCESS;
8242 }
8243 return IEMOP_RAISE_INVALID_OPCODE();
8244}
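
/*
 * Reference for the pmovmskb operation implemented by the two functions above
 * (sketch only, hypothetical helper): gather the most significant bit of each
 * source byte into the low bits of the destination GPR, zeroing the rest.
 */
#if 0
static uint64_t pmovmskbRef(uint8_t const *pabSrc, unsigned cbSrc /* 8 for MMX, 16 for SSE */)
{
    uint64_t fMask = 0;
    for (unsigned i = 0; i < cbSrc; i++)
        fMask |= (uint64_t)(pabSrc[i] >> 7) << i;   /* byte sign bit -> mask bit i */
    return fMask;
}
#endif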
8245
8246/* Opcode 0xf3 0x0f 0xd7 - invalid */
8247/* Opcode 0xf2 0x0f 0xd7 - invalid */
8248
8249
8250/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8251FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8252/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8253FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8254/* Opcode 0xf3 0x0f 0xd8 - invalid */
8255/* Opcode 0xf2 0x0f 0xd8 - invalid */
8256
8257/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8258FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8259/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8260FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8261/* Opcode 0xf3 0x0f 0xd9 - invalid */
8262/* Opcode 0xf2 0x0f 0xd9 - invalid */
8263
8264/** Opcode 0x0f 0xda - pminub Pq, Qq */
8265FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8266/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8267FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8268/* Opcode 0xf3 0x0f 0xda - invalid */
8269/* Opcode 0xf2 0x0f 0xda - invalid */
8270
8271/** Opcode 0x0f 0xdb - pand Pq, Qq */
8272FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8273/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8274FNIEMOP_STUB(iemOp_pand_Vx_W);
8275/* Opcode 0xf3 0x0f 0xdb - invalid */
8276/* Opcode 0xf2 0x0f 0xdb - invalid */
8277
8278/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8279FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8280/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8281FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8282/* Opcode 0xf3 0x0f 0xdc - invalid */
8283/* Opcode 0xf2 0x0f 0xdc - invalid */
8284
8285/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8286FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8287/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8288FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8289/* Opcode 0xf3 0x0f 0xdd - invalid */
8290/* Opcode 0xf2 0x0f 0xdd - invalid */
8291
8292/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8293FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8294/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8295FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8296/* Opcode 0xf3 0x0f 0xde - invalid */
8297/* Opcode 0xf2 0x0f 0xde - invalid */
8298
8299/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8300FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8301/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8302FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8303/* Opcode 0xf3 0x0f 0xdf - invalid */
8304/* Opcode 0xf2 0x0f 0xdf - invalid */
8305
8306/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8307FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8308/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8309FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8310/* Opcode 0xf3 0x0f 0xe0 - invalid */
8311/* Opcode 0xf2 0x0f 0xe0 - invalid */
8312
8313/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8314FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8315/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8316FNIEMOP_STUB(iemOp_psraw_Vx_W);
8317/* Opcode 0xf3 0x0f 0xe1 - invalid */
8318/* Opcode 0xf2 0x0f 0xe1 - invalid */
8319
8320/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8321FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8322/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8323FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8324/* Opcode 0xf3 0x0f 0xe2 - invalid */
8325/* Opcode 0xf2 0x0f 0xe2 - invalid */
8326
8327/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8328FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8329/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8330FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8331/* Opcode 0xf3 0x0f 0xe3 - invalid */
8332/* Opcode 0xf2 0x0f 0xe3 - invalid */
8333
8334/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8335FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8336/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8337FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8338/* Opcode 0xf3 0x0f 0xe4 - invalid */
8339/* Opcode 0xf2 0x0f 0xe4 - invalid */
8340
8341/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8342FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8343/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8344FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8345/* Opcode 0xf3 0x0f 0xe5 - invalid */
8346/* Opcode 0xf2 0x0f 0xe5 - invalid */
8347
8348/* Opcode 0x0f 0xe6 - invalid */
8349/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8350FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8351/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8352FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8353/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8354FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8355
8356
8357/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8358FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8359{
8360 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8362 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8363 {
8364 /* Register, memory. */
8365 IEM_MC_BEGIN(0, 2);
8366 IEM_MC_LOCAL(uint64_t, uSrc);
8367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8368
8369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8371 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8372 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8373
8374 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8375 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8376
8377 IEM_MC_ADVANCE_RIP();
8378 IEM_MC_END();
8379 return VINF_SUCCESS;
8380 }
8381 /* The register, register encoding is invalid. */
8382 return IEMOP_RAISE_INVALID_OPCODE();
8383}
8384
8385/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8386FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8387{
8388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8389 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8390 {
8391 /* Register, memory. */
8392 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8393 IEM_MC_BEGIN(0, 2);
8394 IEM_MC_LOCAL(RTUINT128U, uSrc);
8395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8396
8397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8399 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8401
8402 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8403 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8404
8405 IEM_MC_ADVANCE_RIP();
8406 IEM_MC_END();
8407 return VINF_SUCCESS;
8408 }
8409
8410 /* The register, register encoding is invalid. */
8411 return IEMOP_RAISE_INVALID_OPCODE();
8412}
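
/*
 * The ALIGN_SSE store above implies the usual 16 byte alignment requirement
 * for movntdq; a simplified sketch of the check (hypothetical helper, AC and
 * other alignment nuances ignored):
 */
#if 0
static bool movntdqAddrOk(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0;    /* misaligned stores raise #GP(0) */
}
#endif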
8413
8414/* Opcode 0xf3 0x0f 0xe7 - invalid */
8415/* Opcode 0xf2 0x0f 0xe7 - invalid */
8416
8417
8418/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8419FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8420/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8421FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8422/* Opcode 0xf3 0x0f 0xe8 - invalid */
8423/* Opcode 0xf2 0x0f 0xe8 - invalid */
8424
8425/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8426FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8427/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8428FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8429/* Opcode 0xf3 0x0f 0xe9 - invalid */
8430/* Opcode 0xf2 0x0f 0xe9 - invalid */
8431
8432/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8433FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8434/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8435FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8436/* Opcode 0xf3 0x0f 0xea - invalid */
8437/* Opcode 0xf2 0x0f 0xea - invalid */
8438
8439/** Opcode 0x0f 0xeb - por Pq, Qq */
8440FNIEMOP_STUB(iemOp_por_Pq_Qq);
8441/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8442FNIEMOP_STUB(iemOp_por_Vx_W);
8443/* Opcode 0xf3 0x0f 0xeb - invalid */
8444/* Opcode 0xf2 0x0f 0xeb - invalid */
8445
8446/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8447FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8448/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8449FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8450/* Opcode 0xf3 0x0f 0xec - invalid */
8451/* Opcode 0xf2 0x0f 0xec - invalid */
8452
8453/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8454FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8455/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8456FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8457/* Opcode 0xf3 0x0f 0xed - invalid */
8458/* Opcode 0xf2 0x0f 0xed - invalid */
8459
8460/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8461FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8462/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8463FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8464/* Opcode 0xf3 0x0f 0xee - invalid */
8465/* Opcode 0xf2 0x0f 0xee - invalid */
8466
8467
8468/** Opcode 0x0f 0xef - pxor Pq, Qq */
8469FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8470{
8471 IEMOP_MNEMONIC(pxor, "pxor");
8472 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8473}
8474
8475/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8476FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8477{
8478 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8479 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8480}
8481
8482/* Opcode 0xf3 0x0f 0xef - invalid */
8483/* Opcode 0xf2 0x0f 0xef - invalid */
8484
8485/* Opcode 0x0f 0xf0 - invalid */
8486/* Opcode 0x66 0x0f 0xf0 - invalid */
8487/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8488FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8489
8490/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8491FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8492/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8493FNIEMOP_STUB(iemOp_psllw_Vx_W);
8494/* Opcode 0xf2 0x0f 0xf1 - invalid */
8495
8496/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8497FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8498/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8499FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8500/* Opcode 0xf2 0x0f 0xf2 - invalid */
8501
8502/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8503FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8504/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8505FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8506/* Opcode 0xf2 0x0f 0xf3 - invalid */
8507
8508/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8509FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8510/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8511FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8512/* Opcode 0xf2 0x0f 0xf4 - invalid */
8513
8514/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8515FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8516/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8517FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8518/* Opcode 0xf2 0x0f 0xf5 - invalid */
8519
8520/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8521FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8522/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8523FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8524/* Opcode 0xf2 0x0f 0xf6 - invalid */
8525
8526/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8527FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8528/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8529FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8530/* Opcode 0xf2 0x0f 0xf7 - invalid */
8531
8532/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8533FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8534/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8535FNIEMOP_STUB(iemOp_psubb_Vx_W);
8536/* Opcode 0xf2 0x0f 0xf8 - invalid */
8537
8538/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8539FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8540/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8541FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8542/* Opcode 0xf2 0x0f 0xf9 - invalid */
8543
8544/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8545FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8546/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8547FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8548/* Opcode 0xf2 0x0f 0xfa - invalid */
8549
8550/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8551FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8552/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8553FNIEMOP_STUB(iemOp_psubq_Vx_W);
8554/* Opcode 0xf2 0x0f 0xfb - invalid */
8555
8556/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8557FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8558/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8559FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8560/* Opcode 0xf2 0x0f 0xfc - invalid */
8561
8562/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8563FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8564/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8565FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8566/* Opcode 0xf2 0x0f 0xfd - invalid */
8567
8568/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8569FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8570/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8571FNIEMOP_STUB(iemOp_paddd_Vx_W);
8572/* Opcode 0xf2 0x0f 0xfe - invalid */
8573
8574
8575/** Opcode **** 0x0f 0xff - UD0 */
8576FNIEMOP_DEF(iemOp_ud0)
8577{
8578 IEMOP_MNEMONIC(ud0, "ud0");
8579 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8580 {
8581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8582#ifndef TST_IEM_CHECK_MC
8583 RTGCPTR GCPtrEff;
8584 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8585 if (rcStrict != VINF_SUCCESS)
8586 return rcStrict;
8587#endif
8588 IEMOP_HLP_DONE_DECODING();
8589 }
8590 return IEMOP_RAISE_INVALID_OPCODE();
8591}
8592
8593
8594
8595/**
8596 * Two byte opcode map, first byte 0x0f.
8597 *
8598 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8599 * check if it needs updating as well when making changes.
8600 */
8601IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8602{
 8603 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8604 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8605 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8606 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8607 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8608 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8609 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8610 /* 0x06 */ IEMOP_X4(iemOp_clts),
8611 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8612 /* 0x08 */ IEMOP_X4(iemOp_invd),
8613 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8614 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8615 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8616 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8617 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8618 /* 0x0e */ IEMOP_X4(iemOp_femms),
8619 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8620
8621 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
8622 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8623 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8624 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8625 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8626 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8627 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
8628 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8629 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8630 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8631 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8632 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8633 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8634 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8635 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8636 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8637
    /* 0x20 */ iemOp_mov_Rd_Cd,        iemOp_mov_Rd_Cd,        iemOp_mov_Rd_Cd,        iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,        iemOp_mov_Rd_Dd,        iemOp_mov_Rd_Dd,        iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,        iemOp_mov_Cd_Rd,        iemOp_mov_Cd_Rd,        iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,        iemOp_mov_Dd_Rd,        iemOp_mov_Dd_Rd,        iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,        iemOp_mov_Rd_Td,        iemOp_mov_Rd_Td,        iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,        iemOp_mov_Td_Rd,        iemOp_mov_Td_Rd,        iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps,   iemOp_movapd_Vpd_Wpd,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps,   iemOp_movapd_Wpd_Vpd,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey,  iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps,  iemOp_movntpd_Mpd_Vpd,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss,  iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss,  iemOp_ucomisd_Vsd_Wsd,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss,   iemOp_comisd_Vsd_Wsd,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups,  iemOp_movmskpd_Gy_Upd,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps,   iemOp_sqrtpd_Vpd_Wpd,   iemOp_sqrtss_Vss_Wss,   iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps,  iemOp_InvalidNeedRM,    iemOp_rsqrtss_Vss_Wss,  iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps,    iemOp_InvalidNeedRM,    iemOp_rcpss_Vss_Wss,    iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps,    iemOp_andpd_Vpd_Wpd,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps,   iemOp_andnpd_Vpd_Wpd,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps,     iemOp_orpd_Vpd_Wpd,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps,    iemOp_xorpd_Vpd_Wpd,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps,    iemOp_addpd_Vpd_Wpd,    iemOp_addss_Vss_Wss,    iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps,    iemOp_mulpd_Vpd_Wpd,    iemOp_mulss_Vss_Wss,    iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps,    iemOp_subpd_Vpd_Wpd,    iemOp_subss_Vss_Wss,    iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps,    iemOp_minpd_Vpd_Wpd,    iemOp_minss_Vss_Wss,    iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps,    iemOp_divpd_Vpd_Wpd,    iemOp_divss_Vss_Wss,    iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps,    iemOp_maxpd_Vpd_Wpd,    iemOp_maxss_Vss_Wss,    iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd,  iemOp_punpcklbw_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd,  iemOp_punpcklwd_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd,  iemOp_punpckldq_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq,   iemOp_packsswb_Vx_Wx,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq,    iemOp_pcmpgtb_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq,    iemOp_pcmpgtw_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq,    iemOp_pcmpgtd_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq,   iemOp_packuswb_Vx_W,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd,  iemOp_punpckhbw_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd,  iemOp_punpckhwd_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd,  iemOp_punpckhdq_Vx_W,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd,   iemOp_packssdw_Vx_Wx,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM,    iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM,    iemOp_punpckhqdq_Vx_W,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey,     iemOp_movd_q_Vy_Ey,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq,       iemOp_movdqa_Vx_Wx,     iemOp_movdqu_Vx_Wx,     iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib,  iemOp_pshufd_Vx_Wx_Ib,  iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq,    iemOp_pcmpeqb_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq,    iemOp_pcmpeqw_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq,    iemOp_pcmpeqd_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms,             iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy,     iemOp_AmdGrp17,         iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM,    iemOp_haddpd_Vpd_Wpd,   iemOp_InvalidNeedRM,    iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM,    iemOp_hsubpd_Vpd_Wpd,   iemOp_InvalidNeedRM,    iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd,     iemOp_movd_q_Ey_Vy,     iemOp_movq_Vq_Wq,       iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq,       iemOp_movdqa_Wx_Vx,     iemOp_movdqu_Wx_Vx,     iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe,             iemOp_InvalidNeedRM,    iemOp_popcnt_Gv_Ev,     iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev,        iemOp_bsf_Gv_Ev,        iemOp_tzcnt_Gv_Ev,      iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,        iemOp_bsr_Gv_Ev,        iemOp_lzcnt_Gv_Ev,      iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib,  iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM,    iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM,    iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq,      iemOp_psrlw_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq,      iemOp_psrld_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq,      iemOp_psrlq_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq,      iemOp_paddq_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq,     iemOp_pmullw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM,    iemOp_movq_Wq_Vq,       iemOp_movq2dq_Vdq_Nq,   iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq,   iemOp_pmovmskb_Gd_Ux,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq,    iemOp_psubusb_Vx_W,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq,    iemOp_psubusw_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq,     iemOp_pminub_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq,       iemOp_pand_Vx_W,        iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq,    iemOp_paddusb_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq,    iemOp_paddusw_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq,     iemOp_pmaxub_Vx_W,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq,      iemOp_pandn_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq,      iemOp_pavgb_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq,      iemOp_psraw_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq,      iemOp_psrad_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq,      iemOp_pavgw_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq,    iemOp_pmulhuw_Vx_W,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq,     iemOp_pmulhw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM,    iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd,  iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq,     iemOp_movntdq_Mx_Vx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq,     iemOp_psubsb_Vx_W,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq,     iemOp_psubsw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq,     iemOp_pminsw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq,        iemOp_por_Vx_W,         iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq,     iemOp_paddsb_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq,     iemOp_paddsw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq,     iemOp_pmaxsw_Vx_W,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq,       iemOp_pxor_Vx_Wx,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq,      iemOp_psllw_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq,      iemOp_pslld_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq,      iemOp_psllq_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq,    iemOp_pmuludq_Vx_W,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq,    iemOp_pmaddwd_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq,     iemOp_psadbw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq,   iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq,      iemOp_psubb_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq,      iemOp_psubw_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq,      iemOp_psubd_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq,      iemOp_psubq_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq,      iemOp_paddb_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq,      iemOp_paddw_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq,      iemOp_paddd_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
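
/*
 * Dispatch sketch: each opcode byte owns four consecutive slots in the map
 * above, selected by the SIMD prefix in effect -- none, 0x66, 0xf3 or 0xf2,
 * in that order (hence 256 * 4 == 1024 entries).  Assuming the decoder keeps
 * such a 0..3 selector around (an idxPrefix field is assumed here), looking
 * up the handler for a second opcode byte b would go along these lines:
 *
 *     uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *     return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 */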

/** @} */
