/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 67005 2017-05-22 10:59:07Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
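
/*
 * A rough standalone sketch of how the ModR/M byte feeds the group dispatch
 * above (illustrative values and local names only; the real decoding uses
 * the X86_MODRM_* masks plus the REX bits):
 *
 * @code
 *  uint8_t const  bRm  = 0xd8;            // example byte: mod=3, reg=3, rm=0
 *  unsigned const iMod = bRm >> 6;        // 3 -> the operand is a register
 *  unsigned const iReg = (bRm >> 3) & 7;  // 3 -> /3, i.e. ltr in group 6
 *  unsigned const iRm  = bRm & 7;         // 0 -> AX/EAX/RAX (plus REX.B)
 * @endcode
 *
 * With mod != 3 the rm field (together with an optional SIB byte and
 * displacement) selects a memory operand instead, which is what
 * IEM_MC_CALC_RM_EFF_ADDR resolves in the workers above.
 */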


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
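
/*
 * Why the 16-bit smsw paths above OR in high bits (a sketch derived from the
 * IEMTARGETCPU checks in the code, explanatory only): on a 286 the reserved
 * MSW bits 4..15 read as set (the 0xfff0 fill), on a 386 only bits 5..15 are
 * filled (0xffe0, bit 4 being CR0.ET), while later CPUs return CR0 bits 0..15
 * unmodified. For instance, with CR0[15:0] = 0x0013 (an assumed value picked
 * for illustration):
 *
 * @code
 *  uint16_t u16Msw = UINT16_C(0x0013);
 *  u16Msw |= UINT16_C(0xffe0);   // 386-style fill -> the guest reads 0xfff3
 * @endcode
 */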


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
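
/*
 * For orientation, a loose model of what the iemCImpl_LarLsl_u* workers
 * compute for lar (simplified sketch only; the real workers also perform the
 * selector, privilege and descriptor-type checks that decide whether ZF is
 * set, and uDescDword2 here merely stands for the second dword of the
 * descriptor an assumed GDT/LDT lookup of u16Sel returned):
 *
 * @code
 *  uint32_t iemExampleLarValue(uint32_t uDescDword2)
 *  {
 *      return uDescDword2 & UINT32_C(0x00ffff00); // access-rights bits
 *  }
 * @endcode
 *
 * lsl instead returns the segment limit, scaled up when the granularity bit
 * is set.
 */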



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}

/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
//  IEMOP_HLP_MIN_486();
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
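
/*
 * Note on the VssZx destination above: the two movss load forms differ in how
 * they treat bits 127:32. A rough sketch with illustrative union field names:
 *
 * @code
 *  Dst.au32[0] = uSrc32;                            // both forms set dword 0
 *  if (fMemorySource)                               // only the memory form
 *      Dst.au32[1] = Dst.au32[2] = Dst.au32[3] = 0; // zero-extends the rest
 * @endcode
 *
 * This matches the IEM_MC_STORE_XREG_U32 vs IEM_MC_STORE_XREG_U32_ZX_U128
 * split in the function body.
 */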


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
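
/*
 * A rough sketch of the duplication movddup performs (illustrative union
 * field names; the actual shuffling lives in iemAImpl_movddup):
 *
 * @code
 *  uint64_t const uLo = Src.au64[0];  // low qword of the register, or the
 *                                     // 8 bytes fetched from memory
 *  Dst.au64[0] = uLo;
 *  Dst.au64[1] = uLo;
 * @endcode
 */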
1642
1643
1644/**
1645 * @opcode 0x13
1646 * @opcodesub !11 mr/reg
1647 * @oppfx none
1648 * @opcpuid sse
1649 * @opgroup og_sse_simdfp_datamove
1650 * @opxcpttype 5
1651 * @optest op1=1 op2=2 -> op1=2
1652 * @optest op1=0 op2=-42 -> op1=-42
1653 */
1654FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1655{
1656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1657 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1658 {
1659 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1660
1661 IEM_MC_BEGIN(0, 2);
1662 IEM_MC_LOCAL(uint64_t, uSrc);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664
1665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1669
1670 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1671 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1672
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 return VINF_SUCCESS;
1676 }
1677
1678 /**
1679 * @opdone
1680 * @opmnemonic ud0f13m3
1681 * @opcode 0x13
1682 * @opcodesub 11 mr/reg
1683 * @oppfx none
1684 * @opunused immediate
1685 * @opcpuid sse
1686 * @optest ->
1687 */
1688 return IEMOP_RAISE_INVALID_OPCODE();
1689}
1690
1691
1692/**
1693 * @opcode 0x13
1694 * @opcodesub !11 mr/reg
1695 * @oppfx 0x66
1696 * @opcpuid sse2
1697 * @opgroup og_sse2_pcksclr_datamove
1698 * @opxcpttype 5
1699 * @optest op1=1 op2=2 -> op1=2
1700 * @optest op1=0 op2=-42 -> op1=-42
1701 */
1702FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1703{
1704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1705 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1706 {
1707 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1708 IEM_MC_BEGIN(0, 2);
1709 IEM_MC_LOCAL(uint64_t, uSrc);
1710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1711
1712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1716
1717 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1718 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1719
1720 IEM_MC_ADVANCE_RIP();
1721 IEM_MC_END();
1722 return VINF_SUCCESS;
1723 }
1724
1725 /**
1726 * @opdone
1727 * @opmnemonic ud660f13m3
1728 * @opcode 0x13
1729 * @opcodesub 11 mr/reg
1730 * @oppfx 0x66
1731 * @opunused immediate
1732 * @opcpuid sse
1733 * @optest ->
1734 */
1735 return IEMOP_RAISE_INVALID_OPCODE();
1736}
1737
1738
1739/**
1740 * @opmnemonic udf30f13
1741 * @opcode 0x13
1742 * @oppfx 0xf3
1743 * @opunused intel-modrm
1744 * @opcpuid sse
1745 * @optest ->
1746 * @opdone
1747 */
1748
1749/**
1750 * @opmnemonic udf20f13
1751 * @opcode 0x13
1752 * @oppfx 0xf2
1753 * @opunused intel-modrm
1754 * @opcpuid sse
1755 * @optest ->
1756 * @opdone
1757 */
1758
1759/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1760FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1761/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1762FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1763
1764/**
1765 * @opdone
1766 * @opmnemonic udf30f14
1767 * @opcode 0x14
1768 * @oppfx 0xf3
1769 * @opunused intel-modrm
1770 * @opcpuid sse
1771 * @optest ->
1772 * @opdone
1773 */
1774
1775/**
1776 * @opmnemonic udf20f14
1777 * @opcode 0x14
1778 * @oppfx 0xf2
1779 * @opunused intel-modrm
1780 * @opcpuid sse
1781 * @optest ->
1782 * @opdone
1783 */
1784
1785/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1786FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1787/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1788FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1789/* Opcode 0xf3 0x0f 0x15 - invalid */
1790/* Opcode 0xf2 0x0f 0x15 - invalid */
1791
1792/**
1793 * @opdone
1794 * @opmnemonic udf30f15
1795 * @opcode 0x15
1796 * @oppfx 0xf3
1797 * @opunused intel-modrm
1798 * @opcpuid sse
1799 * @optest ->
1800 * @opdone
1801 */
1802
1803/**
1804 * @opmnemonic udf20f15
1805 * @opcode 0x15
1806 * @oppfx 0xf2
1807 * @opunused intel-modrm
1808 * @opcpuid sse
1809 * @optest ->
1810 * @opdone
1811 */
1812
1813FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1814{
1815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1817 {
1818 /**
1819 * @opcode 0x16
1820 * @opcodesub 11 mr/reg
1821 * @oppfx none
1822 * @opcpuid sse
1823 * @opgroup og_sse_simdfp_datamove
1824 * @opxcpttype 5
1825 * @optest op1=1 op2=2 -> op1=2
1826 * @optest op1=0 op2=-42 -> op1=-42
1827 */
1828 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1829
1830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1831 IEM_MC_BEGIN(0, 1);
1832 IEM_MC_LOCAL(uint64_t, uSrc);
1833
1834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1836 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1837 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 /**
1845 * @opdone
1846 * @opcode 0x16
1847 * @opcodesub !11 mr/reg
1848 * @oppfx none
1849 * @opcpuid sse
1850 * @opgroup og_sse_simdfp_datamove
1851 * @opxcpttype 5
1852 * @optest op1=1 op2=2 -> op1=2
1853 * @optest op1=0 op2=-42 -> op1=-42
1854 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1855 */
1856 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1857
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(uint64_t, uSrc);
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}
1875
1876
1877/**
1878 * @opcode 0x16
1879 * @opcodesub !11 mr/reg
1880 * @oppfx 0x66
1881 * @opcpuid sse2
1882 * @opgroup og_sse2_pcksclr_datamove
1883 * @opxcpttype 5
1884 * @optest op1=1 op2=2 -> op1=2
1885 * @optest op1=0 op2=-42 -> op1=-42
1886 */
1887FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1888{
1889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1890 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1891 {
1892 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1893 IEM_MC_BEGIN(0, 2);
1894 IEM_MC_LOCAL(uint64_t, uSrc);
1895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1896
1897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1899 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1900 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1901
1902 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1903 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1904
1905 IEM_MC_ADVANCE_RIP();
1906 IEM_MC_END();
1907 return VINF_SUCCESS;
1908 }
1909
1910 /**
1911 * @opdone
1912 * @opmnemonic ud660f16m3
1913 * @opcode 0x16
1914 * @opcodesub 11 mr/reg
1915 * @oppfx 0x66
1916 * @opunused immediate
1917 * @opcpuid sse
1918 * @optest ->
1919 */
1920 return IEMOP_RAISE_INVALID_OPCODE();
1921}
1922
1923
1924/**
1925 * @opcode 0x16
1926 * @oppfx 0xf3
1927 * @opcpuid sse3
1928 * @opgroup og_sse3_pcksclr_datamove
1929 * @opxcpttype 4
1930 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1931 * op1=0x00000002000000020000000100000001
1932 */
1933FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1934{
1935 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1938 {
1939 /*
1940 * Register, register.
1941 */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_BEGIN(2, 0);
1944 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1945 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1946
1947 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1948 IEM_MC_PREPARE_SSE_USAGE();
1949
1950 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1951 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1952 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1953
1954 IEM_MC_ADVANCE_RIP();
1955 IEM_MC_END();
1956 }
1957 else
1958 {
1959 /*
1960 * Register, memory.
1961 */
1962 IEM_MC_BEGIN(2, 2);
1963 IEM_MC_LOCAL(RTUINT128U, uSrc);
1964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1965 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1966 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1967
1968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1971 IEM_MC_PREPARE_SSE_USAGE();
1972
1973 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1974 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1975 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1976
1977 IEM_MC_ADVANCE_RIP();
1978 IEM_MC_END();
1979 }
1980 return VINF_SUCCESS;
1981}
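
/*
 * A minimal sketch of what the movshdup worker computes, matching the
 * @optest values above: each even dword is overwritten by the odd dword of
 * the same qword lane.  Illustration only; the real iemAImpl_movshdup lives
 * in the instruction implementation files.
 */
#if 0 /* illustration only */
static void iemExampleMovShDup(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    uint32_t const uLo = puSrc->au32[1];    /* odd dword, low lane */
    uint32_t const uHi = puSrc->au32[3];    /* odd dword, high lane */
    puDst->au32[0] = puDst->au32[1] = uLo;
    puDst->au32[2] = puDst->au32[3] = uHi;
}
#endif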
1982
1983/**
1984 * @opdone
1985 * @opmnemonic udf30f16
1986 * @opcode 0x16
1987 * @oppfx 0xf2
1988 * @opunused intel-modrm
1989 * @opcpuid sse
1990 * @optest ->
1991 * @opdone
1992 */
1993
1994
1995/**
1996 * @opcode 0x17
1997 * @opcodesub !11 mr/reg
1998 * @oppfx none
1999 * @opcpuid sse
2000 * @opgroup og_sse_simdfp_datamove
2001 * @opxcpttype 5
2002 * @optest op1=1 op2=2 -> op1=2
2003 * @optest op1=0 op2=-42 -> op1=-42
2004 */
2005FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2006{
2007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2008 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2009 {
2010 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2011
2012 IEM_MC_BEGIN(0, 2);
2013 IEM_MC_LOCAL(uint64_t, uSrc);
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015
2016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2020
2021 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2022 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 return VINF_SUCCESS;
2027 }
2028
2029 /**
2030 * @opdone
2031 * @opmnemonic ud0f17m3
2032 * @opcode 0x17
2033 * @opcodesub 11 mr/reg
2034 * @oppfx none
2035 * @opunused immediate
2036 * @opcpuid sse
2037 * @optest ->
2038 */
2039 return IEMOP_RAISE_INVALID_OPCODE();
2040}
2041
2042
2043/**
2044 * @opcode 0x17
2045 * @opcodesub !11 mr/reg
2046 * @oppfx 0x66
2047 * @opcpuid sse2
2048 * @opgroup og_sse2_pcksclr_datamove
2049 * @opxcpttype 5
2050 * @optest op1=1 op2=2 -> op1=2
2051 * @optest op1=0 op2=-42 -> op1=-42
2052 */
2053FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2054{
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2059
2060 IEM_MC_BEGIN(0, 2);
2061 IEM_MC_LOCAL(uint64_t, uSrc);
2062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2063
2064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2068
2069 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2070 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2071
2072 IEM_MC_ADVANCE_RIP();
2073 IEM_MC_END();
2074 return VINF_SUCCESS;
2075 }
2076
2077 /**
2078 * @opdone
2079 * @opmnemonic ud660f17m3
2080 * @opcode 0x17
2081 * @opcodesub 11 mr/reg
2082 * @oppfx 0x66
2083 * @opunused immediate
2084 * @opcpuid sse
2085 * @optest ->
2086 */
2087 return IEMOP_RAISE_INVALID_OPCODE();
2088}
2089
2090
2091/**
2092 * @opdone
2093 * @opmnemonic udf30f17
2094 * @opcode 0x17
2095 * @oppfx 0xf3
2096 * @opunused intel-modrm
2097 * @opcpuid sse
2098 * @optest ->
2099 * @opdone
2100 */
2101
2102/**
2103 * @opmnemonic udf20f17
2104 * @opcode 0x17
2105 * @oppfx 0xf2
2106 * @opunused intel-modrm
2107 * @opcpuid sse
2108 * @optest ->
2109 * @opdone
2110 */
2111
2112
2113/** Opcode 0x0f 0x18. */
2114FNIEMOP_DEF(iemOp_prefetch_Grp16)
2115{
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2118 {
2119 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2120 {
2121 case 4: /* Aliased to /0 for the time being according to AMD. */
2122 case 5: /* Aliased to /0 for the time being according to AMD. */
2123 case 6: /* Aliased to /0 for the time being according to AMD. */
2124 case 7: /* Aliased to /0 for the time being according to AMD. */
2125 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2126 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2127 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2128 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2130 }
2131
2132 IEM_MC_BEGIN(0, 1);
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2136 /* Currently a NOP. */
2137 NOREF(GCPtrEffSrc);
2138 IEM_MC_ADVANCE_RIP();
2139 IEM_MC_END();
2140 return VINF_SUCCESS;
2141 }
2142
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144}
2145
2146
2147/** Opcode 0x0f 0x19..0x1f. */
2148FNIEMOP_DEF(iemOp_nop_Ev)
2149{
2150 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2153 {
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155 IEM_MC_BEGIN(0, 0);
2156 IEM_MC_ADVANCE_RIP();
2157 IEM_MC_END();
2158 }
2159 else
2160 {
2161 IEM_MC_BEGIN(0, 1);
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 /* Currently a NOP. */
2166 NOREF(GCPtrEffSrc);
2167 IEM_MC_ADVANCE_RIP();
2168 IEM_MC_END();
2169 }
2170 return VINF_SUCCESS;
2171}
2172
2173
2174/** Opcode 0x0f 0x20. */
2175FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2176{
    /* mod is ignored, as are operand-size overrides. */
2178 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2179 IEMOP_HLP_MIN_386();
2180 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2181 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2182 else
2183 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2184
2185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2186 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2187 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2188 {
2189 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2190 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2191 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2192 iCrReg |= 8;
2193 }
2194 switch (iCrReg)
2195 {
2196 case 0: case 2: case 3: case 4: case 8:
2197 break;
2198 default:
2199 return IEMOP_RAISE_INVALID_OPCODE();
2200 }
2201 IEMOP_HLP_DONE_DECODING();
2202
2203 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2204}
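
/*
 * For instance, on a CPU reporting fMovCr8In32Bit the byte sequence
 * f0 0f 20 c0 (lock mov eax, cr0) decodes as mov eax, cr8, while CPUs
 * without the feature raise #UD for it as noted above.  (Example encoding
 * for illustration, not taken from a testcase in this file.)
 */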
2205
2206
2207/** Opcode 0x0f 0x21. */
2208FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2209{
2210 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2211 IEMOP_HLP_MIN_386();
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2215 return IEMOP_RAISE_INVALID_OPCODE();
2216 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2217 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2218 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2219}
2220
2221
2222/** Opcode 0x0f 0x22. */
2223FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2224{
    /* mod is ignored, as are operand-size overrides. */
2226 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2227 IEMOP_HLP_MIN_386();
2228 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2229 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2230 else
2231 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2232
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2235 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2236 {
2237 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2238 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2239 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2240 iCrReg |= 8;
2241 }
2242 switch (iCrReg)
2243 {
2244 case 0: case 2: case 3: case 4: case 8:
2245 break;
2246 default:
2247 return IEMOP_RAISE_INVALID_OPCODE();
2248 }
2249 IEMOP_HLP_DONE_DECODING();
2250
2251 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2252}
2253
2254
2255/** Opcode 0x0f 0x23. */
2256FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2257{
2258 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2259 IEMOP_HLP_MIN_386();
2260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2263 return IEMOP_RAISE_INVALID_OPCODE();
2264 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2265 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2266 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2267}
2268
2269
2270/** Opcode 0x0f 0x24. */
2271FNIEMOP_DEF(iemOp_mov_Rd_Td)
2272{
2273 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2274 /** @todo works on 386 and 486. */
2275 /* The RM byte is not considered, see testcase. */
2276 return IEMOP_RAISE_INVALID_OPCODE();
2277}
2278
2279
2280/** Opcode 0x0f 0x26. */
2281FNIEMOP_DEF(iemOp_mov_Td_Rd)
2282{
2283 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2284 /** @todo works on 386 and 486. */
2285 /* The RM byte is not considered, see testcase. */
2286 return IEMOP_RAISE_INVALID_OPCODE();
2287}
2288
2289
2290/**
2291 * @opcode 0x28
2292 * @oppfx none
2293 * @opcpuid sse
2294 * @opgroup og_sse_simdfp_datamove
2295 * @opxcpttype 1
2296 * @optest op1=1 op2=2 -> op1=2
2297 * @optest op1=0 op2=-42 -> op1=-42
2298 */
2299FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2300{
2301 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2304 {
2305 /*
2306 * Register, register.
2307 */
2308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2309 IEM_MC_BEGIN(0, 0);
2310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2313 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2314 IEM_MC_ADVANCE_RIP();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 IEM_MC_BEGIN(0, 2);
2323 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2328 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2330
2331 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2332 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2333
2334 IEM_MC_ADVANCE_RIP();
2335 IEM_MC_END();
2336 }
2337 return VINF_SUCCESS;
2338}
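
/*
 * Note: IEM_MC_FETCH_MEM_U128_ALIGN_SSE models the 16-byte alignment
 * requirement of movaps, raising #GP(0) for misaligned operands instead of
 * performing the access.
 */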
2339
2340/**
2341 * @opcode 0x28
2342 * @oppfx 66
2343 * @opcpuid sse2
2344 * @opgroup og_sse2_pcksclr_datamove
2345 * @opxcpttype 1
2346 * @optest op1=1 op2=2 -> op1=2
2347 * @optest op1=0 op2=-42 -> op1=-42
2348 */
2349FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2350{
2351 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2354 {
2355 /*
2356 * Register, register.
2357 */
2358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2359 IEM_MC_BEGIN(0, 0);
2360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2361 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2362 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2363 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2364 IEM_MC_ADVANCE_RIP();
2365 IEM_MC_END();
2366 }
2367 else
2368 {
2369 /*
2370 * Register, memory.
2371 */
2372 IEM_MC_BEGIN(0, 2);
2373 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2375
2376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2378 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2380
2381 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2382 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2383
2384 IEM_MC_ADVANCE_RIP();
2385 IEM_MC_END();
2386 }
2387 return VINF_SUCCESS;
2388}
2389
2390/* Opcode 0xf3 0x0f 0x28 - invalid */
2391/* Opcode 0xf2 0x0f 0x28 - invalid */
2392
2393/**
2394 * @opcode 0x29
2395 * @oppfx none
2396 * @opcpuid sse
2397 * @opgroup og_sse_simdfp_datamove
2398 * @opxcpttype 1
2399 * @optest op1=1 op2=2 -> op1=2
2400 * @optest op1=0 op2=-42 -> op1=-42
2401 */
2402FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2403{
2404 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2407 {
2408 /*
2409 * Register, register.
2410 */
2411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2412 IEM_MC_BEGIN(0, 0);
2413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2415 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2416 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2417 IEM_MC_ADVANCE_RIP();
2418 IEM_MC_END();
2419 }
2420 else
2421 {
2422 /*
2423 * Memory, register.
2424 */
2425 IEM_MC_BEGIN(0, 2);
2426 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428
2429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2431 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2433
2434 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2435 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 return VINF_SUCCESS;
2441}
2442
2443/**
2444 * @opcode 0x29
2445 * @oppfx 66
2446 * @opcpuid sse2
2447 * @opgroup og_sse2_pcksclr_datamove
2448 * @opxcpttype 1
2449 * @optest op1=1 op2=2 -> op1=2
2450 * @optest op1=0 op2=-42 -> op1=-42
2451 */
2452FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2453{
2454 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2457 {
2458 /*
2459 * Register, register.
2460 */
2461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2462 IEM_MC_BEGIN(0, 0);
2463 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2464 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2465 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2466 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2467 IEM_MC_ADVANCE_RIP();
2468 IEM_MC_END();
2469 }
2470 else
2471 {
2472 /*
2473 * Memory, register.
2474 */
2475 IEM_MC_BEGIN(0, 2);
2476 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2478
2479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2483
2484 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2485 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP();
2488 IEM_MC_END();
2489 }
2490 return VINF_SUCCESS;
2491}
2492
2493/* Opcode 0xf3 0x0f 0x29 - invalid */
2494/* Opcode 0xf2 0x0f 0x29 - invalid */
2495
2496
2497/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2498FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2499/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2500FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2502FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2504FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2505
2506
2507/**
2508 * @opcode 0x2b
2509 * @opcodesub !11 mr/reg
2510 * @oppfx none
2511 * @opcpuid sse
2512 * @opgroup og_sse1_cachect
2513 * @opxcpttype 1
2514 * @optest op1=1 op2=2 -> op1=2
2515 * @optest op1=0 op2=-42 -> op1=-42
2516 */
2517FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2518{
2519 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2522 {
2523 /*
         * Memory, register.
2525 */
2526 IEM_MC_BEGIN(0, 2);
2527 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529
2530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2534
2535 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2536 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2537
2538 IEM_MC_ADVANCE_RIP();
2539 IEM_MC_END();
2540 }
2541 /* The register, register encoding is invalid. */
2542 else
2543 return IEMOP_RAISE_INVALID_OPCODE();
2544 return VINF_SUCCESS;
2545}
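
/*
 * Usage note: movntps is the non-temporal (streaming) store variant of
 * movaps.  The non-temporal hint only affects caching behavior, so modelling
 * it as a plain aligned 128-bit store, as done above, is architecturally
 * fine.
 */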
2546
2547/**
2548 * @opcode 0x2b
2549 * @opcodesub !11 mr/reg
2550 * @oppfx 0x66
2551 * @opcpuid sse2
2552 * @opgroup og_sse2_cachect
2553 * @opxcpttype 1
2554 * @optest op1=1 op2=2 -> op1=2
2555 * @optest op1=0 op2=-42 -> op1=-42
2556 */
2557FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2558{
2559 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2561 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2562 {
2563 /*
         * Memory, register.
2565 */
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2576 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 }
2581 /* The register, register encoding is invalid. */
2582 else
2583 return IEMOP_RAISE_INVALID_OPCODE();
2584 return VINF_SUCCESS;
2585}
2586/* Opcode 0xf3 0x0f 0x2b - invalid */
2587/* Opcode 0xf2 0x0f 0x2b - invalid */
2588
2589
2590/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2591FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2592/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2593FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2594/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2595FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2596/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2597FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2598
2599/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2600FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2601/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2602FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2603/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2604FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2605/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2606FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2607
2608/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2609FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2610/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2611FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2612/* Opcode 0xf3 0x0f 0x2e - invalid */
2613/* Opcode 0xf2 0x0f 0x2e - invalid */
2614
2615/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2616FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2617/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2618FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2619/* Opcode 0xf3 0x0f 0x2f - invalid */
2620/* Opcode 0xf2 0x0f 0x2f - invalid */
2621
2622/** Opcode 0x0f 0x30. */
2623FNIEMOP_DEF(iemOp_wrmsr)
2624{
2625 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2627 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2628}
2629
2630
2631/** Opcode 0x0f 0x31. */
2632FNIEMOP_DEF(iemOp_rdtsc)
2633{
2634 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2636 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2637}
2638
2639
/** Opcode 0x0f 0x32. */
2641FNIEMOP_DEF(iemOp_rdmsr)
2642{
2643 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2646}
2647
2648
/** Opcode 0x0f 0x33. */
2650FNIEMOP_DEF(iemOp_rdpmc)
2651{
2652 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2655}
2656
2657
2658/** Opcode 0x0f 0x34. */
2659FNIEMOP_STUB(iemOp_sysenter);
2660/** Opcode 0x0f 0x35. */
2661FNIEMOP_STUB(iemOp_sysexit);
2662/** Opcode 0x0f 0x37. */
2663FNIEMOP_STUB(iemOp_getsec);
2664
2665
2666/** Opcode 0x0f 0x38. */
2667FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2668{
2669#ifdef IEM_WITH_THREE_0F_38
2670 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2671 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2672#else
2673 IEMOP_BITCH_ABOUT_STUB();
2674 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2675#endif
2676}
2677
2678
2679/** Opcode 0x0f 0x3a. */
2680FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2681{
2682#ifdef IEM_WITH_THREE_0F_3A
2683 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2684 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2685#else
2686 IEMOP_BITCH_ABOUT_STUB();
2687 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2688#endif
2689}
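
/*
 * Both escape tables above are indexed by opcode byte times four plus
 * idxPrefix, i.e. each three-byte opcode owns four consecutive dispatch
 * slots, one per mandatory prefix (none, 0x66, 0xf3 and 0xf2, in that
 * order).
 */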
2690
2691
2692/**
2693 * Implements a conditional move.
2694 *
2695 * Wish there was an obvious way to do this where we could share and reduce
2696 * code bloat.
2697 *
2698 * @param a_Cnd The conditional "microcode" operation.
2699 */
2700#define CMOV_X(a_Cnd) \
2701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2702 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2703 { \
2704 switch (pVCpu->iem.s.enmEffOpSize) \
2705 { \
2706 case IEMMODE_16BIT: \
2707 IEM_MC_BEGIN(0, 1); \
2708 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2709 a_Cnd { \
2710 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2711 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2712 } IEM_MC_ENDIF(); \
2713 IEM_MC_ADVANCE_RIP(); \
2714 IEM_MC_END(); \
2715 return VINF_SUCCESS; \
2716 \
2717 case IEMMODE_32BIT: \
2718 IEM_MC_BEGIN(0, 1); \
2719 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2720 a_Cnd { \
2721 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2722 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2723 } IEM_MC_ELSE() { \
2724 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2725 } IEM_MC_ENDIF(); \
2726 IEM_MC_ADVANCE_RIP(); \
2727 IEM_MC_END(); \
2728 return VINF_SUCCESS; \
2729 \
2730 case IEMMODE_64BIT: \
2731 IEM_MC_BEGIN(0, 1); \
2732 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2733 a_Cnd { \
2734 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2735 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2736 } IEM_MC_ENDIF(); \
2737 IEM_MC_ADVANCE_RIP(); \
2738 IEM_MC_END(); \
2739 return VINF_SUCCESS; \
2740 \
2741 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2742 } \
2743 } \
2744 else \
2745 { \
2746 switch (pVCpu->iem.s.enmEffOpSize) \
2747 { \
2748 case IEMMODE_16BIT: \
2749 IEM_MC_BEGIN(0, 2); \
2750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2751 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2753 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2754 a_Cnd { \
2755 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2756 } IEM_MC_ENDIF(); \
2757 IEM_MC_ADVANCE_RIP(); \
2758 IEM_MC_END(); \
2759 return VINF_SUCCESS; \
2760 \
2761 case IEMMODE_32BIT: \
2762 IEM_MC_BEGIN(0, 2); \
2763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2764 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2766 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2767 a_Cnd { \
2768 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2769 } IEM_MC_ELSE() { \
2770 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2771 } IEM_MC_ENDIF(); \
2772 IEM_MC_ADVANCE_RIP(); \
2773 IEM_MC_END(); \
2774 return VINF_SUCCESS; \
2775 \
2776 case IEMMODE_64BIT: \
2777 IEM_MC_BEGIN(0, 2); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2781 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2782 a_Cnd { \
2783 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2784 } IEM_MC_ENDIF(); \
2785 IEM_MC_ADVANCE_RIP(); \
2786 IEM_MC_END(); \
2787 return VINF_SUCCESS; \
2788 \
2789 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2790 } \
2791 } do {} while (0)
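
/*
 * Rough plain-C sketch of what a CMOV_X instantiation does for the 32-bit
 * register-to-register case, including the high-dword clearing on the
 * not-taken path.  Illustration only; GetGReg32, SetGReg32 and
 * ClearHighGReg64 are hypothetical stand-ins for the IEM_MC accessors.
 */
#if 0 /* illustration only */
static void iemExampleCmov32(PVMCPU pVCpu, uint8_t bRm, bool fCondition)
{
    uint8_t const iRegSrc = (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB;
    uint8_t const iRegDst = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (fCondition)
        SetGReg32(pVCpu, iRegDst, GetGReg32(pVCpu, iRegSrc));
    else
        ClearHighGReg64(pVCpu, iRegDst); /* 32-bit writes always zero bits 63:32. */
}
#endif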
2792
2793
2794
2795/** Opcode 0x0f 0x40. */
2796FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2797{
2798 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2799 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2800}
2801
2802
2803/** Opcode 0x0f 0x41. */
2804FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2805{
2806 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2807 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2808}
2809
2810
2811/** Opcode 0x0f 0x42. */
2812FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2813{
2814 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2815 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2816}
2817
2818
2819/** Opcode 0x0f 0x43. */
2820FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2821{
2822 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2823 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2824}
2825
2826
2827/** Opcode 0x0f 0x44. */
2828FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2829{
2830 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2831 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2832}
2833
2834
2835/** Opcode 0x0f 0x45. */
2836FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2837{
2838 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2839 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2840}
2841
2842
2843/** Opcode 0x0f 0x46. */
2844FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2845{
2846 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2847 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2848}
2849
2850
2851/** Opcode 0x0f 0x47. */
2852FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2853{
2854 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2855 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2856}
2857
2858
2859/** Opcode 0x0f 0x48. */
2860FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2861{
2862 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2863 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2864}
2865
2866
2867/** Opcode 0x0f 0x49. */
2868FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2869{
2870 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2871 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2872}
2873
2874
2875/** Opcode 0x0f 0x4a. */
2876FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2877{
2878 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2879 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2880}
2881
2882
2883/** Opcode 0x0f 0x4b. */
2884FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2885{
2886 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2887 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2888}
2889
2890
2891/** Opcode 0x0f 0x4c. */
2892FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2893{
2894 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2895 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2896}
2897
2898
2899/** Opcode 0x0f 0x4d. */
2900FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2901{
2902 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2903 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2904}
2905
2906
2907/** Opcode 0x0f 0x4e. */
2908FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2909{
2910 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2911 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2912}
2913
2914
2915/** Opcode 0x0f 0x4f. */
2916FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2917{
2918 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2919 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2920}
2921
2922#undef CMOV_X
2923
2924/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2925FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2926/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2927FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2928/* Opcode 0xf3 0x0f 0x50 - invalid */
2929/* Opcode 0xf2 0x0f 0x50 - invalid */
2930
2931/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2932FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2933/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2934FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2935/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2936FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2937/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2938FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2939
2940/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2941FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2942/* Opcode 0x66 0x0f 0x52 - invalid */
2943/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2944FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2945/* Opcode 0xf2 0x0f 0x52 - invalid */
2946
2947/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2948FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2949/* Opcode 0x66 0x0f 0x53 - invalid */
2950/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2951FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2952/* Opcode 0xf2 0x0f 0x53 - invalid */
2953
2954/** Opcode 0x0f 0x54 - andps Vps, Wps */
2955FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2958/* Opcode 0xf3 0x0f 0x54 - invalid */
2959/* Opcode 0xf2 0x0f 0x54 - invalid */
2960
2961/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2962FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2963/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2964FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2965/* Opcode 0xf3 0x0f 0x55 - invalid */
2966/* Opcode 0xf2 0x0f 0x55 - invalid */
2967
2968/** Opcode 0x0f 0x56 - orps Vps, Wps */
2969FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2970/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2971FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2972/* Opcode 0xf3 0x0f 0x56 - invalid */
2973/* Opcode 0xf2 0x0f 0x56 - invalid */
2974
2975/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2976FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2977/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2978FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2979/* Opcode 0xf3 0x0f 0x57 - invalid */
2980/* Opcode 0xf2 0x0f 0x57 - invalid */
2981
2982/** Opcode 0x0f 0x58 - addps Vps, Wps */
2983FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2984/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2985FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2986/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2987FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2988/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2989FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2990
2991/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2992FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2993/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2994FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2995/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2996FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2997/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2998FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2999
3000/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3001FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3002/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3003FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3004/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3005FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3006/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3007FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3008
3009/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3010FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3011/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3012FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3013/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3014FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3015/* Opcode 0xf2 0x0f 0x5b - invalid */
3016
3017/** Opcode 0x0f 0x5c - subps Vps, Wps */
3018FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3019/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3020FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3021/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3022FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3023/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3024FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3025
3026/** Opcode 0x0f 0x5d - minps Vps, Wps */
3027FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3028/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3029FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3030/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3031FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3032/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3033FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3034
3035/** Opcode 0x0f 0x5e - divps Vps, Wps */
3036FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3037/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3038FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3039/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3040FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3041/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3042FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3043
3044/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3045FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3046/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3047FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3048/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3049FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3050/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3051FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3052
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the lower half of a register, which for MMX means a
 * 32-bit memory access in the memory case.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint32_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint32_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the lower half of a register, which for SSE means a
 * 128-bit aligned, 64-bit or 128-bit memory access of which only the lower
 * 64 bits are used.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
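
/*
 * To make the low-low pattern concrete, here is an illustrative version of
 * the 64-bit punpcklbw operation such workers dispatch to: the low four
 * bytes of destination and source are interleaved into a full qword.
 * Illustration only; the real workers live in the instruction
 * implementation files.
 */
#if 0 /* illustration only */
static uint64_t iemExamplePunpcklbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 4; iByte++)
    {
        uResult |= ((uDst >> (iByte * 8)) & 0xff) << (iByte * 16);      /* even result bytes from dst */
        uResult |= ((uSrc >> (iByte * 8)) & 0xff) << (iByte * 16 + 8);  /* odd result bytes from src */
    }
    return uResult;
}
#endif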
3169
3170
3171/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3172FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3173{
3174 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3175 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3176}
3177
/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
3184
3185/* Opcode 0xf3 0x0f 0x60 - invalid */
3186
3187
3188/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3189FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3190{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3193}
3194
3195/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3196FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3197{
    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3199 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3200}
3201
3202/* Opcode 0xf3 0x0f 0x61 - invalid */
3203
3204
3205/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3206FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3207{
3208 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3210}
3211
3212/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3213FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3214{
3215 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3217}
3218
3219/* Opcode 0xf3 0x0f 0x62 - invalid */
3220
3221
3222
3223/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3224FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3225/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3226FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3227/* Opcode 0xf3 0x0f 0x63 - invalid */
3228
3229/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3230FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3231/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3232FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3233/* Opcode 0xf3 0x0f 0x64 - invalid */
3234
3235/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3236FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3237/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3238FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3239/* Opcode 0xf3 0x0f 0x65 - invalid */
3240
3241/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3242FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3243/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3244FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3245/* Opcode 0xf3 0x0f 0x66 - invalid */
3246
3247/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3248FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3249/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3250FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3251/* Opcode 0xf3 0x0f 0x67 - invalid */
3252
3253
3254/**
3255 * Common worker for MMX instructions on the form:
3256 * pxxxx mm1, mm2/mem64
3257 *
3258 * The 2nd operand is the second half of a register, which in the memory case
3259 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3260 * where it may read the full 128 bits or only the upper 64 bits.
3261 *
3262 * Exceptions type 4.
3263 */
3264FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3265{
3266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3267 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3269 {
3270 /*
3271 * Register, register.
3272 */
3273 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3274 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_BEGIN(2, 0);
3277 IEM_MC_ARG(uint64_t *, pDst, 0);
3278 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3279 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3280 IEM_MC_PREPARE_FPU_USAGE();
3281 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3282 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3283 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3284 IEM_MC_ADVANCE_RIP();
3285 IEM_MC_END();
3286 }
3287 else
3288 {
3289 /*
3290 * Register, memory.
3291 */
3292 IEM_MC_BEGIN(2, 2);
3293 IEM_MC_ARG(uint64_t *, pDst, 0);
3294 IEM_MC_LOCAL(uint64_t, uSrc);
3295 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3300 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3301 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3302
3303 IEM_MC_PREPARE_FPU_USAGE();
3304 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3305 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3306
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 return VINF_SUCCESS;
3311}
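
/*
 * Companion sketch for the high-high pattern: the 64-bit punpckhbw
 * interleaves the high four bytes of each operand instead.  Illustration
 * only, same caveats as the punpcklbw sketch above.
 */
#if 0 /* illustration only */
static uint64_t iemExamplePunpckhbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 4; iByte++)
    {
        uResult |= ((uDst >> (32 + iByte * 8)) & 0xff) << (iByte * 16);
        uResult |= ((uSrc >> (32 + iByte * 8)) & 0xff) << (iByte * 16 + 8);
    }
    return uResult;
}
#endif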
3312
3313
3314/**
3315 * Common worker for SSE2 instructions on the form:
3316 * pxxxx xmm1, xmm2/mem128
3317 *
3318 * The 2nd operand is the second half of a register, which in the memory case
3319 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3320 * where it may read the full 128 bits or only the upper 64 bits.
3321 *
3322 * Exceptions type 4.
3323 */
3324FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3325{
3326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3327 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3328 {
3329 /*
3330 * Register, register.
3331 */
3332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3333 IEM_MC_BEGIN(2, 0);
3334 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3335 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3336 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3337 IEM_MC_PREPARE_SSE_USAGE();
3338 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3339 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3340 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3341 IEM_MC_ADVANCE_RIP();
3342 IEM_MC_END();
3343 }
3344 else
3345 {
3346 /*
3347 * Register, memory.
3348 */
3349 IEM_MC_BEGIN(2, 2);
3350 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3351 IEM_MC_LOCAL(RTUINT128U, uSrc);
3352 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3354
3355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3359
3360 IEM_MC_PREPARE_SSE_USAGE();
3361 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3362 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3363
3364 IEM_MC_ADVANCE_RIP();
3365 IEM_MC_END();
3366 }
3367 return VINF_SUCCESS;
3368}
3369
3370
3371/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3372FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3373{
3374 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3375 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3376}
3377
3378/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3379FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3380{
    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3382 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3383}
3384/* Opcode 0xf3 0x0f 0x68 - invalid */
3385
3386
3387/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3388FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3389{
3390 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3391 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3392}
3393
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3395FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3396{
3397 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
3401/* Opcode 0xf3 0x0f 0x69 - invalid */
3402
3403
3404/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3405FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3406{
3407 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3409}
3410
3411/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3412FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3413{
3414 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3415 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3416}
3417/* Opcode 0xf3 0x0f 0x6a - invalid */
3418
3419
3420/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3421FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3422/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3423FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3424/* Opcode 0xf3 0x0f 0x6b - invalid */
3425
3426
3427/* Opcode 0x0f 0x6c - invalid */
3428
3429/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3430FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3431{
3432 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3433 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3434}
3435
3436/* Opcode 0xf3 0x0f 0x6c - invalid */
3437/* Opcode 0xf2 0x0f 0x6c - invalid */
3438
3439
3440/* Opcode 0x0f 0x6d - invalid */
3441
3442/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3443FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3444{
3445 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3446 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3447}
3448
3449/* Opcode 0xf3 0x0f 0x6d - invalid */
3450
3451
3452FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3453{
3454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3455 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3456 {
3457 /**
3458 * @opcode 0x6e
3459 * @opcodesub rex.w=1
3460 * @oppfx none
3461 * @opcpuid mmx
3462 * @opgroup og_mmx_datamove
3463 * @opxcpttype 5
3464 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3465 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3466 * @oponly
3467 */
3468 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3469 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3470 {
3471 /* MMX, greg64 */
3472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3473 IEM_MC_BEGIN(0, 1);
3474 IEM_MC_LOCAL(uint64_t, u64Tmp);
3475
3476 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3477 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3478
3479 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3480 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3481 IEM_MC_FPU_TO_MMX_MODE();
3482
3483 IEM_MC_ADVANCE_RIP();
3484 IEM_MC_END();
3485 }
3486 else
3487 {
3488 /* MMX, [mem64] */
3489 IEM_MC_BEGIN(0, 2);
3490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3491 IEM_MC_LOCAL(uint64_t, u64Tmp);
3492
3493 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3497
3498 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3499 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3500 IEM_MC_FPU_TO_MMX_MODE();
3501
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 }
3506 else
3507 {
3508 /**
3509 * @opdone
3510 * @opcode 0x6e
3511 * @opcodesub rex.w=0
3512 * @oppfx none
3513 * @opcpuid mmx
3514 * @opgroup og_mmx_datamove
3515 * @opxcpttype 5
3516 * @opfunction iemOp_movd_q_Pd_Ey
3517 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3518 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3519 * @oponly
3520 */
3521 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3522 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3523 {
3524 /* MMX, greg */
3525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3526 IEM_MC_BEGIN(0, 1);
3527 IEM_MC_LOCAL(uint64_t, u64Tmp);
3528
3529 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3530 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3531
3532 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3533 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3534 IEM_MC_FPU_TO_MMX_MODE();
3535
3536 IEM_MC_ADVANCE_RIP();
3537 IEM_MC_END();
3538 }
3539 else
3540 {
3541 /* MMX, [mem] */
3542 IEM_MC_BEGIN(0, 2);
3543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3544 IEM_MC_LOCAL(uint32_t, u32Tmp);
3545
3546 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3549 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3550
3551 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3552 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3553 IEM_MC_FPU_TO_MMX_MODE();
3554
3555 IEM_MC_ADVANCE_RIP();
3556 IEM_MC_END();
3557 }
3558 }
3559 return VINF_SUCCESS;
3560}
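
/*
 * Note: the IEM_MC_FPU_TO_MMX_MODE calls above model the architectural side
 * effect that the ftw=0xff values in the @optest lines check for: any MMX
 * register write marks the whole x87 register stack as valid and resets the
 * top-of-stack to zero.
 */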
3561
3562/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3563FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3564{
3565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3566 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3567 {
3568 /**
3569 * @opcode 0x6e
3570 * @opcodesub rex.w=1
3571 * @oppfx 0x66
3572 * @opcpuid sse2
3573 * @opgroup og_sse2_simdint_datamov
3574 * @opxcpttype 5
3575 * @optest 64-bit / op1=1 op2=2 -> op1=2
3576 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3577 * @oponly
3578 */
3579 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3580 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3581 {
3582 /* XMM, greg64 */
3583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3584 IEM_MC_BEGIN(0, 1);
3585 IEM_MC_LOCAL(uint64_t, u64Tmp);
3586
3587 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3588 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3589
3590 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3591 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3592
3593 IEM_MC_ADVANCE_RIP();
3594 IEM_MC_END();
3595 }
3596 else
3597 {
3598 /* XMM, [mem64] */
3599 IEM_MC_BEGIN(0, 2);
3600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3601 IEM_MC_LOCAL(uint64_t, u64Tmp);
3602
3603 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3607
3608 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3609 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3610
3611 IEM_MC_ADVANCE_RIP();
3612 IEM_MC_END();
3613 }
3614 }
3615 else
3616 {
3617 /**
3618 * @opdone
3619 * @opcode 0x6e
3620 * @opcodesub rex.w=0
3621 * @oppfx 0x66
3622 * @opcpuid sse2
3623 * @opgroup og_sse2_simdint_datamov
3624 * @opxcpttype 5
3625 * @opfunction iemOp_movd_q_Vy_Ey
3626 * @optest op1=1 op2=2 -> op1=2
3627 * @optest op1=0 op2=-42 -> op1=-42
3628 * @oponly
3629 */
3630 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3631 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3632 {
3633 /* XMM, greg32 */
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3635 IEM_MC_BEGIN(0, 1);
3636 IEM_MC_LOCAL(uint32_t, u32Tmp);
3637
3638 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3639 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3640
3641 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3642 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3643
3644 IEM_MC_ADVANCE_RIP();
3645 IEM_MC_END();
3646 }
3647 else
3648 {
3649 /* XMM, [mem32] */
3650 IEM_MC_BEGIN(0, 2);
3651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3652 IEM_MC_LOCAL(uint32_t, u32Tmp);
3653
3654 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3657 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3658
3659 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3660 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3661
3662 IEM_MC_ADVANCE_RIP();
3663 IEM_MC_END();
3664 }
3665 }
3666 return VINF_SUCCESS;
3667}
3668
3669/* Opcode 0xf3 0x0f 0x6e - invalid */
3670
3671
3672/** Opcode 0x0f 0x6f - movq Pq, Qq */
3673FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3674{
3675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3676 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3678 {
3679 /*
3680 * Register, register.
3681 */
3682 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3683 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3685 IEM_MC_BEGIN(0, 1);
3686 IEM_MC_LOCAL(uint64_t, u64Tmp);
3687 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3688 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3689 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3690 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3691 IEM_MC_ADVANCE_RIP();
3692 IEM_MC_END();
3693 }
3694 else
3695 {
3696 /*
3697 * Register, memory.
3698 */
3699 IEM_MC_BEGIN(0, 2);
3700 IEM_MC_LOCAL(uint64_t, u64Tmp);
3701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3702
3703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3705 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3706 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3707 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3708 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3709
3710 IEM_MC_ADVANCE_RIP();
3711 IEM_MC_END();
3712 }
3713 return VINF_SUCCESS;
3714}
3715
3716/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3717FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3718{
3719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3720 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3722 {
3723 /*
3724 * Register, register.
3725 */
3726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3727 IEM_MC_BEGIN(0, 0);
3728 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3729 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3730 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3731 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3732 IEM_MC_ADVANCE_RIP();
3733 IEM_MC_END();
3734 }
3735 else
3736 {
3737 /*
3738 * Register, memory.
3739 */
3740 IEM_MC_BEGIN(0, 2);
3741 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3743
3744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3746 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3747 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3748 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3749 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3750
3751 IEM_MC_ADVANCE_RIP();
3752 IEM_MC_END();
3753 }
3754 return VINF_SUCCESS;
3755}
3756
3757/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3758FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3759{
3760 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3761 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3763 {
3764 /*
3765 * Register, register.
3766 */
3767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3768 IEM_MC_BEGIN(0, 0);
3769 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3770 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3771 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3772 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3773 IEM_MC_ADVANCE_RIP();
3774 IEM_MC_END();
3775 }
3776 else
3777 {
3778 /*
3779 * Register, memory.
3780 */
3781 IEM_MC_BEGIN(0, 2);
3782 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3784
3785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3787 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3788 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3789 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3790 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3791
3792 IEM_MC_ADVANCE_RIP();
3793 IEM_MC_END();
3794 }
3795 return VINF_SUCCESS;
3796}
3797
3798
3799/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3800FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3801{
3802 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3804 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3805 {
3806 /*
3807 * Register, register.
3808 */
3809 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3811
3812 IEM_MC_BEGIN(3, 0);
3813 IEM_MC_ARG(uint64_t *, pDst, 0);
3814 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3815 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3816 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3817 IEM_MC_PREPARE_FPU_USAGE();
3818 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3819 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3820 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3821 IEM_MC_ADVANCE_RIP();
3822 IEM_MC_END();
3823 }
3824 else
3825 {
3826 /*
3827 * Register, memory.
3828 */
3829 IEM_MC_BEGIN(3, 2);
3830 IEM_MC_ARG(uint64_t *, pDst, 0);
3831 IEM_MC_LOCAL(uint64_t, uSrc);
3832 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3834
3835         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows */
3836 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3837 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3839 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3840
3841 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3842 IEM_MC_PREPARE_FPU_USAGE();
3843 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3844 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3845
3846 IEM_MC_ADVANCE_RIP();
3847 IEM_MC_END();
3848 }
3849 return VINF_SUCCESS;
3850}
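/*
 * Shuffle semantics sketch (editor's note; iemAImpl_pshufw is implemented
 * elsewhere, so this is only an illustration with made-up local names):
 * each 2-bit field of the immediate selects the source word for the
 * corresponding destination word, roughly
 *
 *     for (unsigned i = 0; i < 4; i++)
 *         uDst.au16[i] = uSrc.au16[(bEvil >> (i * 2)) & 3];
 *
 * so "pshufw mm0, mm1, 0x1b" reverses the four words of mm1 into mm0.
 */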
3851
3852/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3853FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3854{
3855 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3857 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3858 {
3859 /*
3860 * Register, register.
3861 */
3862 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3864
3865 IEM_MC_BEGIN(3, 0);
3866 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3867 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3868 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3869 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3870 IEM_MC_PREPARE_SSE_USAGE();
3871 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3872 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3873 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3874 IEM_MC_ADVANCE_RIP();
3875 IEM_MC_END();
3876 }
3877 else
3878 {
3879 /*
3880 * Register, memory.
3881 */
3882 IEM_MC_BEGIN(3, 2);
3883 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3884 IEM_MC_LOCAL(RTUINT128U, uSrc);
3885 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3887
3888         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows */
3889 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3890 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3892 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3893
3894 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3895 IEM_MC_PREPARE_SSE_USAGE();
3896 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3897 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3898
3899 IEM_MC_ADVANCE_RIP();
3900 IEM_MC_END();
3901 }
3902 return VINF_SUCCESS;
3903}
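/*
 * Editor's note: pshufd is the doubleword analog of the above, i.e. roughly
 * uDst.au32[i] = uSrc.au32[(bEvil >> (i * 2)) & 3] for i = 0..3, while the
 * pshufhw (0xf3) and pshuflw (0xf2) variants below apply the pshufw-style
 * word shuffle to only the high respectively low quadword and copy the
 * other quadword across unchanged.
 */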
3904
3905/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3906FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3907{
3908 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3909 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3910 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3911 {
3912 /*
3913 * Register, register.
3914 */
3915 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3917
3918 IEM_MC_BEGIN(3, 0);
3919 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3920 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3921 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3922 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3923 IEM_MC_PREPARE_SSE_USAGE();
3924 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3925 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3926 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3927 IEM_MC_ADVANCE_RIP();
3928 IEM_MC_END();
3929 }
3930 else
3931 {
3932 /*
3933 * Register, memory.
3934 */
3935 IEM_MC_BEGIN(3, 2);
3936 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3937 IEM_MC_LOCAL(RTUINT128U, uSrc);
3938 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3940
3941         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows */
3942 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3943 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3946
3947 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3948 IEM_MC_PREPARE_SSE_USAGE();
3949 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3950 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3951
3952 IEM_MC_ADVANCE_RIP();
3953 IEM_MC_END();
3954 }
3955 return VINF_SUCCESS;
3956}
3957
3958/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3959FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3960{
3961 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3962 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3963 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3964 {
3965 /*
3966 * Register, register.
3967 */
3968 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3970
3971 IEM_MC_BEGIN(3, 0);
3972 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3973 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3974 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3975 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3976 IEM_MC_PREPARE_SSE_USAGE();
3977 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3978 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3979 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3980 IEM_MC_ADVANCE_RIP();
3981 IEM_MC_END();
3982 }
3983 else
3984 {
3985 /*
3986 * Register, memory.
3987 */
3988 IEM_MC_BEGIN(3, 2);
3989 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3990 IEM_MC_LOCAL(RTUINT128U, uSrc);
3991 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3993
3994         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows */
3995 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3996 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3998 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3999
4000 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4001 IEM_MC_PREPARE_SSE_USAGE();
4002 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4003 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4004
4005 IEM_MC_ADVANCE_RIP();
4006 IEM_MC_END();
4007 }
4008 return VINF_SUCCESS;
4009}
4010
4011
4012/** Opcode 0x0f 0x71 11/2. */
4013FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4014
4015/** Opcode 0x66 0x0f 0x71 11/2. */
4016FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4017
4018/** Opcode 0x0f 0x71 11/4. */
4019FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4020
4021/** Opcode 0x66 0x0f 0x71 11/4. */
4022FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4023
4024/** Opcode 0x0f 0x71 11/6. */
4025FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4026
4027/** Opcode 0x66 0x0f 0x71 11/6. */
4028FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4029
4030
4031/**
4032 * Group 12 jump table for register variant.
4033 */
4034IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4035{
4036 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4037 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4038 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4039 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4040 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4041 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4042 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4043 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4044};
4045AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4046
4047
4048/** Opcode 0x0f 0x71. */
4049FNIEMOP_DEF(iemOp_Grp12)
4050{
4051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4052 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4053 /* register, register */
4054 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4055 + pVCpu->iem.s.idxPrefix], bRm);
4056 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4057}
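/*
 * Dispatch sketch (editor's note): the table above is indexed reg * 4 plus
 * the prefix column, where idxPrefix appears to encode none/0x66/0xf3/0xf2
 * as 0..3 to match the four columns.  E.g. "66 0f 71 d0 04" (psrlw xmm0, 4)
 * has reg=2 and the 0x66 column, so the lookup is roughly
 *
 *     idx = ((0xd0 >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + 1;
 *
 * which lands on iemOp_Grp12_psrlw_Ux_Ib.
 */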
4058
4059
4060/** Opcode 0x0f 0x72 11/2. */
4061FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4062
4063/** Opcode 0x66 0x0f 0x72 11/2. */
4064FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4065
4066/** Opcode 0x0f 0x72 11/4. */
4067FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4068
4069/** Opcode 0x66 0x0f 0x72 11/4. */
4070FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4071
4072/** Opcode 0x0f 0x72 11/6. */
4073FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4074
4075/** Opcode 0x66 0x0f 0x72 11/6. */
4076FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4077
4078
4079/**
4080 * Group 13 jump table for register variant.
4081 */
4082IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4083{
4084 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4085 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4086 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4087 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4088 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4089 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4090 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4091 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4092};
4093AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4094
4095/** Opcode 0x0f 0x72. */
4096FNIEMOP_DEF(iemOp_Grp13)
4097{
4098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4099 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4100 /* register, register */
4101 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4102 + pVCpu->iem.s.idxPrefix], bRm);
4103 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4104}
4105
4106
4107/** Opcode 0x0f 0x73 11/2. */
4108FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4109
4110/** Opcode 0x66 0x0f 0x73 11/2. */
4111FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4112
4113/** Opcode 0x66 0x0f 0x73 11/3. */
4114FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4115
4116/** Opcode 0x0f 0x73 11/6. */
4117FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4118
4119/** Opcode 0x66 0x0f 0x73 11/6. */
4120FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4121
4122/** Opcode 0x66 0x0f 0x73 11/7. */
4123FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4124
4125/**
4126 * Group 14 jump table for register variant.
4127 */
4128IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4129{
4130 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4131 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4132 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4133 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4134 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4135 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4136 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4137 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4138};
4139AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4140
4141
4142/** Opcode 0x0f 0x73. */
4143FNIEMOP_DEF(iemOp_Grp14)
4144{
4145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4146 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4147 /* register, register */
4148 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4149 + pVCpu->iem.s.idxPrefix], bRm);
4150 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4151}
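/*
 * Editor's note: unlike groups 12 and 13, the /3 (psrldq) and /7 (pslldq)
 * rows above only populate the 0x66 column -- the 128-bit byte shifts have
 * no MMX form, so their unprefixed encodings stay invalid.
 */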
4152
4153
4154/**
4155 * Common worker for MMX instructions of the form:
4156 * pxxx mm1, mm2/mem64
4157 */
4158FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4159{
4160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4162 {
4163 /*
4164 * Register, register.
4165 */
4166 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4167 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4169 IEM_MC_BEGIN(2, 0);
4170 IEM_MC_ARG(uint64_t *, pDst, 0);
4171 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4172 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4173 IEM_MC_PREPARE_FPU_USAGE();
4174 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4175 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4176 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4177 IEM_MC_ADVANCE_RIP();
4178 IEM_MC_END();
4179 }
4180 else
4181 {
4182 /*
4183 * Register, memory.
4184 */
4185 IEM_MC_BEGIN(2, 2);
4186 IEM_MC_ARG(uint64_t *, pDst, 0);
4187 IEM_MC_LOCAL(uint64_t, uSrc);
4188 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4190
4191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4193 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4194 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4195
4196 IEM_MC_PREPARE_FPU_USAGE();
4197 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4198 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4199
4200 IEM_MC_ADVANCE_RIP();
4201 IEM_MC_END();
4202 }
4203 return VINF_SUCCESS;
4204}
4205
4206
4207/**
4208 * Common worker for SSE2 instructions of the form:
4209 * pxxx xmm1, xmm2/mem128
4210 *
4211 * Proper alignment of the 128-bit operand is enforced.
4212 * Exceptions type 4. SSE2 cpuid checks.
4213 */
4214FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4215{
4216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4218 {
4219 /*
4220 * Register, register.
4221 */
4222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4223 IEM_MC_BEGIN(2, 0);
4224 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4225 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4226 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4227 IEM_MC_PREPARE_SSE_USAGE();
4228 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4229 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4230 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4231 IEM_MC_ADVANCE_RIP();
4232 IEM_MC_END();
4233 }
4234 else
4235 {
4236 /*
4237 * Register, memory.
4238 */
4239 IEM_MC_BEGIN(2, 2);
4240 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4241 IEM_MC_LOCAL(RTUINT128U, uSrc);
4242 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4244
4245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4247 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4248 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4249
4250 IEM_MC_PREPARE_SSE_USAGE();
4251 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4252 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4253
4254 IEM_MC_ADVANCE_RIP();
4255 IEM_MC_END();
4256 }
4257 return VINF_SUCCESS;
4258}
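/*
 * Usage sketch (editor's note): opcode handlers forward to these two
 * workers with a PCIEMOPMEDIAF2 bundle; see the pcmpeqb pair just below,
 * where both handlers pass &g_iemAImpl_pcmpeqb and the workers pick the
 * pfnU64 (MMX) or pfnU128 (SSE2) member of the same bundle.
 */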
4259
4260
4261/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4262FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4263{
4264 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4265 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4266}
4267
4268/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4269FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4270{
4271     IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4272 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4273}
4274
4275/* Opcode 0xf3 0x0f 0x74 - invalid */
4276/* Opcode 0xf2 0x0f 0x74 - invalid */
4277
4278
4279/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4280FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4281{
4282 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4283 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4284}
4285
4286/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4287FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4288{
4289 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4290 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4291}
4292
4293/* Opcode 0xf3 0x0f 0x75 - invalid */
4294/* Opcode 0xf2 0x0f 0x75 - invalid */
4295
4296
4297/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4298FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4299{
4300 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4301 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4302}
4303
4304/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4305FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4306{
4307     IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4308 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4309}
4310
4311/* Opcode 0xf3 0x0f 0x76 - invalid */
4312/* Opcode 0xf2 0x0f 0x76 - invalid */
4313
4314
4315/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4316FNIEMOP_STUB(iemOp_emms);
4317/* Opcode 0x66 0x0f 0x77 - invalid */
4318/* Opcode 0xf3 0x0f 0x77 - invalid */
4319/* Opcode 0xf2 0x0f 0x77 - invalid */
4320
4321/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4322FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4323/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4324FNIEMOP_STUB(iemOp_AmdGrp17);
4325/* Opcode 0xf3 0x0f 0x78 - invalid */
4326/* Opcode 0xf2 0x0f 0x78 - invalid */
4327
4328/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4329FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4330/* Opcode 0x66 0x0f 0x79 - invalid */
4331/* Opcode 0xf3 0x0f 0x79 - invalid */
4332/* Opcode 0xf2 0x0f 0x79 - invalid */
4333
4334/* Opcode 0x0f 0x7a - invalid */
4335/* Opcode 0x66 0x0f 0x7a - invalid */
4336/* Opcode 0xf3 0x0f 0x7a - invalid */
4337/* Opcode 0xf2 0x0f 0x7a - invalid */
4338
4339/* Opcode 0x0f 0x7b - invalid */
4340/* Opcode 0x66 0x0f 0x7b - invalid */
4341/* Opcode 0xf3 0x0f 0x7b - invalid */
4342/* Opcode 0xf2 0x0f 0x7b - invalid */
4343
4344/* Opcode 0x0f 0x7c - invalid */
4345/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4346FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4347/* Opcode 0xf3 0x0f 0x7c - invalid */
4348/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4349FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4350
4351/* Opcode 0x0f 0x7d - invalid */
4352/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4353FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4354/* Opcode 0xf3 0x0f 0x7d - invalid */
4355/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4356FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4357
4358
4359/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4360FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4361{
4362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4363 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4364 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4365 else
4366 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4367 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4368 {
4369 /* greg, MMX */
4370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4371 IEM_MC_BEGIN(0, 1);
4372 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4373 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4374 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4375 {
4376 IEM_MC_LOCAL(uint64_t, u64Tmp);
4377 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4378 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4379 }
4380 else
4381 {
4382 IEM_MC_LOCAL(uint32_t, u32Tmp);
4383 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4384 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4385 }
4386 IEM_MC_ADVANCE_RIP();
4387 IEM_MC_END();
4388 }
4389 else
4390 {
4391 /* [mem], MMX */
4392 IEM_MC_BEGIN(0, 2);
4393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4394 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4395         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate */
4396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4397 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4398 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4399 {
4400 IEM_MC_LOCAL(uint64_t, u64Tmp);
4401 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4402 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4403 }
4404 else
4405 {
4406 IEM_MC_LOCAL(uint32_t, u32Tmp);
4407 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4408 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4409 }
4410 IEM_MC_ADVANCE_RIP();
4411 IEM_MC_END();
4412 }
4413 return VINF_SUCCESS;
4414}
4415
4416/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4417FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4418{
4419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4420 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4421 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4422 else
4423 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4424 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4425 {
4426 /* greg, XMM */
4427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4428 IEM_MC_BEGIN(0, 1);
4429 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4430 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4431 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4432 {
4433 IEM_MC_LOCAL(uint64_t, u64Tmp);
4434 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4435 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4436 }
4437 else
4438 {
4439 IEM_MC_LOCAL(uint32_t, u32Tmp);
4440 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4441 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4442 }
4443 IEM_MC_ADVANCE_RIP();
4444 IEM_MC_END();
4445 }
4446 else
4447 {
4448 /* [mem], XMM */
4449 IEM_MC_BEGIN(0, 2);
4450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4452         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate */
4453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4455 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4456 {
4457 IEM_MC_LOCAL(uint64_t, u64Tmp);
4458 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4459 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4460 }
4461 else
4462 {
4463 IEM_MC_LOCAL(uint32_t, u32Tmp);
4464 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4465 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4466 }
4467 IEM_MC_ADVANCE_RIP();
4468 IEM_MC_END();
4469 }
4470 return VINF_SUCCESS;
4471}
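/*
 * Encoding sketch (editor's note, bytes are illustrative): REX.W is what
 * separates the two mnemonics decoded above -- "66 0f 7e c0" is
 * movd eax, xmm0 (32-bit greg path), while "66 48 0f 7e c0" is
 * movq rax, xmm0; the MMX form without the 0x66 prefix splits the same way
 * for Pd/Pq.
 */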
4472
4473
4474/**
4475 * @opcode 0x7e
4476 * @opcodesub !11 mr/reg
4477 * @oppfx 0xf3
4478 * @opcpuid sse2
4479 * @opgroup og_sse2_pcksclr_datamove
4480 * @opxcpttype 5
4481 * @optest op1=1 op2=2 -> op1=2
4482 * @optest op1=0 op2=-42 -> op1=-42
4483 */
4484FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4485{
4486 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4488 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4489 {
4490 /*
4491 * Register, register.
4492 */
4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4494         IEM_MC_BEGIN(0, 1);
4495 IEM_MC_LOCAL(uint64_t, uSrc);
4496
4497 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4499
4500 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4501 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4502
4503 IEM_MC_ADVANCE_RIP();
4504 IEM_MC_END();
4505 }
4506 else
4507 {
4508 /*
4509 * Memory, register.
4510 */
4511 IEM_MC_BEGIN(0, 2);
4512 IEM_MC_LOCAL(uint64_t, uSrc);
4513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4514
4515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4517 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4518 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4519
4520 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4521 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4522
4523 IEM_MC_ADVANCE_RIP();
4524 IEM_MC_END();
4525 }
4526 return VINF_SUCCESS;
4527}
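/*
 * Editor's note on the ZX_U128 stores above: both paths clear bits 127:64
 * of the destination, so after "f3 0f 7e c1" (movq xmm0, xmm1) xmm0 holds
 * the low quadword of xmm1 with a zeroed high quadword.
 */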
4528
4529/* Opcode 0xf2 0x0f 0x7e - invalid */
4530
4531
4532/** Opcode 0x0f 0x7f - movq Qq, Pq */
4533FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4534{
4535 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4537 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4538 {
4539 /*
4540 * Register, register.
4541 */
4542 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4543 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4545 IEM_MC_BEGIN(0, 1);
4546 IEM_MC_LOCAL(uint64_t, u64Tmp);
4547 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4548 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4549 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4550 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4551 IEM_MC_ADVANCE_RIP();
4552 IEM_MC_END();
4553 }
4554 else
4555 {
4556 /*
4557 * Register, memory.
4558 */
4559 IEM_MC_BEGIN(0, 2);
4560 IEM_MC_LOCAL(uint64_t, u64Tmp);
4561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4562
4563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4565 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4566 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4567
4568 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4569 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4570
4571 IEM_MC_ADVANCE_RIP();
4572 IEM_MC_END();
4573 }
4574 return VINF_SUCCESS;
4575}
4576
4577/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4578FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4579{
4580 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4582 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4583 {
4584 /*
4585 * Register, register.
4586 */
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588 IEM_MC_BEGIN(0, 0);
4589 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4590 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4591 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4592 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4593 IEM_MC_ADVANCE_RIP();
4594 IEM_MC_END();
4595 }
4596 else
4597 {
4598 /*
4599 * Register, memory.
4600 */
4601 IEM_MC_BEGIN(0, 2);
4602 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4604
4605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4607 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4608 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4609
4610 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4611 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4612
4613 IEM_MC_ADVANCE_RIP();
4614 IEM_MC_END();
4615 }
4616 return VINF_SUCCESS;
4617}
4618
4619/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4620FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4621{
4622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4623 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4624 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4625 {
4626 /*
4627 * Register, register.
4628 */
4629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4630 IEM_MC_BEGIN(0, 0);
4631 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4632 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4633 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4634 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4635 IEM_MC_ADVANCE_RIP();
4636 IEM_MC_END();
4637 }
4638 else
4639 {
4640 /*
4641 * Register, memory.
4642 */
4643 IEM_MC_BEGIN(0, 2);
4644 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4646
4647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4649 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4650 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4651
4652 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4653 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4654
4655 IEM_MC_ADVANCE_RIP();
4656 IEM_MC_END();
4657 }
4658 return VINF_SUCCESS;
4659}
4660
4661/* Opcode 0xf2 0x0f 0x7f - invalid */
4662
4663
4664
4665/** Opcode 0x0f 0x80. */
4666FNIEMOP_DEF(iemOp_jo_Jv)
4667{
4668 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4669 IEMOP_HLP_MIN_386();
4670 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4671 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4672 {
4673 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4675
4676 IEM_MC_BEGIN(0, 0);
4677 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4678 IEM_MC_REL_JMP_S16(i16Imm);
4679 } IEM_MC_ELSE() {
4680 IEM_MC_ADVANCE_RIP();
4681 } IEM_MC_ENDIF();
4682 IEM_MC_END();
4683 }
4684 else
4685 {
4686 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4688
4689 IEM_MC_BEGIN(0, 0);
4690 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4691 IEM_MC_REL_JMP_S32(i32Imm);
4692 } IEM_MC_ELSE() {
4693 IEM_MC_ADVANCE_RIP();
4694 } IEM_MC_ENDIF();
4695 IEM_MC_END();
4696 }
4697 return VINF_SUCCESS;
4698}
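/*
 * Target calculation sketch (editor's note; applies to the whole Jcc family
 * below, uNewRip being a made-up name): Jv is a signed 16- or 32-bit
 * displacement relative to the end of the instruction, so a taken branch
 * works out to roughly
 *
 *     uNewRip = uRipOfNextInstruction + (int64_t)i32Imm;
 *
 * with IEMOP_HLP_DEFAULT_64BIT_OP_SIZE forcing the sign-extended 32-bit
 * flavour in 64-bit mode.
 */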
4699
4700
4701/** Opcode 0x0f 0x81. */
4702FNIEMOP_DEF(iemOp_jno_Jv)
4703{
4704 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4705 IEMOP_HLP_MIN_386();
4706 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4707 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4708 {
4709 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4711
4712 IEM_MC_BEGIN(0, 0);
4713 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4714 IEM_MC_ADVANCE_RIP();
4715 } IEM_MC_ELSE() {
4716 IEM_MC_REL_JMP_S16(i16Imm);
4717 } IEM_MC_ENDIF();
4718 IEM_MC_END();
4719 }
4720 else
4721 {
4722 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4724
4725 IEM_MC_BEGIN(0, 0);
4726 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4727 IEM_MC_ADVANCE_RIP();
4728 } IEM_MC_ELSE() {
4729 IEM_MC_REL_JMP_S32(i32Imm);
4730 } IEM_MC_ENDIF();
4731 IEM_MC_END();
4732 }
4733 return VINF_SUCCESS;
4734}
4735
4736
4737/** Opcode 0x0f 0x82. */
4738FNIEMOP_DEF(iemOp_jc_Jv)
4739{
4740 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4741 IEMOP_HLP_MIN_386();
4742 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4743 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4744 {
4745 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4747
4748 IEM_MC_BEGIN(0, 0);
4749 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4750 IEM_MC_REL_JMP_S16(i16Imm);
4751 } IEM_MC_ELSE() {
4752 IEM_MC_ADVANCE_RIP();
4753 } IEM_MC_ENDIF();
4754 IEM_MC_END();
4755 }
4756 else
4757 {
4758 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4760
4761 IEM_MC_BEGIN(0, 0);
4762 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4763 IEM_MC_REL_JMP_S32(i32Imm);
4764 } IEM_MC_ELSE() {
4765 IEM_MC_ADVANCE_RIP();
4766 } IEM_MC_ENDIF();
4767 IEM_MC_END();
4768 }
4769 return VINF_SUCCESS;
4770}
4771
4772
4773/** Opcode 0x0f 0x83. */
4774FNIEMOP_DEF(iemOp_jnc_Jv)
4775{
4776 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4777 IEMOP_HLP_MIN_386();
4778 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4779 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4780 {
4781 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4783
4784 IEM_MC_BEGIN(0, 0);
4785 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4786 IEM_MC_ADVANCE_RIP();
4787 } IEM_MC_ELSE() {
4788 IEM_MC_REL_JMP_S16(i16Imm);
4789 } IEM_MC_ENDIF();
4790 IEM_MC_END();
4791 }
4792 else
4793 {
4794 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4796
4797 IEM_MC_BEGIN(0, 0);
4798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4799 IEM_MC_ADVANCE_RIP();
4800 } IEM_MC_ELSE() {
4801 IEM_MC_REL_JMP_S32(i32Imm);
4802 } IEM_MC_ENDIF();
4803 IEM_MC_END();
4804 }
4805 return VINF_SUCCESS;
4806}
4807
4808
4809/** Opcode 0x0f 0x84. */
4810FNIEMOP_DEF(iemOp_je_Jv)
4811{
4812 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4813 IEMOP_HLP_MIN_386();
4814 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4815 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4816 {
4817 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4819
4820 IEM_MC_BEGIN(0, 0);
4821 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4822 IEM_MC_REL_JMP_S16(i16Imm);
4823 } IEM_MC_ELSE() {
4824 IEM_MC_ADVANCE_RIP();
4825 } IEM_MC_ENDIF();
4826 IEM_MC_END();
4827 }
4828 else
4829 {
4830 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4832
4833 IEM_MC_BEGIN(0, 0);
4834 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4835 IEM_MC_REL_JMP_S32(i32Imm);
4836 } IEM_MC_ELSE() {
4837 IEM_MC_ADVANCE_RIP();
4838 } IEM_MC_ENDIF();
4839 IEM_MC_END();
4840 }
4841 return VINF_SUCCESS;
4842}
4843
4844
4845/** Opcode 0x0f 0x85. */
4846FNIEMOP_DEF(iemOp_jne_Jv)
4847{
4848 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4849 IEMOP_HLP_MIN_386();
4850 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4851 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4852 {
4853 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4855
4856 IEM_MC_BEGIN(0, 0);
4857 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4858 IEM_MC_ADVANCE_RIP();
4859 } IEM_MC_ELSE() {
4860 IEM_MC_REL_JMP_S16(i16Imm);
4861 } IEM_MC_ENDIF();
4862 IEM_MC_END();
4863 }
4864 else
4865 {
4866 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4868
4869 IEM_MC_BEGIN(0, 0);
4870 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4871 IEM_MC_ADVANCE_RIP();
4872 } IEM_MC_ELSE() {
4873 IEM_MC_REL_JMP_S32(i32Imm);
4874 } IEM_MC_ENDIF();
4875 IEM_MC_END();
4876 }
4877 return VINF_SUCCESS;
4878}
4879
4880
4881/** Opcode 0x0f 0x86. */
4882FNIEMOP_DEF(iemOp_jbe_Jv)
4883{
4884 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4885 IEMOP_HLP_MIN_386();
4886 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4887 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4888 {
4889 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4891
4892 IEM_MC_BEGIN(0, 0);
4893 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4894 IEM_MC_REL_JMP_S16(i16Imm);
4895 } IEM_MC_ELSE() {
4896 IEM_MC_ADVANCE_RIP();
4897 } IEM_MC_ENDIF();
4898 IEM_MC_END();
4899 }
4900 else
4901 {
4902 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4904
4905 IEM_MC_BEGIN(0, 0);
4906 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4907 IEM_MC_REL_JMP_S32(i32Imm);
4908 } IEM_MC_ELSE() {
4909 IEM_MC_ADVANCE_RIP();
4910 } IEM_MC_ENDIF();
4911 IEM_MC_END();
4912 }
4913 return VINF_SUCCESS;
4914}
4915
4916
4917/** Opcode 0x0f 0x87. */
4918FNIEMOP_DEF(iemOp_jnbe_Jv)
4919{
4920 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4921 IEMOP_HLP_MIN_386();
4922 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4923 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4924 {
4925 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4927
4928 IEM_MC_BEGIN(0, 0);
4929 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4930 IEM_MC_ADVANCE_RIP();
4931 } IEM_MC_ELSE() {
4932 IEM_MC_REL_JMP_S16(i16Imm);
4933 } IEM_MC_ENDIF();
4934 IEM_MC_END();
4935 }
4936 else
4937 {
4938 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4940
4941 IEM_MC_BEGIN(0, 0);
4942 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4943 IEM_MC_ADVANCE_RIP();
4944 } IEM_MC_ELSE() {
4945 IEM_MC_REL_JMP_S32(i32Imm);
4946 } IEM_MC_ENDIF();
4947 IEM_MC_END();
4948 }
4949 return VINF_SUCCESS;
4950}
4951
4952
4953/** Opcode 0x0f 0x88. */
4954FNIEMOP_DEF(iemOp_js_Jv)
4955{
4956 IEMOP_MNEMONIC(js_Jv, "js Jv");
4957 IEMOP_HLP_MIN_386();
4958 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4959 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4960 {
4961 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4963
4964 IEM_MC_BEGIN(0, 0);
4965 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4966 IEM_MC_REL_JMP_S16(i16Imm);
4967 } IEM_MC_ELSE() {
4968 IEM_MC_ADVANCE_RIP();
4969 } IEM_MC_ENDIF();
4970 IEM_MC_END();
4971 }
4972 else
4973 {
4974 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4976
4977 IEM_MC_BEGIN(0, 0);
4978 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4979 IEM_MC_REL_JMP_S32(i32Imm);
4980 } IEM_MC_ELSE() {
4981 IEM_MC_ADVANCE_RIP();
4982 } IEM_MC_ENDIF();
4983 IEM_MC_END();
4984 }
4985 return VINF_SUCCESS;
4986}
4987
4988
4989/** Opcode 0x0f 0x89. */
4990FNIEMOP_DEF(iemOp_jns_Jv)
4991{
4992 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4993 IEMOP_HLP_MIN_386();
4994 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4995 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4996 {
4997 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4999
5000 IEM_MC_BEGIN(0, 0);
5001 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5002 IEM_MC_ADVANCE_RIP();
5003 } IEM_MC_ELSE() {
5004 IEM_MC_REL_JMP_S16(i16Imm);
5005 } IEM_MC_ENDIF();
5006 IEM_MC_END();
5007 }
5008 else
5009 {
5010 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5012
5013 IEM_MC_BEGIN(0, 0);
5014 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5015 IEM_MC_ADVANCE_RIP();
5016 } IEM_MC_ELSE() {
5017 IEM_MC_REL_JMP_S32(i32Imm);
5018 } IEM_MC_ENDIF();
5019 IEM_MC_END();
5020 }
5021 return VINF_SUCCESS;
5022}
5023
5024
5025/** Opcode 0x0f 0x8a. */
5026FNIEMOP_DEF(iemOp_jp_Jv)
5027{
5028 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5029 IEMOP_HLP_MIN_386();
5030 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5031 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5032 {
5033 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5035
5036 IEM_MC_BEGIN(0, 0);
5037 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5038 IEM_MC_REL_JMP_S16(i16Imm);
5039 } IEM_MC_ELSE() {
5040 IEM_MC_ADVANCE_RIP();
5041 } IEM_MC_ENDIF();
5042 IEM_MC_END();
5043 }
5044 else
5045 {
5046 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5048
5049 IEM_MC_BEGIN(0, 0);
5050 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5051 IEM_MC_REL_JMP_S32(i32Imm);
5052 } IEM_MC_ELSE() {
5053 IEM_MC_ADVANCE_RIP();
5054 } IEM_MC_ENDIF();
5055 IEM_MC_END();
5056 }
5057 return VINF_SUCCESS;
5058}
5059
5060
5061/** Opcode 0x0f 0x8b. */
5062FNIEMOP_DEF(iemOp_jnp_Jv)
5063{
5064 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5065 IEMOP_HLP_MIN_386();
5066 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5067 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5068 {
5069 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5071
5072 IEM_MC_BEGIN(0, 0);
5073 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5074 IEM_MC_ADVANCE_RIP();
5075 } IEM_MC_ELSE() {
5076 IEM_MC_REL_JMP_S16(i16Imm);
5077 } IEM_MC_ENDIF();
5078 IEM_MC_END();
5079 }
5080 else
5081 {
5082 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5084
5085 IEM_MC_BEGIN(0, 0);
5086 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5087 IEM_MC_ADVANCE_RIP();
5088 } IEM_MC_ELSE() {
5089 IEM_MC_REL_JMP_S32(i32Imm);
5090 } IEM_MC_ENDIF();
5091 IEM_MC_END();
5092 }
5093 return VINF_SUCCESS;
5094}
5095
5096
5097/** Opcode 0x0f 0x8c. */
5098FNIEMOP_DEF(iemOp_jl_Jv)
5099{
5100 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5101 IEMOP_HLP_MIN_386();
5102 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5103 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5104 {
5105 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5107
5108 IEM_MC_BEGIN(0, 0);
5109 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5110 IEM_MC_REL_JMP_S16(i16Imm);
5111 } IEM_MC_ELSE() {
5112 IEM_MC_ADVANCE_RIP();
5113 } IEM_MC_ENDIF();
5114 IEM_MC_END();
5115 }
5116 else
5117 {
5118 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5120
5121 IEM_MC_BEGIN(0, 0);
5122 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5123 IEM_MC_REL_JMP_S32(i32Imm);
5124 } IEM_MC_ELSE() {
5125 IEM_MC_ADVANCE_RIP();
5126 } IEM_MC_ENDIF();
5127 IEM_MC_END();
5128 }
5129 return VINF_SUCCESS;
5130}
5131
5132
5133/** Opcode 0x0f 0x8d. */
5134FNIEMOP_DEF(iemOp_jnl_Jv)
5135{
5136 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5137 IEMOP_HLP_MIN_386();
5138 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5139 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5140 {
5141 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5143
5144 IEM_MC_BEGIN(0, 0);
5145 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5146 IEM_MC_ADVANCE_RIP();
5147 } IEM_MC_ELSE() {
5148 IEM_MC_REL_JMP_S16(i16Imm);
5149 } IEM_MC_ENDIF();
5150 IEM_MC_END();
5151 }
5152 else
5153 {
5154 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5156
5157 IEM_MC_BEGIN(0, 0);
5158 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5159 IEM_MC_ADVANCE_RIP();
5160 } IEM_MC_ELSE() {
5161 IEM_MC_REL_JMP_S32(i32Imm);
5162 } IEM_MC_ENDIF();
5163 IEM_MC_END();
5164 }
5165 return VINF_SUCCESS;
5166}
5167
5168
5169/** Opcode 0x0f 0x8e. */
5170FNIEMOP_DEF(iemOp_jle_Jv)
5171{
5172 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5173 IEMOP_HLP_MIN_386();
5174 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5175 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5176 {
5177 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5179
5180 IEM_MC_BEGIN(0, 0);
5181 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5182 IEM_MC_REL_JMP_S16(i16Imm);
5183 } IEM_MC_ELSE() {
5184 IEM_MC_ADVANCE_RIP();
5185 } IEM_MC_ENDIF();
5186 IEM_MC_END();
5187 }
5188 else
5189 {
5190 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5192
5193 IEM_MC_BEGIN(0, 0);
5194 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5195 IEM_MC_REL_JMP_S32(i32Imm);
5196 } IEM_MC_ELSE() {
5197 IEM_MC_ADVANCE_RIP();
5198 } IEM_MC_ENDIF();
5199 IEM_MC_END();
5200 }
5201 return VINF_SUCCESS;
5202}
5203
5204
5205/** Opcode 0x0f 0x8f. */
5206FNIEMOP_DEF(iemOp_jnle_Jv)
5207{
5208 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5209 IEMOP_HLP_MIN_386();
5210 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5211 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5212 {
5213 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5215
5216 IEM_MC_BEGIN(0, 0);
5217 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5218 IEM_MC_ADVANCE_RIP();
5219 } IEM_MC_ELSE() {
5220 IEM_MC_REL_JMP_S16(i16Imm);
5221 } IEM_MC_ENDIF();
5222 IEM_MC_END();
5223 }
5224 else
5225 {
5226 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5228
5229 IEM_MC_BEGIN(0, 0);
5230 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5231 IEM_MC_ADVANCE_RIP();
5232 } IEM_MC_ELSE() {
5233 IEM_MC_REL_JMP_S32(i32Imm);
5234 } IEM_MC_ENDIF();
5235 IEM_MC_END();
5236 }
5237 return VINF_SUCCESS;
5238}
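/*
 * Summary (editor's recap of the sixteen handlers above): the Jcc bodies
 * are identical except for the EFLAGS predicate --
 *     jo/jno: OF          jc/jnc: CF           je/jne: ZF
 *     jbe/jnbe: CF|ZF     js/jns: SF           jp/jnp: PF
 *     jl/jnl: SF != OF    jle/jnle: ZF || (SF != OF)
 * -- with each "n" variant simply swapping the jump and fall-through arms.
 */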
5239
5240
5241/** Opcode 0x0f 0x90. */
5242FNIEMOP_DEF(iemOp_seto_Eb)
5243{
5244 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5245 IEMOP_HLP_MIN_386();
5246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5247
5248 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5249 * any way. AMD says it's "unused", whatever that means. We're
5250 * ignoring for now. */
5251 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5252 {
5253 /* register target */
5254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5255 IEM_MC_BEGIN(0, 0);
5256 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5257 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5258 } IEM_MC_ELSE() {
5259 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5260 } IEM_MC_ENDIF();
5261 IEM_MC_ADVANCE_RIP();
5262 IEM_MC_END();
5263 }
5264 else
5265 {
5266 /* memory target */
5267 IEM_MC_BEGIN(0, 1);
5268 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5271 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5272 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5273 } IEM_MC_ELSE() {
5274 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5275 } IEM_MC_ENDIF();
5276 IEM_MC_ADVANCE_RIP();
5277 IEM_MC_END();
5278 }
5279 return VINF_SUCCESS;
5280}
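/*
 * Editor's sketch: the remaining setcc handlers all share this shape --
 * test the same predicates as the Jcc family and store a constant 1 or 0
 * byte to the r/m8 target -- e.g. "0f 94 c0" (sete al) writes 1 to AL when
 * ZF is set and 0 otherwise.
 */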
5281
5282
5283/** Opcode 0x0f 0x91. */
5284FNIEMOP_DEF(iemOp_setno_Eb)
5285{
5286 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5287 IEMOP_HLP_MIN_386();
5288 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5289
5290 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5291 * any way. AMD says it's "unused", whatever that means. We're
5292 * ignoring for now. */
5293 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5294 {
5295 /* register target */
5296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5297 IEM_MC_BEGIN(0, 0);
5298 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5299 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5300 } IEM_MC_ELSE() {
5301 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5302 } IEM_MC_ENDIF();
5303 IEM_MC_ADVANCE_RIP();
5304 IEM_MC_END();
5305 }
5306 else
5307 {
5308 /* memory target */
5309 IEM_MC_BEGIN(0, 1);
5310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5313 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5314 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5315 } IEM_MC_ELSE() {
5316 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5317 } IEM_MC_ENDIF();
5318 IEM_MC_ADVANCE_RIP();
5319 IEM_MC_END();
5320 }
5321 return VINF_SUCCESS;
5322}
5323
5324
5325/** Opcode 0x0f 0x92. */
5326FNIEMOP_DEF(iemOp_setc_Eb)
5327{
5328 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5329 IEMOP_HLP_MIN_386();
5330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5331
5332 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5333 * any way. AMD says it's "unused", whatever that means. We're
5334 * ignoring for now. */
5335 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5336 {
5337 /* register target */
5338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5339 IEM_MC_BEGIN(0, 0);
5340 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5341 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5342 } IEM_MC_ELSE() {
5343 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5344 } IEM_MC_ENDIF();
5345 IEM_MC_ADVANCE_RIP();
5346 IEM_MC_END();
5347 }
5348 else
5349 {
5350 /* memory target */
5351 IEM_MC_BEGIN(0, 1);
5352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5355 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5356 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5357 } IEM_MC_ELSE() {
5358 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5359 } IEM_MC_ENDIF();
5360 IEM_MC_ADVANCE_RIP();
5361 IEM_MC_END();
5362 }
5363 return VINF_SUCCESS;
5364}
5365
5366
5367/** Opcode 0x0f 0x93. */
5368FNIEMOP_DEF(iemOp_setnc_Eb)
5369{
5370 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5371 IEMOP_HLP_MIN_386();
5372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5373
5374 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5375 * any way. AMD says it's "unused", whatever that means. We're
5376 * ignoring for now. */
5377 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5378 {
5379 /* register target */
5380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5381 IEM_MC_BEGIN(0, 0);
5382 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5383 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5384 } IEM_MC_ELSE() {
5385 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5386 } IEM_MC_ENDIF();
5387 IEM_MC_ADVANCE_RIP();
5388 IEM_MC_END();
5389 }
5390 else
5391 {
5392 /* memory target */
5393 IEM_MC_BEGIN(0, 1);
5394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5397 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5398 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5399 } IEM_MC_ELSE() {
5400 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5401 } IEM_MC_ENDIF();
5402 IEM_MC_ADVANCE_RIP();
5403 IEM_MC_END();
5404 }
5405 return VINF_SUCCESS;
5406}
5407
5408
5409/** Opcode 0x0f 0x94. */
5410FNIEMOP_DEF(iemOp_sete_Eb)
5411{
5412 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5413 IEMOP_HLP_MIN_386();
5414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5415
5416 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5417 * any way. AMD says it's "unused", whatever that means. We're
5418 * ignoring for now. */
5419 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5420 {
5421 /* register target */
5422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5423 IEM_MC_BEGIN(0, 0);
5424 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5425 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5426 } IEM_MC_ELSE() {
5427 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5428 } IEM_MC_ENDIF();
5429 IEM_MC_ADVANCE_RIP();
5430 IEM_MC_END();
5431 }
5432 else
5433 {
5434 /* memory target */
5435 IEM_MC_BEGIN(0, 1);
5436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5439 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5440 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5441 } IEM_MC_ELSE() {
5442 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5443 } IEM_MC_ENDIF();
5444 IEM_MC_ADVANCE_RIP();
5445 IEM_MC_END();
5446 }
5447 return VINF_SUCCESS;
5448}
5449
5450
5451/** Opcode 0x0f 0x95. */
5452FNIEMOP_DEF(iemOp_setne_Eb)
5453{
5454 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5455 IEMOP_HLP_MIN_386();
5456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5457
5458 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5459 * any way. AMD says it's "unused", whatever that means. We're
5460 * ignoring for now. */
5461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5462 {
5463 /* register target */
5464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5465 IEM_MC_BEGIN(0, 0);
5466 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5467 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5468 } IEM_MC_ELSE() {
5469 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5470 } IEM_MC_ENDIF();
5471 IEM_MC_ADVANCE_RIP();
5472 IEM_MC_END();
5473 }
5474 else
5475 {
5476 /* memory target */
5477 IEM_MC_BEGIN(0, 1);
5478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5481 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5482 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5483 } IEM_MC_ELSE() {
5484 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5485 } IEM_MC_ENDIF();
5486 IEM_MC_ADVANCE_RIP();
5487 IEM_MC_END();
5488 }
5489 return VINF_SUCCESS;
5490}
5491
5492
5493/** Opcode 0x0f 0x96. */
5494FNIEMOP_DEF(iemOp_setbe_Eb)
5495{
5496 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5497 IEMOP_HLP_MIN_386();
5498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5499
5500 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5501 * any way. AMD says it's "unused", whatever that means. We're
5502 * ignoring for now. */
5503 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5504 {
5505 /* register target */
5506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5507 IEM_MC_BEGIN(0, 0);
5508 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5509 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5510 } IEM_MC_ELSE() {
5511 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5512 } IEM_MC_ENDIF();
5513 IEM_MC_ADVANCE_RIP();
5514 IEM_MC_END();
5515 }
5516 else
5517 {
5518 /* memory target */
5519 IEM_MC_BEGIN(0, 1);
5520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5523 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5524 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5525 } IEM_MC_ELSE() {
5526 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5527 } IEM_MC_ENDIF();
5528 IEM_MC_ADVANCE_RIP();
5529 IEM_MC_END();
5530 }
5531 return VINF_SUCCESS;
5532}
5533
5534
5535/** Opcode 0x0f 0x97. */
5536FNIEMOP_DEF(iemOp_setnbe_Eb)
5537{
5538 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5539 IEMOP_HLP_MIN_386();
5540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5541
5542 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5543 * any way. AMD says it's "unused", whatever that means. We're
5544 * ignoring it for now. */
5545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5546 {
5547 /* register target */
5548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5549 IEM_MC_BEGIN(0, 0);
5550 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5551 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5552 } IEM_MC_ELSE() {
5553 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5554 } IEM_MC_ENDIF();
5555 IEM_MC_ADVANCE_RIP();
5556 IEM_MC_END();
5557 }
5558 else
5559 {
5560 /* memory target */
5561 IEM_MC_BEGIN(0, 1);
5562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5565 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5566 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5567 } IEM_MC_ELSE() {
5568 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5569 } IEM_MC_ENDIF();
5570 IEM_MC_ADVANCE_RIP();
5571 IEM_MC_END();
5572 }
5573 return VINF_SUCCESS;
5574}
5575
5576
5577/** Opcode 0x0f 0x98. */
5578FNIEMOP_DEF(iemOp_sets_Eb)
5579{
5580 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5581 IEMOP_HLP_MIN_386();
5582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5583
5584 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5585 * any way. AMD says it's "unused", whatever that means. We're
5586 * ignoring it for now. */
5587 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5588 {
5589 /* register target */
5590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5591 IEM_MC_BEGIN(0, 0);
5592 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5593 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5594 } IEM_MC_ELSE() {
5595 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5596 } IEM_MC_ENDIF();
5597 IEM_MC_ADVANCE_RIP();
5598 IEM_MC_END();
5599 }
5600 else
5601 {
5602 /* memory target */
5603 IEM_MC_BEGIN(0, 1);
5604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5607 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5608 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5609 } IEM_MC_ELSE() {
5610 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5611 } IEM_MC_ENDIF();
5612 IEM_MC_ADVANCE_RIP();
5613 IEM_MC_END();
5614 }
5615 return VINF_SUCCESS;
5616}
5617
5618
5619/** Opcode 0x0f 0x99. */
5620FNIEMOP_DEF(iemOp_setns_Eb)
5621{
5622 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5623 IEMOP_HLP_MIN_386();
5624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5625
5626 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5627 * any way. AMD says it's "unused", whatever that means. We're
5628 * ignoring it for now. */
5629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5630 {
5631 /* register target */
5632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5633 IEM_MC_BEGIN(0, 0);
5634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5635 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5636 } IEM_MC_ELSE() {
5637 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5638 } IEM_MC_ENDIF();
5639 IEM_MC_ADVANCE_RIP();
5640 IEM_MC_END();
5641 }
5642 else
5643 {
5644 /* memory target */
5645 IEM_MC_BEGIN(0, 1);
5646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5649 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5650 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5651 } IEM_MC_ELSE() {
5652 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5653 } IEM_MC_ENDIF();
5654 IEM_MC_ADVANCE_RIP();
5655 IEM_MC_END();
5656 }
5657 return VINF_SUCCESS;
5658}
5659
5660
5661/** Opcode 0x0f 0x9a. */
5662FNIEMOP_DEF(iemOp_setp_Eb)
5663{
5664 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5665 IEMOP_HLP_MIN_386();
5666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5667
5668 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5669 * any way. AMD says it's "unused", whatever that means. We're
5670 * ignoring it for now. */
5671 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5672 {
5673 /* register target */
5674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5675 IEM_MC_BEGIN(0, 0);
5676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5677 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5678 } IEM_MC_ELSE() {
5679 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5680 } IEM_MC_ENDIF();
5681 IEM_MC_ADVANCE_RIP();
5682 IEM_MC_END();
5683 }
5684 else
5685 {
5686 /* memory target */
5687 IEM_MC_BEGIN(0, 1);
5688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5691 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5692 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5693 } IEM_MC_ELSE() {
5694 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5695 } IEM_MC_ENDIF();
5696 IEM_MC_ADVANCE_RIP();
5697 IEM_MC_END();
5698 }
5699 return VINF_SUCCESS;
5700}
5701
5702
5703/** Opcode 0x0f 0x9b. */
5704FNIEMOP_DEF(iemOp_setnp_Eb)
5705{
5706 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5707 IEMOP_HLP_MIN_386();
5708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5709
5710 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5711 * any way. AMD says it's "unused", whatever that means. We're
5712 * ignoring it for now. */
5713 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5714 {
5715 /* register target */
5716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5717 IEM_MC_BEGIN(0, 0);
5718 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5719 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5720 } IEM_MC_ELSE() {
5721 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5722 } IEM_MC_ENDIF();
5723 IEM_MC_ADVANCE_RIP();
5724 IEM_MC_END();
5725 }
5726 else
5727 {
5728 /* memory target */
5729 IEM_MC_BEGIN(0, 1);
5730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5733 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5734 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5735 } IEM_MC_ELSE() {
5736 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5737 } IEM_MC_ENDIF();
5738 IEM_MC_ADVANCE_RIP();
5739 IEM_MC_END();
5740 }
5741 return VINF_SUCCESS;
5742}
5743
5744
5745/** Opcode 0x0f 0x9c. */
5746FNIEMOP_DEF(iemOp_setl_Eb)
5747{
5748 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5749 IEMOP_HLP_MIN_386();
5750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5751
5752 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5753 * any way. AMD says it's "unused", whatever that means. We're
5754 * ignoring it for now. */
5755 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5756 {
5757 /* register target */
5758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5759 IEM_MC_BEGIN(0, 0);
5760 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5761 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5762 } IEM_MC_ELSE() {
5763 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5764 } IEM_MC_ENDIF();
5765 IEM_MC_ADVANCE_RIP();
5766 IEM_MC_END();
5767 }
5768 else
5769 {
5770 /* memory target */
5771 IEM_MC_BEGIN(0, 1);
5772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5775 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5776 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5777 } IEM_MC_ELSE() {
5778 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5779 } IEM_MC_ENDIF();
5780 IEM_MC_ADVANCE_RIP();
5781 IEM_MC_END();
5782 }
5783 return VINF_SUCCESS;
5784}
5785
5786
5787/** Opcode 0x0f 0x9d. */
5788FNIEMOP_DEF(iemOp_setnl_Eb)
5789{
5790 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5791 IEMOP_HLP_MIN_386();
5792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5793
5794 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5795 * any way. AMD says it's "unused", whatever that means. We're
5796 * ignoring it for now. */
5797 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5798 {
5799 /* register target */
5800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5801 IEM_MC_BEGIN(0, 0);
5802 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5803 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5804 } IEM_MC_ELSE() {
5805 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5806 } IEM_MC_ENDIF();
5807 IEM_MC_ADVANCE_RIP();
5808 IEM_MC_END();
5809 }
5810 else
5811 {
5812 /* memory target */
5813 IEM_MC_BEGIN(0, 1);
5814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5817 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5818 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5819 } IEM_MC_ELSE() {
5820 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5821 } IEM_MC_ENDIF();
5822 IEM_MC_ADVANCE_RIP();
5823 IEM_MC_END();
5824 }
5825 return VINF_SUCCESS;
5826}
5827
5828
5829/** Opcode 0x0f 0x9e. */
5830FNIEMOP_DEF(iemOp_setle_Eb)
5831{
5832 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5833 IEMOP_HLP_MIN_386();
5834 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5835
5836 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5837 * any way. AMD says it's "unused", whatever that means. We're
5838 * ignoring it for now. */
5839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5840 {
5841 /* register target */
5842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5843 IEM_MC_BEGIN(0, 0);
5844 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5845 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5846 } IEM_MC_ELSE() {
5847 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5848 } IEM_MC_ENDIF();
5849 IEM_MC_ADVANCE_RIP();
5850 IEM_MC_END();
5851 }
5852 else
5853 {
5854 /* memory target */
5855 IEM_MC_BEGIN(0, 1);
5856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5859 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5860 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5861 } IEM_MC_ELSE() {
5862 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5863 } IEM_MC_ENDIF();
5864 IEM_MC_ADVANCE_RIP();
5865 IEM_MC_END();
5866 }
5867 return VINF_SUCCESS;
5868}
5869
5870
5871/** Opcode 0x0f 0x9f. */
5872FNIEMOP_DEF(iemOp_setnle_Eb)
5873{
5874 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5875 IEMOP_HLP_MIN_386();
5876 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5877
5878 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5879 * any way. AMD says it's "unused", whatever that means. We're
5880 * ignoring it for now. */
5881 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5882 {
5883 /* register target */
5884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5885 IEM_MC_BEGIN(0, 0);
5886 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5887 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5888 } IEM_MC_ELSE() {
5889 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5890 } IEM_MC_ENDIF();
5891 IEM_MC_ADVANCE_RIP();
5892 IEM_MC_END();
5893 }
5894 else
5895 {
5896 /* memory target */
5897 IEM_MC_BEGIN(0, 1);
5898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5901 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5902 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5903 } IEM_MC_ELSE() {
5904 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5905 } IEM_MC_ENDIF();
5906 IEM_MC_ADVANCE_RIP();
5907 IEM_MC_END();
5908 }
5909 return VINF_SUCCESS;
5910}
5911
5912
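/*
 * Illustrative sketch, not part of the decoder: the 0x0f 0x90..0x9f setcc
 * bodies above differ only in the EFLAGS predicate handed to the
 * IEM_MC_IF_EFL_* macros.  A compile-time disabled plain C rendering of
 * the whole family; the flag masks are the architectural bit values, while
 * the helper name and EXAMPLE_* defines are made up for illustration.
 */
#if 0
# include <stdbool.h>
# include <stdint.h>

# define EXAMPLE_EFL_CF UINT32_C(0x0001)
# define EXAMPLE_EFL_PF UINT32_C(0x0004)
# define EXAMPLE_EFL_ZF UINT32_C(0x0040)
# define EXAMPLE_EFL_SF UINT32_C(0x0080)
# define EXAMPLE_EFL_OF UINT32_C(0x0800)

/* Returns the byte a 0x0f 0x90..0x9f setcc instruction would store. */
static uint8_t exampleSetccValue(uint8_t bOpcode, uint32_t fEfl)
{
    bool fCond;
    switch (bOpcode & 0x0e)     /* even/odd opcode pairs share one predicate */
    {
        case 0x00: fCond = (fEfl & EXAMPLE_EFL_OF) != 0; break;                     /* seto  */
        case 0x02: fCond = (fEfl & EXAMPLE_EFL_CF) != 0; break;                     /* setb  */
        case 0x04: fCond = (fEfl & EXAMPLE_EFL_ZF) != 0; break;                     /* sete  */
        case 0x06: fCond = (fEfl & (EXAMPLE_EFL_CF | EXAMPLE_EFL_ZF)) != 0; break;  /* setbe */
        case 0x08: fCond = (fEfl & EXAMPLE_EFL_SF) != 0; break;                     /* sets  */
        case 0x0a: fCond = (fEfl & EXAMPLE_EFL_PF) != 0; break;                     /* setp  */
        case 0x0c: fCond = !(fEfl & EXAMPLE_EFL_SF) != !(fEfl & EXAMPLE_EFL_OF); break; /* setl */
        default:   fCond = (fEfl & EXAMPLE_EFL_ZF) != 0                             /* setle */
                        || !(fEfl & EXAMPLE_EFL_SF) != !(fEfl & EXAMPLE_EFL_OF);
                   break;
    }
    return (uint8_t)((bOpcode & 1) ? !fCond : fCond);   /* odd opcodes negate */
}
#endif

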
5913/**
5914 * Common 'push segment-register' helper.
5915 */
5916FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5917{
5918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5919 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5920 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5921
5922 switch (pVCpu->iem.s.enmEffOpSize)
5923 {
5924 case IEMMODE_16BIT:
5925 IEM_MC_BEGIN(0, 1);
5926 IEM_MC_LOCAL(uint16_t, u16Value);
5927 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5928 IEM_MC_PUSH_U16(u16Value);
5929 IEM_MC_ADVANCE_RIP();
5930 IEM_MC_END();
5931 break;
5932
5933 case IEMMODE_32BIT:
5934 IEM_MC_BEGIN(0, 1);
5935 IEM_MC_LOCAL(uint32_t, u32Value);
5936 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5937 IEM_MC_PUSH_U32_SREG(u32Value);
5938 IEM_MC_ADVANCE_RIP();
5939 IEM_MC_END();
5940 break;
5941
5942 case IEMMODE_64BIT:
5943 IEM_MC_BEGIN(0, 1);
5944 IEM_MC_LOCAL(uint64_t, u64Value);
5945 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5946 IEM_MC_PUSH_U64(u64Value);
5947 IEM_MC_ADVANCE_RIP();
5948 IEM_MC_END();
5949 break;
5950 }
5951
5952 return VINF_SUCCESS;
5953}
5954
5955
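/*
 * Illustrative sketch, not part of the decoder: IEM_MC_PUSH_U32_SREG is
 * used for the 32-bit case presumably because real CPUs may write only the
 * low word of the 32-bit stack slot when pushing a segment register (the
 * SDM permits either that or a zero-extended store).  A hypothetical
 * flat-memory model of the push, little endian assumed:
 */
#if 0
# include <stdint.h>
# include <string.h>

/* pbTop is the current top of a hypothetical flat stack. */
static uint8_t *examplePushSReg(uint8_t *pbTop, uint16_t uSel, unsigned cbOp)
{
    pbTop -= cbOp;                              /* rSP -= 2, 4 or 8 */
    memcpy(pbTop, &uSel, sizeof(uSel));         /* selector in the low word */
    /* Zero-extending variant; a 16-bit-write CPU would instead leave the
       remaining cbOp - 2 bytes of the slot untouched. */
    memset(pbTop + sizeof(uSel), 0, cbOp - sizeof(uSel));
    return pbTop;
}
#endif

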
5956/** Opcode 0x0f 0xa0. */
5957FNIEMOP_DEF(iemOp_push_fs)
5958{
5959 IEMOP_MNEMONIC(push_fs, "push fs");
5960 IEMOP_HLP_MIN_386();
5961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5962 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5963}
5964
5965
5966/** Opcode 0x0f 0xa1. */
5967FNIEMOP_DEF(iemOp_pop_fs)
5968{
5969 IEMOP_MNEMONIC(pop_fs, "pop fs");
5970 IEMOP_HLP_MIN_386();
5971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5972 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5973}
5974
5975
5976/** Opcode 0x0f 0xa2. */
5977FNIEMOP_DEF(iemOp_cpuid)
5978{
5979 IEMOP_MNEMONIC(cpuid, "cpuid");
5980 IEMOP_HLP_MIN_486(); /* not all 486s have CPUID. */
5981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5982 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5983}
5984
5985
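/*
 * Illustrative sketch, not part of the decoder: the real work happens in
 * iemCImpl_cpuid, which serves the guest whatever leaves CPUM has set up.
 * For comparison, querying the host CPU for leaf 0 with the GCC/Clang
 * <cpuid.h> helper looks like this (hypothetical test code):
 */
#if 0
# include <cpuid.h>
# include <stdio.h>

int main(void)
{
    unsigned uEax, uEbx, uEcx, uEdx;
    if (__get_cpuid(0 /*leaf*/, &uEax, &uEbx, &uEcx, &uEdx))
        printf("max std leaf=%u vendor=%.4s%.4s%.4s\n",   /* EBX:EDX:ECX order */
               uEax, (const char *)&uEbx, (const char *)&uEdx, (const char *)&uEcx);
    return 0;
}
#endif

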
5986/**
5987 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5988 * iemOp_bts_Ev_Gv.
5989 */
5990FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5991{
5992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5993 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5994
5995 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5996 {
5997 /* register destination. */
5998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5999 switch (pVCpu->iem.s.enmEffOpSize)
6000 {
6001 case IEMMODE_16BIT:
6002 IEM_MC_BEGIN(3, 0);
6003 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6004 IEM_MC_ARG(uint16_t, u16Src, 1);
6005 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6006
6007 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6008 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6009 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6010 IEM_MC_REF_EFLAGS(pEFlags);
6011 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6012
6013 IEM_MC_ADVANCE_RIP();
6014 IEM_MC_END();
6015 return VINF_SUCCESS;
6016
6017 case IEMMODE_32BIT:
6018 IEM_MC_BEGIN(3, 0);
6019 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6020 IEM_MC_ARG(uint32_t, u32Src, 1);
6021 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6022
6023 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6024 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6025 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6026 IEM_MC_REF_EFLAGS(pEFlags);
6027 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6028
6029 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6030 IEM_MC_ADVANCE_RIP();
6031 IEM_MC_END();
6032 return VINF_SUCCESS;
6033
6034 case IEMMODE_64BIT:
6035 IEM_MC_BEGIN(3, 0);
6036 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6037 IEM_MC_ARG(uint64_t, u64Src, 1);
6038 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6039
6040 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6041 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6042 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6043 IEM_MC_REF_EFLAGS(pEFlags);
6044 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6045
6046 IEM_MC_ADVANCE_RIP();
6047 IEM_MC_END();
6048 return VINF_SUCCESS;
6049
6050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6051 }
6052 }
6053 else
6054 {
6055 /* memory destination. */
6056
6057 uint32_t fAccess;
6058 if (pImpl->pfnLockedU16)
6059 fAccess = IEM_ACCESS_DATA_RW;
6060 else /* BT */
6061 fAccess = IEM_ACCESS_DATA_R;
6062
6063 /** @todo test negative bit offsets! */
6064 switch (pVCpu->iem.s.enmEffOpSize)
6065 {
6066 case IEMMODE_16BIT:
6067 IEM_MC_BEGIN(3, 2);
6068 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6069 IEM_MC_ARG(uint16_t, u16Src, 1);
6070 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6072 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6073
6074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6075 if (pImpl->pfnLockedU16)
6076 IEMOP_HLP_DONE_DECODING();
6077 else
6078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6079 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6080 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6081 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6082 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6083 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6084 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6085 IEM_MC_FETCH_EFLAGS(EFlags);
6086
6087 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6088 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6089 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6090 else
6091 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6092 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6093
6094 IEM_MC_COMMIT_EFLAGS(EFlags);
6095 IEM_MC_ADVANCE_RIP();
6096 IEM_MC_END();
6097 return VINF_SUCCESS;
6098
6099 case IEMMODE_32BIT:
6100 IEM_MC_BEGIN(3, 2);
6101 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6102 IEM_MC_ARG(uint32_t, u32Src, 1);
6103 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6105 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6106
6107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6108 if (pImpl->pfnLockedU16)
6109 IEMOP_HLP_DONE_DECODING();
6110 else
6111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6112 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6113 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6114 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6115 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6116 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6117 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6118 IEM_MC_FETCH_EFLAGS(EFlags);
6119
6120 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6121 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6123 else
6124 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6125 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6126
6127 IEM_MC_COMMIT_EFLAGS(EFlags);
6128 IEM_MC_ADVANCE_RIP();
6129 IEM_MC_END();
6130 return VINF_SUCCESS;
6131
6132 case IEMMODE_64BIT:
6133 IEM_MC_BEGIN(3, 2);
6134 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6135 IEM_MC_ARG(uint64_t, u64Src, 1);
6136 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6138 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6139
6140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6141 if (pImpl->pfnLockedU16)
6142 IEMOP_HLP_DONE_DECODING();
6143 else
6144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6145 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6146 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6147 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6148 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6149 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6150 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6151 IEM_MC_FETCH_EFLAGS(EFlags);
6152
6153 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6155 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6156 else
6157 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6158 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6159
6160 IEM_MC_COMMIT_EFLAGS(EFlags);
6161 IEM_MC_ADVANCE_RIP();
6162 IEM_MC_END();
6163 return VINF_SUCCESS;
6164
6165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6166 }
6167 }
6168}
6169
6170
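/*
 * Illustrative sketch, not part of the decoder: for the memory forms the
 * signed bit offset selects both the word/dword/qword to access (the
 * SAR+SHL pair above) and the bit within it (the AND).  The 32-bit case in
 * plain C, with a hypothetical helper name:
 */
#if 0
# include <stdint.h>

/* Relies on >> of a negative int being arithmetic, which holds for the
   compilers targeted here; a negative offset legitimately addresses memory
   below the effective address, which is what the negative-bit-offsets todo
   above is about. */
static uint32_t exampleBtMem32(uint32_t const *puEffAddr, int32_t iBitOfs)
{
    uint32_t const *puDst = puEffAddr + (iBitOfs >> 5);     /* SAR 5, SHL 2 */
    uint32_t const  fMask = UINT32_C(1) << (iBitOfs & 31);  /* AND 0x1f */
    return (*puDst & fMask) != 0;                           /* -> CF */
}
#endif

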
6171/** Opcode 0x0f 0xa3. */
6172FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6173{
6174 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6175 IEMOP_HLP_MIN_386();
6176 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6177}
6178
6179
6180/**
6181 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6182 */
6183FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6184{
6185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6186 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6187
6188 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6189 {
6190 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6192
6193 switch (pVCpu->iem.s.enmEffOpSize)
6194 {
6195 case IEMMODE_16BIT:
6196 IEM_MC_BEGIN(4, 0);
6197 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6198 IEM_MC_ARG(uint16_t, u16Src, 1);
6199 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6200 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6201
6202 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6203 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6204 IEM_MC_REF_EFLAGS(pEFlags);
6205 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6206
6207 IEM_MC_ADVANCE_RIP();
6208 IEM_MC_END();
6209 return VINF_SUCCESS;
6210
6211 case IEMMODE_32BIT:
6212 IEM_MC_BEGIN(4, 0);
6213 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6214 IEM_MC_ARG(uint32_t, u32Src, 1);
6215 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6216 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6217
6218 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6219 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6220 IEM_MC_REF_EFLAGS(pEFlags);
6221 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6222
6223 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6224 IEM_MC_ADVANCE_RIP();
6225 IEM_MC_END();
6226 return VINF_SUCCESS;
6227
6228 case IEMMODE_64BIT:
6229 IEM_MC_BEGIN(4, 0);
6230 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6231 IEM_MC_ARG(uint64_t, u64Src, 1);
6232 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6233 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6234
6235 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6236 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6237 IEM_MC_REF_EFLAGS(pEFlags);
6238 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6239
6240 IEM_MC_ADVANCE_RIP();
6241 IEM_MC_END();
6242 return VINF_SUCCESS;
6243
6244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6245 }
6246 }
6247 else
6248 {
6249 switch (pVCpu->iem.s.enmEffOpSize)
6250 {
6251 case IEMMODE_16BIT:
6252 IEM_MC_BEGIN(4, 2);
6253 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6254 IEM_MC_ARG(uint16_t, u16Src, 1);
6255 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6256 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6258
6259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6260 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6261 IEM_MC_ASSIGN(cShiftArg, cShift);
6262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6263 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6264 IEM_MC_FETCH_EFLAGS(EFlags);
6265 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6266 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6267
6268 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6269 IEM_MC_COMMIT_EFLAGS(EFlags);
6270 IEM_MC_ADVANCE_RIP();
6271 IEM_MC_END();
6272 return VINF_SUCCESS;
6273
6274 case IEMMODE_32BIT:
6275 IEM_MC_BEGIN(4, 2);
6276 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6277 IEM_MC_ARG(uint32_t, u32Src, 1);
6278 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6279 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6281
6282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6283 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6284 IEM_MC_ASSIGN(cShiftArg, cShift);
6285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6286 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6287 IEM_MC_FETCH_EFLAGS(EFlags);
6288 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6289 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6290
6291 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6292 IEM_MC_COMMIT_EFLAGS(EFlags);
6293 IEM_MC_ADVANCE_RIP();
6294 IEM_MC_END();
6295 return VINF_SUCCESS;
6296
6297 case IEMMODE_64BIT:
6298 IEM_MC_BEGIN(4, 2);
6299 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6300 IEM_MC_ARG(uint64_t, u64Src, 1);
6301 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6302 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6304
6305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6306 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6307 IEM_MC_ASSIGN(cShiftArg, cShift);
6308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6309 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6310 IEM_MC_FETCH_EFLAGS(EFlags);
6311 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6312 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6313
6314 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6315 IEM_MC_COMMIT_EFLAGS(EFlags);
6316 IEM_MC_ADVANCE_RIP();
6317 IEM_MC_END();
6318 return VINF_SUCCESS;
6319
6320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6321 }
6322 }
6323}
6324
6325
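/*
 * Illustrative sketch, not part of the decoder: for shld the pfnNormalUxx
 * workers compute dst = (dst << c) | (src >> (width - c)) with the count
 * masked to the operand width; shrd mirrors it.  Counts above 15 give
 * undefined results for the 16-bit form on real CPUs.  A hypothetical
 * 32-bit version:
 */
#if 0
# include <stdint.h>

static uint32_t exampleShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                   /* the CPU masks the count mod 32 */
    if (!cShift)
        return uDst;                /* count 0: no change, no flag updates */
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif

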
6326/**
6327 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6328 */
6329FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6330{
6331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6332 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6333
6334 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6335 {
6336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6337
6338 switch (pVCpu->iem.s.enmEffOpSize)
6339 {
6340 case IEMMODE_16BIT:
6341 IEM_MC_BEGIN(4, 0);
6342 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6343 IEM_MC_ARG(uint16_t, u16Src, 1);
6344 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6345 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6346
6347 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6348 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6349 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6350 IEM_MC_REF_EFLAGS(pEFlags);
6351 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6352
6353 IEM_MC_ADVANCE_RIP();
6354 IEM_MC_END();
6355 return VINF_SUCCESS;
6356
6357 case IEMMODE_32BIT:
6358 IEM_MC_BEGIN(4, 0);
6359 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6360 IEM_MC_ARG(uint32_t, u32Src, 1);
6361 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6362 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6363
6364 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6365 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6366 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6367 IEM_MC_REF_EFLAGS(pEFlags);
6368 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6369
6370 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6371 IEM_MC_ADVANCE_RIP();
6372 IEM_MC_END();
6373 return VINF_SUCCESS;
6374
6375 case IEMMODE_64BIT:
6376 IEM_MC_BEGIN(4, 0);
6377 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6378 IEM_MC_ARG(uint64_t, u64Src, 1);
6379 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6380 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6381
6382 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6383 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6384 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6385 IEM_MC_REF_EFLAGS(pEFlags);
6386 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6387
6388 IEM_MC_ADVANCE_RIP();
6389 IEM_MC_END();
6390 return VINF_SUCCESS;
6391
6392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6393 }
6394 }
6395 else
6396 {
6397 switch (pVCpu->iem.s.enmEffOpSize)
6398 {
6399 case IEMMODE_16BIT:
6400 IEM_MC_BEGIN(4, 2);
6401 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6402 IEM_MC_ARG(uint16_t, u16Src, 1);
6403 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6404 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6406
6407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6409 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6410 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6411 IEM_MC_FETCH_EFLAGS(EFlags);
6412 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6413 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6414
6415 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6416 IEM_MC_COMMIT_EFLAGS(EFlags);
6417 IEM_MC_ADVANCE_RIP();
6418 IEM_MC_END();
6419 return VINF_SUCCESS;
6420
6421 case IEMMODE_32BIT:
6422 IEM_MC_BEGIN(4, 2);
6423 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6424 IEM_MC_ARG(uint32_t, u32Src, 1);
6425 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6426 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6428
6429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6431 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6432 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6433 IEM_MC_FETCH_EFLAGS(EFlags);
6434 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6435 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6436
6437 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6438 IEM_MC_COMMIT_EFLAGS(EFlags);
6439 IEM_MC_ADVANCE_RIP();
6440 IEM_MC_END();
6441 return VINF_SUCCESS;
6442
6443 case IEMMODE_64BIT:
6444 IEM_MC_BEGIN(4, 2);
6445 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6446 IEM_MC_ARG(uint64_t, u64Src, 1);
6447 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6448 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6450
6451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6453 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6454 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6455 IEM_MC_FETCH_EFLAGS(EFlags);
6456 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6457 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6458
6459 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6460 IEM_MC_COMMIT_EFLAGS(EFlags);
6461 IEM_MC_ADVANCE_RIP();
6462 IEM_MC_END();
6463 return VINF_SUCCESS;
6464
6465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6466 }
6467 }
6468}
6469
6470
6471
6472/** Opcode 0x0f 0xa4. */
6473FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6474{
6475 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6476 IEMOP_HLP_MIN_386();
6477 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6478}
6479
6480
6481/** Opcode 0x0f 0xa5. */
6482FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6483{
6484 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6485 IEMOP_HLP_MIN_386();
6486 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6487}
6488
6489
6490/** Opcode 0x0f 0xa8. */
6491FNIEMOP_DEF(iemOp_push_gs)
6492{
6493 IEMOP_MNEMONIC(push_gs, "push gs");
6494 IEMOP_HLP_MIN_386();
6495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6496 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6497}
6498
6499
6500/** Opcode 0x0f 0xa9. */
6501FNIEMOP_DEF(iemOp_pop_gs)
6502{
6503 IEMOP_MNEMONIC(pop_gs, "pop gs");
6504 IEMOP_HLP_MIN_386();
6505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6506 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6507}
6508
6509
6510/** Opcode 0x0f 0xaa. */
6511FNIEMOP_DEF(iemOp_rsm)
6512{
6513 IEMOP_MNEMONIC(rsm, "rsm");
6514 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6515 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6516 * intercept). */
6517 IEMOP_BITCH_ABOUT_STUB();
6518 return IEMOP_RAISE_INVALID_OPCODE();
6519}
6520
6523
6524/** Opcode 0x0f 0xab. */
6525FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6526{
6527 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6528 IEMOP_HLP_MIN_386();
6529 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6530}
6531
6532
6533/** Opcode 0x0f 0xac. */
6534FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6535{
6536 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6537 IEMOP_HLP_MIN_386();
6538 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6539}
6540
6541
6542/** Opcode 0x0f 0xad. */
6543FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6544{
6545 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6546 IEMOP_HLP_MIN_386();
6547 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6548}
6549
6550
6551/** Opcode 0x0f 0xae mem/0. */
6552FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6553{
6554 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6555 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6556 return IEMOP_RAISE_INVALID_OPCODE();
6557
6558 IEM_MC_BEGIN(3, 1);
6559 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6560 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6561 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6564 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6565 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6566 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6567 IEM_MC_END();
6568 return VINF_SUCCESS;
6569}
6570
6571
6572/** Opcode 0x0f 0xae mem/1. */
6573FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6574{
6575 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6576 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6577 return IEMOP_RAISE_INVALID_OPCODE();
6578
6579 IEM_MC_BEGIN(3, 1);
6580 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6581 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6582 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6585 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6586 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6587 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6588 IEM_MC_END();
6589 return VINF_SUCCESS;
6590}
6591
6592
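/*
 * Illustrative sketch, not part of the decoder: iemCImpl_fxsave and
 * iemCImpl_fxrstor emulate what the host intrinsics below do, namely dump
 * or reload the x87/SSE state via a 512 byte, 16-byte aligned area
 * (hypothetical test code; GCC wants -mfxsr for these intrinsics):
 */
#if 0
# include <immintrin.h>
# include <stdalign.h>

static void exampleFxSaveRstor(void)
{
    alignas(16) unsigned char abArea[512];  /* misalignment -> #GP on real hw */
    _fxsave(abArea);
    _fxrstor(abArea);                       /* reloads what was just saved */
}
#endif

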
6593/**
6594 * @opmaps grp15
6595 * @opcode !11/2
6596 * @oppfx none
6597 * @opcpuid sse
6598 * @opgroup og_sse_mxcsrsm
6599 * @opxcpttype 5
6600 * @optest op1=0 -> mxcsr=0
6601 * @optest op1=0x2083 -> mxcsr=0x2083
6602 * @optest op1=0xfffffffe -> value.xcpt=0xd
6603 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6604 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6605 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6606 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6607 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6608 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6609 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6610 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6611 */
6612FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6613{
6614 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6615 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6616 return IEMOP_RAISE_INVALID_OPCODE();
6617
6618 IEM_MC_BEGIN(2, 0);
6619 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6620 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6623 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6624 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6625 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6626 IEM_MC_END();
6627 return VINF_SUCCESS;
6628}
6629
6630
6631/**
6632 * @opmaps grp15
6633 * @opcode !11/3
6634 * @oppfx none
6635 * @opcpuid sse
6636 * @opgroup og_sse_mxcsrsm
6637 * @opxcpttype 5
6638 * @optest mxcsr=0 -> op1=0
6639 * @optest mxcsr=0x2083 -> op1=0x2083
6640 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6641 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6642 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6643 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6644 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6645 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6646 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6647 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6648 */
6649FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6650{
6651 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6652 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6653 return IEMOP_RAISE_INVALID_OPCODE();
6654
6655 IEM_MC_BEGIN(2, 0);
6656 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6657 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6660 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6661 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6662 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6663 IEM_MC_END();
6664 return VINF_SUCCESS;
6665}
6666
6667
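/*
 * Illustrative sketch, not part of the decoder: ldmxcsr and stmxcsr are
 * what the _mm_setcsr/_mm_getcsr intrinsics compile down to.  Setting
 * flush-to-zero (bit 15) and masking all SIMD FP exceptions (bits 7..12)
 * on the host would look like this (hypothetical snippet):
 */
#if 0
# include <xmmintrin.h>

static void exampleMxcsr(void)
{
    unsigned uMxCsr = _mm_getcsr();             /* compiles to stmxcsr */
    _mm_setcsr(uMxCsr | 0x8000u | 0x1f80u);     /* ldmxcsr: FZ + mask all */
}
#endif

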
6668/**
6669 * @opmaps grp15
6670 * @opcode !11/4
6671 * @oppfx none
6672 * @opcpuid xsave
6673 * @opgroup og_system
6674 * @opxcpttype none
6675 */
6676FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6677{
6678 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6679 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6680 return IEMOP_RAISE_INVALID_OPCODE();
6681
6682 IEM_MC_BEGIN(3, 0);
6683 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6684 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6685 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6688 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6689 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6690 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6691 IEM_MC_END();
6692 return VINF_SUCCESS;
6693}
6694
6695
6696/**
6697 * @opmaps grp15
6698 * @opcode !11/5
6699 * @oppfx none
6700 * @opcpuid xsave
6701 * @opgroup og_system
6702 * @opxcpttype none
6703 */
6704FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6705{
6706 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6707 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6708 return IEMOP_RAISE_INVALID_OPCODE();
6709
6710 IEM_MC_BEGIN(3, 0);
6711 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6712 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6713 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6716 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor loads the state, so mark it for change like fxrstor above. */
6717 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6718 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6719 IEM_MC_END();
6720 return VINF_SUCCESS;
6721}
6722
6723/** Opcode 0x0f 0xae mem/6. */
6724FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6725
6726/**
6727 * @opmaps grp15
6728 * @opcode !11/7
6729 * @oppfx none
6730 * @opcpuid clfsh
6731 * @opgroup og_cachectl
6732 * @optest op1=1 ->
6733 */
6734FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6735{
6736 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6737 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6738 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6739
6740 IEM_MC_BEGIN(2, 0);
6741 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6742 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6745 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6746 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6747 IEM_MC_END();
6748 return VINF_SUCCESS;
6749}
6750
6751/**
6752 * @opmaps grp15
6753 * @opcode !11/7
6754 * @oppfx 0x66
6755 * @opcpuid clflushopt
6756 * @opgroup og_cachectl
6757 * @optest op1=1 ->
6758 */
6759FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6760{
6761 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6762 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6763 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6764
6765 IEM_MC_BEGIN(2, 0);
6766 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6767 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6770 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6771 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6772 IEM_MC_END();
6773 return VINF_SUCCESS;
6774}
6775
6776
6777/** Opcode 0x0f 0xae 11b/5. */
6778FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6779{
6780 RT_NOREF_PV(bRm);
6781 IEMOP_MNEMONIC(lfence, "lfence");
6782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6783 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6784 return IEMOP_RAISE_INVALID_OPCODE();
6785
6786 IEM_MC_BEGIN(0, 0);
6787 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6788 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6789 else
6790 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6791 IEM_MC_ADVANCE_RIP();
6792 IEM_MC_END();
6793 return VINF_SUCCESS;
6794}
6795
6796
6797/** Opcode 0x0f 0xae 11b/6. */
6798FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6799{
6800 RT_NOREF_PV(bRm);
6801 IEMOP_MNEMONIC(mfence, "mfence");
6802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6803 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6804 return IEMOP_RAISE_INVALID_OPCODE();
6805
6806 IEM_MC_BEGIN(0, 0);
6807 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6808 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6809 else
6810 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6811 IEM_MC_ADVANCE_RIP();
6812 IEM_MC_END();
6813 return VINF_SUCCESS;
6814}
6815
6816
6817/** Opcode 0x0f 0xae 11b/7. */
6818FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6819{
6820 RT_NOREF_PV(bRm);
6821 IEMOP_MNEMONIC(sfence, "sfence");
6822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6823 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6824 return IEMOP_RAISE_INVALID_OPCODE();
6825
6826 IEM_MC_BEGIN(0, 0);
6827 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6828 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6829 else
6830 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6831 IEM_MC_ADVANCE_RIP();
6832 IEM_MC_END();
6833 return VINF_SUCCESS;
6834}
6835
6836
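/*
 * Illustrative sketch, not part of the decoder: the three fence bodies
 * above map straight onto the SSE/SSE2 intrinsics, while
 * iemAImpl_alt_mem_fence is the fallback for hosts without them, where a
 * locked read-modify-write traditionally serves as a full barrier.
 * Hypothetical host-side equivalents:
 */
#if 0
# include <emmintrin.h>   /* SSE2; pulls in the SSE header for _mm_sfence */

static void exampleFences(void)
{
    _mm_lfence();   /* load fence  */
    _mm_sfence();   /* store fence */
    _mm_mfence();   /* full fence  */
}
#endif

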
6837/** Opcode 0xf3 0x0f 0xae 11b/0. */
6838FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6839
6840/** Opcode 0xf3 0x0f 0xae 11b/1. */
6841FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6842
6843/** Opcode 0xf3 0x0f 0xae 11b/2. */
6844FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6845
6846/** Opcode 0xf3 0x0f 0xae 11b/3. */
6847FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6848
6849
6850/**
6851 * Group 15 jump table for register variant.
6852 */
6853IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6854{ /* pfx: none, 066h, 0f3h, 0f2h */
6855 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6856 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6857 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6858 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6859 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6860 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6861 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6862 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6863};
6864AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6865
6866
6867/**
6868 * Group 15 jump table for memory variant.
6869 */
6870IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6871{ /* pfx: none, 066h, 0f3h, 0f2h */
6872 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6873 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6874 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6875 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6876 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6877 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6878 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6879 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6880};
6881AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6882
6883
6884/** Opcode 0x0f 0xae. */
6885FNIEMOP_DEF(iemOp_Grp15)
6886{
6887 IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
6888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6889 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6890 /* register, register */
6891 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6892 + pVCpu->iem.s.idxPrefix], bRm);
6893 /* memory, register */
6894 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6895 + pVCpu->iem.s.idxPrefix], bRm);
6896}
6897
6898
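/*
 * Illustrative sketch, not part of the decoder: both tables above are laid
 * out as 8 rows (ModR/M reg field) times 4 columns (none/66h/F3h/F2h
 * mandatory prefix), so the dispatcher indexes them as reg * 4 + prefix.
 * In isolation, with the prefix encoding assumed to match the column order:
 */
#if 0
# include <stdint.h>

/* idxPrefix assumed encoded 0=none, 1=0x66, 2=0xf3, 3=0xf2. */
static unsigned exampleGrp15TableIndex(uint8_t bRm, unsigned idxPrefix)
{
    return ((bRm >> 3) & 7) * 4 + idxPrefix;    /* reg field picks the row */
}
#endif

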
6899/** Opcode 0x0f 0xaf. */
6900FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6901{
6902 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6903 IEMOP_HLP_MIN_386();
6904 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6905 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6906}
6907
6908
6909/** Opcode 0x0f 0xb0. */
6910FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6911{
6912 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6913 IEMOP_HLP_MIN_486();
6914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6915
6916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6917 {
6918 IEMOP_HLP_DONE_DECODING();
6919 IEM_MC_BEGIN(4, 0);
6920 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6921 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6922 IEM_MC_ARG(uint8_t, u8Src, 2);
6923 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6924
6925 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6926 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6927 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6928 IEM_MC_REF_EFLAGS(pEFlags);
6929 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6930 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6931 else
6932 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6933
6934 IEM_MC_ADVANCE_RIP();
6935 IEM_MC_END();
6936 }
6937 else
6938 {
6939 IEM_MC_BEGIN(4, 3);
6940 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6941 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6942 IEM_MC_ARG(uint8_t, u8Src, 2);
6943 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6945 IEM_MC_LOCAL(uint8_t, u8Al);
6946
6947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6948 IEMOP_HLP_DONE_DECODING();
6949 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6950 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6951 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6952 IEM_MC_FETCH_EFLAGS(EFlags);
6953 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6954 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6955 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6956 else
6957 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6958
6959 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6960 IEM_MC_COMMIT_EFLAGS(EFlags);
6961 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6962 IEM_MC_ADVANCE_RIP();
6963 IEM_MC_END();
6964 }
6965 return VINF_SUCCESS;
6966}
6967
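/*
 * Illustrative sketch, not part of the decoder: cmpxchg updates the
 * accumulator, not the destination, on mismatch, which is why the memory
 * path above stores u8Al back to AL unconditionally.  Plain C model of the
 * architectural behaviour (hypothetical helper name):
 */
#if 0
# include <stdbool.h>
# include <stdint.h>

/* Returns the resulting ZF value. */
static bool exampleCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc)
{
    if (*puDst == *puAl)
    {
        *puDst = uSrc;  /* equal:     ZF=1, destination <- source */
        return true;
    }
    *puAl = *puDst;     /* not equal: ZF=0, accumulator <- destination */
    return false;
}
#endif

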
6968/** Opcode 0x0f 0xb1. */
6969FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6970{
6971 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6972 IEMOP_HLP_MIN_486();
6973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6974
6975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6976 {
6977 IEMOP_HLP_DONE_DECODING();
6978 switch (pVCpu->iem.s.enmEffOpSize)
6979 {
6980 case IEMMODE_16BIT:
6981 IEM_MC_BEGIN(4, 0);
6982 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6983 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6984 IEM_MC_ARG(uint16_t, u16Src, 2);
6985 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6986
6987 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6988 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6989 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6990 IEM_MC_REF_EFLAGS(pEFlags);
6991 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6992 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6993 else
6994 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6995
6996 IEM_MC_ADVANCE_RIP();
6997 IEM_MC_END();
6998 return VINF_SUCCESS;
6999
7000 case IEMMODE_32BIT:
7001 IEM_MC_BEGIN(4, 0);
7002 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7003 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7004 IEM_MC_ARG(uint32_t, u32Src, 2);
7005 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7006
7007 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7008 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7009 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7010 IEM_MC_REF_EFLAGS(pEFlags);
7011 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7012 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7013 else
7014 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7015
7016 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7017 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7018 IEM_MC_ADVANCE_RIP();
7019 IEM_MC_END();
7020 return VINF_SUCCESS;
7021
7022 case IEMMODE_64BIT:
7023 IEM_MC_BEGIN(4, 0);
7024 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7025 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7026#ifdef RT_ARCH_X86
7027 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7028#else
7029 IEM_MC_ARG(uint64_t, u64Src, 2);
7030#endif
7031 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7032
7033 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7034 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7035 IEM_MC_REF_EFLAGS(pEFlags);
7036#ifdef RT_ARCH_X86
7037 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7038 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7039 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7040 else
7041 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7042#else
7043 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7044 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7045 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7046 else
7047 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7048#endif
7049
7050 IEM_MC_ADVANCE_RIP();
7051 IEM_MC_END();
7052 return VINF_SUCCESS;
7053
7054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7055 }
7056 }
7057 else
7058 {
7059 switch (pVCpu->iem.s.enmEffOpSize)
7060 {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
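
/*
 * For orientation, the accumulator compare-and-exchange performed by the
 * iemAImpl_cmpxchg_u* workers above boils down to (a minimal sketch, not the
 * actual assembly implementation):
 *
 *      if (*puDst == *puAx)    // equal: ZF=1, store the source operand
 *          *puDst = uSrc;
 *      else                    // not equal: ZF=0, load dst into the accumulator
 *          *puAx = *puDst;
 *
 * which is why the accumulator is passed by reference and written back with
 * IEM_MC_STORE_GREG_U* after the call.
 */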


FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
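
/*
 * Memory layout consumed by the helper above (standard far pointer format):
 * the offset comes first at GCPtrEff, followed by the 16-bit selector at
 * GCPtrEff + 2/4/8 depending on the operand size. Sketched in flat-pointer
 * terms for the 32-bit case (the real fetches of course go through the IEM
 * memory interface and segmentation):
 *
 *      uint32_t offSeg = *(uint32_t *)GCPtrEff;        // offset, 4 bytes
 *      uint16_t uSel   = *(uint16_t *)(GCPtrEff + 4);  // selector, 2 bytes
 */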


/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
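
/*
 * A worked example of the ModR/M field extraction used throughout this file:
 * for bRm = 0xd8 (11 011 000b) with REX.R set, the register operand index is
 * ((0xd8 >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg
 * = 3 | 8 = 11 (r11/xmm11), while (bRm & X86_MODRM_RM_MASK) | uRexB selects
 * the r/m operand. (Assuming the usual shift/mask values of 3 and 7, and that
 * uRexReg/uRexB hold the REX.R/REX.B bits pre-shifted to bit 3.)
 */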


/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);


/**
 * @opcode 0xb9
 * @opinvalid intel-modrm
 * @optest ->
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the
     * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to
     * iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}


/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
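
/*
 * Bit test reference (a sketch of what the g_iemAImpl_bt* workers do for the
 * immediate forms above, not the actual assembly): the bit offset is the
 * immediate masked to the operand width, and CF receives the selected bit,
 * e.g. for bt Ev,Ib with 32-bit operand size:
 *
 *      uint8_t const iBit = u8Bit & 0x1f;
 *      fEFlags = (fEFlags & ~X86_EFL_CF) | ((uDst >> iBit) & 1);   // CF is bit 0
 *
 * bts/btr/btc additionally set/clear/toggle the selected bit in the
 * destination, which is why only BT maps the memory operand read-only.
 */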


/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}


/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}


/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);


/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}


/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);


/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
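
/*
 * XADD in a nutshell (a sketch of the iemAImpl_xadd_u* semantics, not the
 * actual worker): exchange and add in one step, with flags set by the add:
 *
 *      uTmp   = *puDst;
 *      *puDst = uTmp + *puReg;     // flags as for ADD
 *      *puReg = uTmp;              // old destination value
 *
 * This is why the memory form above works on a register copy and stores it
 * back only after the memory operand has been committed.
 */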


/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);


/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */


/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
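
/*
 * CMPXCHG8B reference semantics (a sketch, not the locked worker):
 *
 *      if (*pu64MemDst == EDX:EAX) { *pu64MemDst = ECX:EBX; ZF = 1; }
 *      else                        { EDX:EAX = *pu64MemDst; ZF = 0; }
 *
 * hence the EDX:EAX pair is mapped as one RTUINT64U above and only written
 * back when ZF ends up clear.
 */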


/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is
                     multiple accesses that are not atomic at all, which works
                     fine in a uni-CPU guest configuration (ignoring DMA). If
                     guest SMP is active we have no choice but to use a
                     rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
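
/*
 * CMPXCHG16B follows the cmpxchg8b pattern widened to RDX:RAX / RCX:RBX, with
 * one extra wrinkle sketched here: unlike cmpxchg8b, the memory operand must
 * be 16-byte aligned:
 *
 *      if (GCPtrEffDst & 15)
 *          raise #GP(0);            // IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED above
 *      if (*pu128MemDst == RDX:RAX) { *pu128MemDst = RCX:RBX; ZF = 1; }
 *      else                         { RDX:RAX = *pu128MemDst; ZF = 0; }
 */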

FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
{
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
    return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
}

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);


/**
 * Group 9 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
{   /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);


/**
 * Group 9 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
{   /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);


/** Opcode 0x0f 0xc7. */
FNIEMOP_DEF(iemOp_Grp9)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                             + pVCpu->iem.s.idxPrefix], bRm);
}
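
/*
 * Jump table indexing example: each /r row holds four entries, one per
 * mandatory prefix (none, 066h, 0f3h, 0f2h), so the dispatch index is
 * reg * 4 + idxPrefix. E.g. "66 0f c7 /6" on a memory operand gives
 * 6 * 4 + 1 = 25, i.e. iemOp_Grp9_vmclear_Mq in g_apfnGroup9MemReg.
 */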


/**
 * Common 'bswap register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
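
/*
 * BSWAP reference (a sketch of what iemAImpl_bswap_u32 computes): a straight
 * byte reversal of the 32-bit register,
 *
 *      uDst = (uDst << 24) | ((uDst & 0xff00) << 8)
 *           | ((uDst >> 8) & 0xff00) | (uDst >> 24);
 *
 * The 16-bit form is documented as undefined, which is why the 16-bit case
 * above hands the register to a dedicated u16 worker through a 32-bit
 * reference without touching the high dword.
 */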


/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! The Intel manuals state that R8-R15 can be accessed by using a
       REX.X prefix; it appears REX.B is the correct prefix. For a parallel
       case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xca. */
FNIEMOP_DEF(iemOp_bswap_rDX_r10)
{
    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcb. */
FNIEMOP_DEF(iemOp_bswap_rBX_r11)
{
    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}


/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
FNIEMOP_STUB(iemOp_psrlw_Vx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
FNIEMOP_STUB(iemOp_paddq_Vx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */

/**
 * @opcode 0xd6
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype none
 * @optest op1=-1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
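
/*
 * The register form above stores via IEM_MC_STORE_XREG_U64_ZX_U128, i.e.
 * sketching the effect on the destination XMM register:
 *
 *      pDst->au64[0] = uSrc;
 *      pDst->au64[1] = 0;      // upper half zeroed for movq Wq,Vq reg,reg
 *
 * whereas the memory form is a plain 64-bit store.
 */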


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f3
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udf30fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f3
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f2
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
 * @optest op1=-42 op2=0xfedcba9876543210
 *         -> op1=0xfedcba9876543210 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udf20fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f2
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}

/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  The REX.W is only necessary once VLMAX > 256
     *        and opcode modifications are made to work with the whole width (not
     *        just 128). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
    /* Docs say register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
    {
        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
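
/*
 * What iemAImpl_pmovmskb_u64 computes (a sketch, not the actual worker):
 * gather the sign bit of each of the 8 source bytes into the low byte of
 * the destination GREG:
 *
 *      uint64_t uDst = 0;
 *      for (unsigned i = 0; i < 8; i++)
 *          uDst |= ((uSrc >> (i * 8 + 7)) & 1) << i;
 *
 * The u128 variant below does the same over 16 bytes.
 */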

/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  The REX.W is only necessary once VLMAX > 256
     *        and opcode modifications are made to work with the whole width (not
     *        just 128). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
    /* Docs say register only. */
8684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8685 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8686 {
8687 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8688 IEM_MC_BEGIN(2, 0);
8689 IEM_MC_ARG(uint64_t *, pDst, 0);
8690 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8691 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8692 IEM_MC_PREPARE_SSE_USAGE();
8693 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8694 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8695 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8696 IEM_MC_ADVANCE_RIP();
8697 IEM_MC_END();
8698 return VINF_SUCCESS;
8699 }
8700 return IEMOP_RAISE_INVALID_OPCODE();
8701}

/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */


/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
FNIEMOP_STUB(iemOp_psubusb_Vx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
FNIEMOP_STUB(iemOp_pand_Vx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
FNIEMOP_STUB(iemOp_psraw_Vx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);


/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
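
/* For reference, not part of the build: the IEM_MC block above boils down to
 * an ordinary 64-bit data store; the non-temporal hint of MOVNTQ only affects
 * cache allocation on real hardware, never the architectural result.  A
 * minimal sketch, assuming a hypothetical helper for reading the MMX
 * register:
 * @code
 *      uint64_t const uSrc = iemGetMRegU64(pVCpu, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); // hypothetical helper
 *      return iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); // plain store
 * @endcode
 */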

/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
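
/* Note, for reference: unlike the MMX variant above, this store is alignment
 * checked - IEM_MC_STORE_MEM_U128_ALIGN_SSE is expected to raise \#GP(0) when
 * GCPtrEffSrc is not 16-byte aligned, matching real hardware behaviour for
 * MOVNTDQ.  The check amounts to something like this (illustration only):
 * @code
 *      if (GCPtrEffSrc & 15)
 *          return iemRaiseGeneralProtectionFault0(pVCpu);
 * @endcode
 */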

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, W */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
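
/* For reference: both pxor encodings defer to the common full-width worker
 * dispatchers, with g_iemAImpl_pxor supplying the actual operation.  That
 * operation is a plain bitwise XOR; a minimal sketch of the 64-bit worker
 * semantics (illustrative signature, not necessarily the exact one):
 * @code
 *      static void pxorU64Sketch(uint64_t *puDst, uint64_t const *puSrc)
 *      {
 *          *puDst ^= *puSrc; // the 128-bit form simply XORs both 64-bit halves
 *      }
 * @endcode
 */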

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
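
/* Decoding note, for reference: Intel CPUs fetch and decode the ModR/M byte
 * (and any displacement bytes) of UD0 before raising \#UD, which is why the
 * effective address is calculated above for the Intel vendor only; other
 * vendors treat the instruction as just the two opcode bytes.  Illustrative
 * encodings (hypothetical operands):
 * @code
 *      0F FF 05 44 33 22 11    ; ud0 eax, [11223344h] - 7 bytes decoded on Intel
 *      0F FF                   ; only these 2 bytes are consumed elsewhere
 * @endcode
 */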


/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*          no prefix, 066h prefix f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
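
/* For reference: the two-byte escape dispatcher is expected to index this
 * table as [opcode * 4 + prefix], where the prefix index is 0 for no prefix,
 * 1 for 066h, 2 for 0f3h and 3 for 0f2h - hence the 256 * 4 = 1024 entry
 * assertion above.  A minimal sketch of the lookup (illustrative):
 * @code
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 * @endcode
 */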

/** @} */
