VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ r67004

Last change on this file: r67004, checked in by vboxsync, 8 years ago:

IEM: movq Vq,Eq & movd Vd,Ed docs+tests+fixes.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 67004 2017-05-22 10:20:28Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for group 6 opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
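    /* ModR/M is laid out as mod[7:6] reg[5:3] r/m[2:0]; the reg field picks
       one of the eight group 6 workers in the table above. */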
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
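                /* The undefined high MSW bits are returned as ones on older
                   CPUs: the 386 sets bits 15:5 (0xffe0, keeping ET visible),
                   the 286 sets bits 15:4 (0xfff0); 486+ return CR0 as-is. */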
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits are used. */
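    /* Note that lmsw can set CR0.PE but cannot clear it; clearing PE requires
       a mov to CR0 instead. */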
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
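    /* Memory forms dispatch on the reg field alone via the table above;
       register forms (mod=3) encode individual instructions through the
       reg and r/m fields combined. */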
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
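                /* The 32-bit and 64-bit operand size variants share the
                   64-bit worker, iemCImpl_LarLsl_u64. */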
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}

// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

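        /* movups tolerates unaligned operands, hence the plain fetch rather
           than the alignment-checking one used by the aligned packed moves. */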
        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
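    /* One opcode, two instructions: the register form encodes movhlps, the
       memory form movlps (see the @opcodesub docs below). */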
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

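        /* movsldup replicates the even dwords of the source:
           dst[0] = dst[1] = src[0], dst[2] = dst[3] = src[2]. */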
        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *              op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

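        /* movddup replicates the low qword of the source into both qwords
           of the destination. */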
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic  udf30f13
 * @opcode      0x13
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f13
 * @opcode      0x13
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic  udf30f14
 * @opcode      0x14
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f14
 * @opcode      0x14
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic  udf30f15
 * @opcode      0x15
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f15
 * @opcode      0x15
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
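    /* One opcode, two instructions: the register form encodes movlhps, the
       memory form movhps (see the @opcodesub docs below). */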
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x16
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x16
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

1950 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1951 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1952 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1953
1954 IEM_MC_ADVANCE_RIP();
1955 IEM_MC_END();
1956 }
1957 else
1958 {
1959 /*
1960 * Register, memory.
1961 */
1962 IEM_MC_BEGIN(2, 2);
1963 IEM_MC_LOCAL(RTUINT128U, uSrc);
1964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1965 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1966 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1967
1968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1971 IEM_MC_PREPARE_SSE_USAGE();
1972
1973 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1974 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1975 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1976
1977 IEM_MC_ADVANCE_RIP();
1978 IEM_MC_END();
1979 }
1980 return VINF_SUCCESS;
1981}
1982
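/*
 * A rough plain C model of what MOVSHDUP computes (illustrative sketch only;
 * the XMMREG type is made up): each odd-indexed source dword is duplicated
 * into the pair of destination dwords below and at its position.
 *
 *      #include <stdint.h>
 *      typedef struct { uint32_t au32[4]; } XMMREG;
 *
 *      static void movshdup(XMMREG *pDst, XMMREG const *pSrc)
 *      {
 *          pDst->au32[0] = pSrc->au32[1];
 *          pDst->au32[1] = pSrc->au32[1];
 *          pDst->au32[2] = pSrc->au32[3];
 *          pDst->au32[3] = pSrc->au32[3];
 *      }
 *
 * This matches the @optest above: 0x00000002_dddddddd_00000001_eeeeeeee
 * becomes 0x00000002_00000002_00000001_00000001.
 */
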
/**
 * @opdone
 * @opmnemonic udf20f16
 * @opcode 0x16
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opdone
 * @opmnemonic udf30f17
 * @opcode 0x17
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f17
 * @opcode 0x17
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}

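/*
 * A note on the ModR/M decoding pattern used throughout this file: bRm packs
 * mod (bits 7:6), reg (bits 5:3) and r/m (bits 2:0), and mod == 3 selects the
 * register form while anything else selects a memory form.  Roughly, in plain
 * C (illustrative sketch only):
 *
 *      #include <stdint.h>
 *
 *      static void DecodeModRm(uint8_t bRm, uint8_t *pMod, uint8_t *pReg, uint8_t *pRm)
 *      {
 *          *pMod = bRm >> 6;           // X86_MODRM_MOD_MASK / X86_MODRM_MOD_SHIFT
 *          *pReg = (bRm >> 3) & 7;     // X86_MODRM_REG_SHIFT / X86_MODRM_REG_SMASK
 *          *pRm  = bRm & 7;            // X86_MODRM_RM_MASK
 *      }
 *
 * The reg and r/m fields are then widened to 4 bits by ORing in uRexReg /
 * uRexB (REX.R / REX.B) wherever general purpose or XMM registers are
 * addressed.
 */
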
/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}

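/*
 * The CR8-via-LOCK trick above in short (illustrative sketch only): on CPUs
 * reporting the "mov CR8 in 32-bit mode" feature, a LOCK prefix on a
 * mov-from/to-CRn adds 8 to the control register index encoded in the reg
 * field, so e.g. "lock mov eax, cr0" reads CR8.
 *
 *      #include <stdint.h>
 *      #include <stdbool.h>
 *
 *      static int CalcCrReg(uint8_t bRm, bool fLock, bool fHasMovCr8In32Bit)
 *      {
 *          int iCrReg = (bRm >> 3) & 7;
 *          if (fLock)
 *          {
 *              if (!fHasMovCr8In32Bit)
 *                  return -1;          // #UD
 *              iCrReg |= 8;
 *          }
 *          return iCrReg;              // caller still checks for 0/2/3/4/8
 *      }
 */
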
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x28
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x28
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/**
 * @opcode 0x29
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x29
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT


/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse1_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}


/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);


/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)


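/*
 * For reference, the architectural behaviour the CMOV_X expansions implement,
 * as a rough plain C sketch (illustrative only): the move happens when the
 * condition holds, but with 32-bit operand size the high half of the 64-bit
 * destination is cleared even when the condition is false; hence the
 * IEM_MC_CLEAR_HIGH_GREG_U64 in the else branch of the 32-bit cases.
 *
 *      #include <stdint.h>
 *      #include <stdbool.h>
 *
 *      static void cmov32(uint64_t *pDst64, uint32_t uSrc32, bool fCond)
 *      {
 *          if (fCond)
 *              *pDst64 = uSrc32;       // implicit zero extension
 *          else
 *              *pDst64 &= UINT32_MAX;  // high half cleared regardless
 *      }
 *
 *      static void cmov64(uint64_t *pDst, uint64_t uSrc, bool fCond)
 *      {
 *          if (fCond)
 *              *pDst = uSrc;           // otherwise left completely untouched
 *      }
 */
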
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X

/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);

/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_STUB(iemOp_andps_Vps_Wps);
/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_STUB(iemOp_orps_Vps_Wps);
/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_STUB(iemOp_addps_Vps_Wps);
/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_STUB(iemOp_addss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);

/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);

/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_STUB(iemOp_subps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_STUB(iemOp_subss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);

/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_STUB(iemOp_minps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_STUB(iemOp_minss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);

/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_STUB(iemOp_divps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_STUB(iemOp_divss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);

/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);

/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

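/*
 * To make the low-halves-to-full interleaving concrete, here is a rough plain
 * C model of PUNPCKLBW on MMX operands (illustrative sketch only): the low
 * four bytes of each operand are interleaved, destination bytes first.
 *
 *      #include <stdint.h>
 *
 *      static uint64_t punpcklbw(uint64_t uDst, uint32_t uSrc)
 *      {
 *          uint64_t uResult = 0;
 *          for (unsigned i = 0; i < 4; i++)
 *          {
 *              uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);                  // even bytes from dst
 *              uResult |= (uint64_t)((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8);    // odd bytes from src
 *          }
 *          return uResult;
 *      }
 */
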
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/* Opcode 0xf3 0x0f 0x62 - invalid */


/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
FNIEMOP_STUB(iemOp_packuswb_Vx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the high half of a register, which in the memory case
 * means a 64-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the high half of a register, which in the memory case
 * means a 128-bit aligned access where the implementation may read the full
 * 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

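/*
 * The high-half counterpart, again as a rough plain C sketch (illustrative
 * only), here for PUNPCKHBW on 64-bit MMX operands: like the low-low worker
 * but sourcing bytes 4..7 of each operand.
 *
 *      #include <stdint.h>
 *
 *      static uint64_t punpckhbw(uint64_t uDst, uint64_t uSrc)
 *      {
 *          uint64_t uResult = 0;
 *          for (unsigned i = 0; i < 4; i++)
 *          {
 *              uResult |= ((uDst >> ((i + 4) * 8)) & 0xff) << (i * 16);
 *              uResult |= ((uSrc >> ((i + 4) * 8)) & 0xff) << (i * 16 + 8);
 *          }
 *          return uResult;
 *      }
 */
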
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}

/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
/* Opcode 0xf3 0x0f 0x68 - invalid */


/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}

/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
/* Opcode 0xf3 0x0f 0x69 - invalid */


/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
/* Opcode 0xf3 0x0f 0x6a - invalid */


/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x6b - invalid */


/* Opcode 0x0f 0x6c - invalid */

/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}

/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */

/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}

/* Opcode 0xf3 0x0f 0x6d - invalid */


FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x6e
         * @opcodesub rex.w=1
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x6e
         * @opcodesub rex.w=0
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Pd_Ey
         * @optest op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MMX, greg */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x6e
         * @opcodesub rex.w=1
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamov
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42
         * @oponly
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x6e
         * @opcodesub rex.w=0
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamov
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Vy_Ey
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @oponly
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* XMM, greg32 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

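/*
 * The VdZx/VqZx forms above zero the rest of the destination XMM register.
 * Rough plain C sketch (illustrative only; the XMMREG type is made up):
 *
 *      #include <stdint.h>
 *      typedef struct { uint64_t au64[2]; } XMMREG;
 *
 *      static void movd_xmm_r32(XMMREG *pDst, uint32_t uSrc)
 *      {
 *          pDst->au64[0] = uSrc;   // zero extended to 64 bits
 *          pDst->au64[1] = 0;      // and the high qword is cleared too
 *      }
 *
 *      static void movq_xmm_r64(XMMREG *pDst, uint64_t uSrc)
 *      {
 *          pDst->au64[0] = uSrc;
 *          pDst->au64[1] = 0;
 *      }
 */
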
/* Opcode 0xf3 0x0f 0x6e - invalid */


/** Opcode 0x0f 0x6f - movq Pq, Qq */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

3797/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3798FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3799{
3800 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3803 {
3804 /*
3805 * Register, register.
3806 */
3807 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809
3810 IEM_MC_BEGIN(3, 0);
3811 IEM_MC_ARG(uint64_t *, pDst, 0);
3812 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3813 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3814 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3815 IEM_MC_PREPARE_FPU_USAGE();
3816 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3817 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3818 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3819 IEM_MC_ADVANCE_RIP();
3820 IEM_MC_END();
3821 }
3822 else
3823 {
3824 /*
3825 * Register, memory.
3826 */
3827 IEM_MC_BEGIN(3, 2);
3828 IEM_MC_ARG(uint64_t *, pDst, 0);
3829 IEM_MC_LOCAL(uint64_t, uSrc);
3830 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3832
3833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3834 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3835 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3837 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3838
3839 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3840 IEM_MC_PREPARE_FPU_USAGE();
3841 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3842 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3843
3844 IEM_MC_ADVANCE_RIP();
3845 IEM_MC_END();
3846 }
3847 return VINF_SUCCESS;
3848}
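
/* Note on the memory forms of the pshuf* decoders: the shuffle immediate is
   fetched only after IEM_MC_CALC_RM_EFF_ADDR, and the effective-address
   calculation is told that one immediate byte still follows (the trailing
   argument of 1), so that RIP-relative disp32 addressing in 64-bit mode is
   measured from the end of the instruction. */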
3849
3850/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3851FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3852{
3853 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3856 {
3857 /*
3858 * Register, register.
3859 */
3860 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862
3863 IEM_MC_BEGIN(3, 0);
3864 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3865 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3866 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3867 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3868 IEM_MC_PREPARE_SSE_USAGE();
3869 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3870 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3871 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3872 IEM_MC_ADVANCE_RIP();
3873 IEM_MC_END();
3874 }
3875 else
3876 {
3877 /*
3878 * Register, memory.
3879 */
3880 IEM_MC_BEGIN(3, 2);
3881 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3882 IEM_MC_LOCAL(RTUINT128U, uSrc);
3883 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3885
3886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3887 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3888 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3891
3892 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3893 IEM_MC_PREPARE_SSE_USAGE();
3894 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3895 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3896
3897 IEM_MC_ADVANCE_RIP();
3898 IEM_MC_END();
3899 }
3900 return VINF_SUCCESS;
3901}
3902
3903/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3904FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3905{
3906 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3908 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3909 {
3910 /*
3911 * Register, register.
3912 */
3913 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3915
3916 IEM_MC_BEGIN(3, 0);
3917 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3918 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3919 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3920 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3921 IEM_MC_PREPARE_SSE_USAGE();
3922 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3923 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3924 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3925 IEM_MC_ADVANCE_RIP();
3926 IEM_MC_END();
3927 }
3928 else
3929 {
3930 /*
3931 * Register, memory.
3932 */
3933 IEM_MC_BEGIN(3, 2);
3934 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3935 IEM_MC_LOCAL(RTUINT128U, uSrc);
3936 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3938
3939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3940 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3941 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3943 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3944
3945 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3946 IEM_MC_PREPARE_SSE_USAGE();
3947 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3948 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3949
3950 IEM_MC_ADVANCE_RIP();
3951 IEM_MC_END();
3952 }
3953 return VINF_SUCCESS;
3954}
3955
3956/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3957FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3958{
3959 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3962 {
3963 /*
3964 * Register, register.
3965 */
3966 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3968
3969 IEM_MC_BEGIN(3, 0);
3970 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3971 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3972 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3973 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3974 IEM_MC_PREPARE_SSE_USAGE();
3975 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3976 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3977 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3978 IEM_MC_ADVANCE_RIP();
3979 IEM_MC_END();
3980 }
3981 else
3982 {
3983 /*
3984 * Register, memory.
3985 */
3986 IEM_MC_BEGIN(3, 2);
3987 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3988 IEM_MC_LOCAL(RTUINT128U, uSrc);
3989 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3991
3992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3993 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3994 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3996 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3997
3998 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3999 IEM_MC_PREPARE_SSE_USAGE();
4000 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4001 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4002
4003 IEM_MC_ADVANCE_RIP();
4004 IEM_MC_END();
4005 }
4006 return VINF_SUCCESS;
4007}
4008
4009
4010/** Opcode 0x0f 0x71 11/2. */
4011FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4012
4013/** Opcode 0x66 0x0f 0x71 11/2. */
4014FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4015
4016/** Opcode 0x0f 0x71 11/4. */
4017FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4018
4019/** Opcode 0x66 0x0f 0x71 11/4. */
4020FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4021
4022/** Opcode 0x0f 0x71 11/6. */
4023FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4024
4025/** Opcode 0x66 0x0f 0x71 11/6. */
4026FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4027
4028
4029/**
4030 * Group 12 jump table for register variant.
4031 */
4032IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4033{
4034 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4035 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4036 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4037 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4038 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4039 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4040 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4041 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4042};
4043AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4044
4045
4046/** Opcode 0x0f 0x71. */
4047FNIEMOP_DEF(iemOp_Grp12)
4048{
4049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4050 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4051 /* register, register */
4052 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4053 + pVCpu->iem.s.idxPrefix], bRm);
4054 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4055}
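
/* The Grp12/13/14 register-variant tables are laid out with four entries per
   /r value, one per mandatory prefix, which is why the dispatchers index them
   as reg * 4 + idxPrefix.  Assuming the usual prefix numbering (0 = none,
   1 = 0x66, 2 = 0xF3, 3 = 0xF2), 0x66 0x0f 0x71 /2 resolves to
   g_apfnGroup12RegReg[2 * 4 + 1], i.e. iemOp_Grp12_psrlw_Ux_Ib. */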
4056
4057
4058/** Opcode 0x0f 0x72 11/2. */
4059FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4060
4061/** Opcode 0x66 0x0f 0x72 11/2. */
4062FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4063
4064/** Opcode 0x0f 0x72 11/4. */
4065FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4066
4067/** Opcode 0x66 0x0f 0x72 11/4. */
4068FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4069
4070/** Opcode 0x0f 0x72 11/6. */
4071FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4072
4073/** Opcode 0x66 0x0f 0x72 11/6. */
4074FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4075
4076
4077/**
4078 * Group 13 jump table for register variant.
4079 */
4080IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4081{
4082 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4083 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4084 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4085 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4086 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4087 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4088 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4089 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4090};
4091AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4092
4093/** Opcode 0x0f 0x72. */
4094FNIEMOP_DEF(iemOp_Grp13)
4095{
4096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4097 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4098 /* register, register */
4099 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4100 + pVCpu->iem.s.idxPrefix], bRm);
4101 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4102}
4103
4104
4105/** Opcode 0x0f 0x73 11/2. */
4106FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4107
4108/** Opcode 0x66 0x0f 0x73 11/2. */
4109FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4110
4111/** Opcode 0x66 0x0f 0x73 11/3. */
4112FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4113
4114/** Opcode 0x0f 0x73 11/6. */
4115FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4116
4117/** Opcode 0x66 0x0f 0x73 11/6. */
4118FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4119
4120/** Opcode 0x66 0x0f 0x73 11/7. */
4121FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4122
4123/**
4124 * Group 14 jump table for register variant.
4125 */
4126IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4127{
4128 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4129 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4130 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4131 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4132 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4133 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4134 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4135 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4136};
4137AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4138
4139
4140/** Opcode 0x0f 0x73. */
4141FNIEMOP_DEF(iemOp_Grp14)
4142{
4143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4144 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4145 /* register, register */
4146 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4147 + pVCpu->iem.s.idxPrefix], bRm);
4148 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4149}
4150
4151
4152/**
4153 * Common worker for MMX instructions of the form:
4154 * pxxx mm1, mm2/mem64
4155 */
4156FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4157{
4158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4160 {
4161 /*
4162 * Register, register.
4163 */
4164 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4165 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4167 IEM_MC_BEGIN(2, 0);
4168 IEM_MC_ARG(uint64_t *, pDst, 0);
4169 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4170 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4171 IEM_MC_PREPARE_FPU_USAGE();
4172 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4173 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4174 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4175 IEM_MC_ADVANCE_RIP();
4176 IEM_MC_END();
4177 }
4178 else
4179 {
4180 /*
4181 * Register, memory.
4182 */
4183 IEM_MC_BEGIN(2, 2);
4184 IEM_MC_ARG(uint64_t *, pDst, 0);
4185 IEM_MC_LOCAL(uint64_t, uSrc);
4186 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4188
4189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4191 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4192 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4193
4194 IEM_MC_PREPARE_FPU_USAGE();
4195 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4196 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4197
4198 IEM_MC_ADVANCE_RIP();
4199 IEM_MC_END();
4200 }
4201 return VINF_SUCCESS;
4202}
4203
4204
4205/**
4206 * Common worker for SSE2 instructions of the form:
4207 * pxxx xmm1, xmm2/mem128
4208 *
4209 * Proper alignment of the 128-bit operand is enforced.
4210 * Exceptions type 4. SSE2 cpuid checks.
4211 */
4212FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4213{
4214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4215 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4216 {
4217 /*
4218 * Register, register.
4219 */
4220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4221 IEM_MC_BEGIN(2, 0);
4222 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4223 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4224 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4225 IEM_MC_PREPARE_SSE_USAGE();
4226 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4227 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4228 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4229 IEM_MC_ADVANCE_RIP();
4230 IEM_MC_END();
4231 }
4232 else
4233 {
4234 /*
4235 * Register, memory.
4236 */
4237 IEM_MC_BEGIN(2, 2);
4238 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4239 IEM_MC_LOCAL(RTUINT128U, uSrc);
4240 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4242
4243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4245 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4246 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4247
4248 IEM_MC_PREPARE_SSE_USAGE();
4249 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4250 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4251
4252 IEM_MC_ADVANCE_RIP();
4253 IEM_MC_END();
4254 }
4255 return VINF_SUCCESS;
4256}
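
/* With the two workers above, the pcmpeq* decoders below reduce to one-line
   forwards, e.g.:
       return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
   only the mnemonic registration differs between the opcodes. */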
4257
4258
4259/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4260FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4261{
4262 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4263 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4264}
4265
4266/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4267FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4268{
4269 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4270 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4271}
4272
4273/* Opcode 0xf3 0x0f 0x74 - invalid */
4274/* Opcode 0xf2 0x0f 0x74 - invalid */
4275
4276
4277/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4278FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4279{
4280 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4281 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4282}
4283
4284/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4285FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4286{
4287 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4288 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4289}
4290
4291/* Opcode 0xf3 0x0f 0x75 - invalid */
4292/* Opcode 0xf2 0x0f 0x75 - invalid */
4293
4294
4295/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4296FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4297{
4298 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4299 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4300}
4301
4302/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4303FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4304{
4305 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4306 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4307}
4308
4309/* Opcode 0xf3 0x0f 0x76 - invalid */
4310/* Opcode 0xf2 0x0f 0x76 - invalid */
4311
4312
4313/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4314FNIEMOP_STUB(iemOp_emms);
4315/* Opcode 0x66 0x0f 0x77 - invalid */
4316/* Opcode 0xf3 0x0f 0x77 - invalid */
4317/* Opcode 0xf2 0x0f 0x77 - invalid */
4318
4319/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4320FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4321/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4322FNIEMOP_STUB(iemOp_AmdGrp17);
4323/* Opcode 0xf3 0x0f 0x78 - invalid */
4324/* Opcode 0xf2 0x0f 0x78 - invalid */
4325
4326/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4327FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4328/* Opcode 0x66 0x0f 0x79 - invalid */
4329/* Opcode 0xf3 0x0f 0x79 - invalid */
4330/* Opcode 0xf2 0x0f 0x79 - invalid */
4331
4332/* Opcode 0x0f 0x7a - invalid */
4333/* Opcode 0x66 0x0f 0x7a - invalid */
4334/* Opcode 0xf3 0x0f 0x7a - invalid */
4335/* Opcode 0xf2 0x0f 0x7a - invalid */
4336
4337/* Opcode 0x0f 0x7b - invalid */
4338/* Opcode 0x66 0x0f 0x7b - invalid */
4339/* Opcode 0xf3 0x0f 0x7b - invalid */
4340/* Opcode 0xf2 0x0f 0x7b - invalid */
4341
4342/* Opcode 0x0f 0x7c - invalid */
4343/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4344FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4345/* Opcode 0xf3 0x0f 0x7c - invalid */
4346/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4347FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4348
4349/* Opcode 0x0f 0x7d - invalid */
4350/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4351FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4352/* Opcode 0xf3 0x0f 0x7d - invalid */
4353/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4354FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4355
4356
4357/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4358FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4359{
4360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4361 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4362 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4363 else
4364 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4365 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4366 {
4367 /* greg, MMX */
4368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4369 IEM_MC_BEGIN(0, 1);
4370 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4371 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4372 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4373 {
4374 IEM_MC_LOCAL(uint64_t, u64Tmp);
4375 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4376 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4377 }
4378 else
4379 {
4380 IEM_MC_LOCAL(uint32_t, u32Tmp);
4381 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4382 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4383 }
4384 IEM_MC_ADVANCE_RIP();
4385 IEM_MC_END();
4386 }
4387 else
4388 {
4389 /* [mem], MMX */
4390 IEM_MC_BEGIN(0, 2);
4391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4394 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4395 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4396 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4397 {
4398 IEM_MC_LOCAL(uint64_t, u64Tmp);
4399 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4400 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4401 }
4402 else
4403 {
4404 IEM_MC_LOCAL(uint32_t, u32Tmp);
4405 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4406 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4407 }
4408 IEM_MC_ADVANCE_RIP();
4409 IEM_MC_END();
4410 }
4411 return VINF_SUCCESS;
4412}
4413
4414/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4415FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4416{
4417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4418 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4419 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4420 else
4421 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4422 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4423 {
4424 /* greg, XMM */
4425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4426 IEM_MC_BEGIN(0, 1);
4427 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4428 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4429 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4430 {
4431 IEM_MC_LOCAL(uint64_t, u64Tmp);
4432 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4433 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4434 }
4435 else
4436 {
4437 IEM_MC_LOCAL(uint32_t, u32Tmp);
4438 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4439 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4440 }
4441 IEM_MC_ADVANCE_RIP();
4442 IEM_MC_END();
4443 }
4444 else
4445 {
4446 /* [mem], XMM */
4447 IEM_MC_BEGIN(0, 2);
4448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4453 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4454 {
4455 IEM_MC_LOCAL(uint64_t, u64Tmp);
4456 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4457 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4458 }
4459 else
4460 {
4461 IEM_MC_LOCAL(uint32_t, u32Tmp);
4462 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4463 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4464 }
4465 IEM_MC_ADVANCE_RIP();
4466 IEM_MC_END();
4467 }
4468 return VINF_SUCCESS;
4469}
4470
4471
4472/**
4473 * @opcode 0x7e
4475 * @oppfx 0xf3
4476 * @opcpuid sse2
4477 * @opgroup og_sse2_pcksclr_datamove
4478 * @opxcpttype 5
4479 * @optest op1=1 op2=2 -> op1=2
4480 * @optest op1=0 op2=-42 -> op1=-42
4481 */
4482FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4483{
4484 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4486 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4487 {
4488 /*
4489 * Register, register.
4490 */
4491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4492 IEM_MC_BEGIN(0, 2);
4493 IEM_MC_LOCAL(uint64_t, uSrc);
4494
4495 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4496 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4497
4498 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4499 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4500
4501 IEM_MC_ADVANCE_RIP();
4502 IEM_MC_END();
4503 }
4504 else
4505 {
4506 /*
4507 * Memory, register.
4508 */
4509 IEM_MC_BEGIN(0, 2);
4510 IEM_MC_LOCAL(uint64_t, uSrc);
4511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4512
4513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4515 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4517
4518 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4519 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4520
4521 IEM_MC_ADVANCE_RIP();
4522 IEM_MC_END();
4523 }
4524 return VINF_SUCCESS;
4525}
4526
4527/* Opcode 0xf2 0x0f 0x7e - invalid */
4528
4529
4530/** Opcode 0x0f 0x7f - movq Qq, Pq */
4531FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4532{
4533 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4535 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4536 {
4537 /*
4538 * Register, register.
4539 */
4540 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4541 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4543 IEM_MC_BEGIN(0, 1);
4544 IEM_MC_LOCAL(uint64_t, u64Tmp);
4545 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4546 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4547 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4548 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4549 IEM_MC_ADVANCE_RIP();
4550 IEM_MC_END();
4551 }
4552 else
4553 {
4554 /*
4555 * Register, memory.
4556 */
4557 IEM_MC_BEGIN(0, 2);
4558 IEM_MC_LOCAL(uint64_t, u64Tmp);
4559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4560
4561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4563 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4564 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4565
4566 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4567 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4568
4569 IEM_MC_ADVANCE_RIP();
4570 IEM_MC_END();
4571 }
4572 return VINF_SUCCESS;
4573}
4574
4575/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4576FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4577{
4578 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4579 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4580 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4581 {
4582 /*
4583 * Register, register.
4584 */
4585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4586 IEM_MC_BEGIN(0, 0);
4587 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4588 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4589 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4590 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4591 IEM_MC_ADVANCE_RIP();
4592 IEM_MC_END();
4593 }
4594 else
4595 {
4596 /*
4597 * Register, memory.
4598 */
4599 IEM_MC_BEGIN(0, 2);
4600 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4602
4603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4605 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4607
4608 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4609 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4610
4611 IEM_MC_ADVANCE_RIP();
4612 IEM_MC_END();
4613 }
4614 return VINF_SUCCESS;
4615}
4616
4617/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4618FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4619{
4620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4621 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4622 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4623 {
4624 /*
4625 * Register, register.
4626 */
4627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4628 IEM_MC_BEGIN(0, 0);
4629 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4630 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4631 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4632 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4633 IEM_MC_ADVANCE_RIP();
4634 IEM_MC_END();
4635 }
4636 else
4637 {
4638 /*
4639 * Register, memory.
4640 */
4641 IEM_MC_BEGIN(0, 2);
4642 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4644
4645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4647 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4648 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4649
4650 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4651 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4652
4653 IEM_MC_ADVANCE_RIP();
4654 IEM_MC_END();
4655 }
4656 return VINF_SUCCESS;
4657}
4658
4659/* Opcode 0xf2 0x0f 0x7f - invalid */
4660
4661
4662
4663/** Opcode 0x0f 0x80. */
4664FNIEMOP_DEF(iemOp_jo_Jv)
4665{
4666 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4667 IEMOP_HLP_MIN_386();
4668 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4669 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4670 {
4671 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4673
4674 IEM_MC_BEGIN(0, 0);
4675 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4676 IEM_MC_REL_JMP_S16(i16Imm);
4677 } IEM_MC_ELSE() {
4678 IEM_MC_ADVANCE_RIP();
4679 } IEM_MC_ENDIF();
4680 IEM_MC_END();
4681 }
4682 else
4683 {
4684 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4686
4687 IEM_MC_BEGIN(0, 0);
4688 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4689 IEM_MC_REL_JMP_S32(i32Imm);
4690 } IEM_MC_ELSE() {
4691 IEM_MC_ADVANCE_RIP();
4692 } IEM_MC_ENDIF();
4693 IEM_MC_END();
4694 }
4695 return VINF_SUCCESS;
4696}
4697
4698
4699/** Opcode 0x0f 0x81. */
4700FNIEMOP_DEF(iemOp_jno_Jv)
4701{
4702 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4703 IEMOP_HLP_MIN_386();
4704 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4705 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4706 {
4707 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4709
4710 IEM_MC_BEGIN(0, 0);
4711 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4712 IEM_MC_ADVANCE_RIP();
4713 } IEM_MC_ELSE() {
4714 IEM_MC_REL_JMP_S16(i16Imm);
4715 } IEM_MC_ENDIF();
4716 IEM_MC_END();
4717 }
4718 else
4719 {
4720 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4722
4723 IEM_MC_BEGIN(0, 0);
4724 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4725 IEM_MC_ADVANCE_RIP();
4726 } IEM_MC_ELSE() {
4727 IEM_MC_REL_JMP_S32(i32Imm);
4728 } IEM_MC_ENDIF();
4729 IEM_MC_END();
4730 }
4731 return VINF_SUCCESS;
4732}
4733
4734
4735/** Opcode 0x0f 0x82. */
4736FNIEMOP_DEF(iemOp_jc_Jv)
4737{
4738 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4739 IEMOP_HLP_MIN_386();
4740 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4741 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4742 {
4743 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4745
4746 IEM_MC_BEGIN(0, 0);
4747 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4748 IEM_MC_REL_JMP_S16(i16Imm);
4749 } IEM_MC_ELSE() {
4750 IEM_MC_ADVANCE_RIP();
4751 } IEM_MC_ENDIF();
4752 IEM_MC_END();
4753 }
4754 else
4755 {
4756 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4758
4759 IEM_MC_BEGIN(0, 0);
4760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4761 IEM_MC_REL_JMP_S32(i32Imm);
4762 } IEM_MC_ELSE() {
4763 IEM_MC_ADVANCE_RIP();
4764 } IEM_MC_ENDIF();
4765 IEM_MC_END();
4766 }
4767 return VINF_SUCCESS;
4768}
4769
4770
4771/** Opcode 0x0f 0x83. */
4772FNIEMOP_DEF(iemOp_jnc_Jv)
4773{
4774 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4775 IEMOP_HLP_MIN_386();
4776 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4777 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4778 {
4779 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4781
4782 IEM_MC_BEGIN(0, 0);
4783 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4784 IEM_MC_ADVANCE_RIP();
4785 } IEM_MC_ELSE() {
4786 IEM_MC_REL_JMP_S16(i16Imm);
4787 } IEM_MC_ENDIF();
4788 IEM_MC_END();
4789 }
4790 else
4791 {
4792 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4794
4795 IEM_MC_BEGIN(0, 0);
4796 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4797 IEM_MC_ADVANCE_RIP();
4798 } IEM_MC_ELSE() {
4799 IEM_MC_REL_JMP_S32(i32Imm);
4800 } IEM_MC_ENDIF();
4801 IEM_MC_END();
4802 }
4803 return VINF_SUCCESS;
4804}
4805
4806
4807/** Opcode 0x0f 0x84. */
4808FNIEMOP_DEF(iemOp_je_Jv)
4809{
4810 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4811 IEMOP_HLP_MIN_386();
4812 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4813 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4814 {
4815 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4817
4818 IEM_MC_BEGIN(0, 0);
4819 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4820 IEM_MC_REL_JMP_S16(i16Imm);
4821 } IEM_MC_ELSE() {
4822 IEM_MC_ADVANCE_RIP();
4823 } IEM_MC_ENDIF();
4824 IEM_MC_END();
4825 }
4826 else
4827 {
4828 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4830
4831 IEM_MC_BEGIN(0, 0);
4832 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4833 IEM_MC_REL_JMP_S32(i32Imm);
4834 } IEM_MC_ELSE() {
4835 IEM_MC_ADVANCE_RIP();
4836 } IEM_MC_ENDIF();
4837 IEM_MC_END();
4838 }
4839 return VINF_SUCCESS;
4840}
4841
4842
4843/** Opcode 0x0f 0x85. */
4844FNIEMOP_DEF(iemOp_jne_Jv)
4845{
4846 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4847 IEMOP_HLP_MIN_386();
4848 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4849 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4850 {
4851 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4853
4854 IEM_MC_BEGIN(0, 0);
4855 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4856 IEM_MC_ADVANCE_RIP();
4857 } IEM_MC_ELSE() {
4858 IEM_MC_REL_JMP_S16(i16Imm);
4859 } IEM_MC_ENDIF();
4860 IEM_MC_END();
4861 }
4862 else
4863 {
4864 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4866
4867 IEM_MC_BEGIN(0, 0);
4868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4869 IEM_MC_ADVANCE_RIP();
4870 } IEM_MC_ELSE() {
4871 IEM_MC_REL_JMP_S32(i32Imm);
4872 } IEM_MC_ENDIF();
4873 IEM_MC_END();
4874 }
4875 return VINF_SUCCESS;
4876}
4877
4878
4879/** Opcode 0x0f 0x86. */
4880FNIEMOP_DEF(iemOp_jbe_Jv)
4881{
4882 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4883 IEMOP_HLP_MIN_386();
4884 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4885 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4886 {
4887 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4889
4890 IEM_MC_BEGIN(0, 0);
4891 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4892 IEM_MC_REL_JMP_S16(i16Imm);
4893 } IEM_MC_ELSE() {
4894 IEM_MC_ADVANCE_RIP();
4895 } IEM_MC_ENDIF();
4896 IEM_MC_END();
4897 }
4898 else
4899 {
4900 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4902
4903 IEM_MC_BEGIN(0, 0);
4904 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4905 IEM_MC_REL_JMP_S32(i32Imm);
4906 } IEM_MC_ELSE() {
4907 IEM_MC_ADVANCE_RIP();
4908 } IEM_MC_ENDIF();
4909 IEM_MC_END();
4910 }
4911 return VINF_SUCCESS;
4912}
4913
4914
4915/** Opcode 0x0f 0x87. */
4916FNIEMOP_DEF(iemOp_jnbe_Jv)
4917{
4918 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4919 IEMOP_HLP_MIN_386();
4920 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4921 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4922 {
4923 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4925
4926 IEM_MC_BEGIN(0, 0);
4927 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4928 IEM_MC_ADVANCE_RIP();
4929 } IEM_MC_ELSE() {
4930 IEM_MC_REL_JMP_S16(i16Imm);
4931 } IEM_MC_ENDIF();
4932 IEM_MC_END();
4933 }
4934 else
4935 {
4936 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4938
4939 IEM_MC_BEGIN(0, 0);
4940 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4941 IEM_MC_ADVANCE_RIP();
4942 } IEM_MC_ELSE() {
4943 IEM_MC_REL_JMP_S32(i32Imm);
4944 } IEM_MC_ENDIF();
4945 IEM_MC_END();
4946 }
4947 return VINF_SUCCESS;
4948}
4949
4950
4951/** Opcode 0x0f 0x88. */
4952FNIEMOP_DEF(iemOp_js_Jv)
4953{
4954 IEMOP_MNEMONIC(js_Jv, "js Jv");
4955 IEMOP_HLP_MIN_386();
4956 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4957 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4958 {
4959 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4961
4962 IEM_MC_BEGIN(0, 0);
4963 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4964 IEM_MC_REL_JMP_S16(i16Imm);
4965 } IEM_MC_ELSE() {
4966 IEM_MC_ADVANCE_RIP();
4967 } IEM_MC_ENDIF();
4968 IEM_MC_END();
4969 }
4970 else
4971 {
4972 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4974
4975 IEM_MC_BEGIN(0, 0);
4976 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4977 IEM_MC_REL_JMP_S32(i32Imm);
4978 } IEM_MC_ELSE() {
4979 IEM_MC_ADVANCE_RIP();
4980 } IEM_MC_ENDIF();
4981 IEM_MC_END();
4982 }
4983 return VINF_SUCCESS;
4984}
4985
4986
4987/** Opcode 0x0f 0x89. */
4988FNIEMOP_DEF(iemOp_jns_Jv)
4989{
4990 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4991 IEMOP_HLP_MIN_386();
4992 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4993 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4994 {
4995 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4997
4998 IEM_MC_BEGIN(0, 0);
4999 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5000 IEM_MC_ADVANCE_RIP();
5001 } IEM_MC_ELSE() {
5002 IEM_MC_REL_JMP_S16(i16Imm);
5003 } IEM_MC_ENDIF();
5004 IEM_MC_END();
5005 }
5006 else
5007 {
5008 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5010
5011 IEM_MC_BEGIN(0, 0);
5012 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5013 IEM_MC_ADVANCE_RIP();
5014 } IEM_MC_ELSE() {
5015 IEM_MC_REL_JMP_S32(i32Imm);
5016 } IEM_MC_ENDIF();
5017 IEM_MC_END();
5018 }
5019 return VINF_SUCCESS;
5020}
5021
5022
5023/** Opcode 0x0f 0x8a. */
5024FNIEMOP_DEF(iemOp_jp_Jv)
5025{
5026 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5027 IEMOP_HLP_MIN_386();
5028 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5029 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5030 {
5031 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5033
5034 IEM_MC_BEGIN(0, 0);
5035 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5036 IEM_MC_REL_JMP_S16(i16Imm);
5037 } IEM_MC_ELSE() {
5038 IEM_MC_ADVANCE_RIP();
5039 } IEM_MC_ENDIF();
5040 IEM_MC_END();
5041 }
5042 else
5043 {
5044 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5046
5047 IEM_MC_BEGIN(0, 0);
5048 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5049 IEM_MC_REL_JMP_S32(i32Imm);
5050 } IEM_MC_ELSE() {
5051 IEM_MC_ADVANCE_RIP();
5052 } IEM_MC_ENDIF();
5053 IEM_MC_END();
5054 }
5055 return VINF_SUCCESS;
5056}
5057
5058
5059/** Opcode 0x0f 0x8b. */
5060FNIEMOP_DEF(iemOp_jnp_Jv)
5061{
5062 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5063 IEMOP_HLP_MIN_386();
5064 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5065 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5066 {
5067 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5069
5070 IEM_MC_BEGIN(0, 0);
5071 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5072 IEM_MC_ADVANCE_RIP();
5073 } IEM_MC_ELSE() {
5074 IEM_MC_REL_JMP_S16(i16Imm);
5075 } IEM_MC_ENDIF();
5076 IEM_MC_END();
5077 }
5078 else
5079 {
5080 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5082
5083 IEM_MC_BEGIN(0, 0);
5084 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5085 IEM_MC_ADVANCE_RIP();
5086 } IEM_MC_ELSE() {
5087 IEM_MC_REL_JMP_S32(i32Imm);
5088 } IEM_MC_ENDIF();
5089 IEM_MC_END();
5090 }
5091 return VINF_SUCCESS;
5092}
5093
5094
5095/** Opcode 0x0f 0x8c. */
5096FNIEMOP_DEF(iemOp_jl_Jv)
5097{
5098 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5099 IEMOP_HLP_MIN_386();
5100 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5101 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5102 {
5103 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5105
5106 IEM_MC_BEGIN(0, 0);
5107 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5108 IEM_MC_REL_JMP_S16(i16Imm);
5109 } IEM_MC_ELSE() {
5110 IEM_MC_ADVANCE_RIP();
5111 } IEM_MC_ENDIF();
5112 IEM_MC_END();
5113 }
5114 else
5115 {
5116 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5118
5119 IEM_MC_BEGIN(0, 0);
5120 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5121 IEM_MC_REL_JMP_S32(i32Imm);
5122 } IEM_MC_ELSE() {
5123 IEM_MC_ADVANCE_RIP();
5124 } IEM_MC_ENDIF();
5125 IEM_MC_END();
5126 }
5127 return VINF_SUCCESS;
5128}
5129
5130
5131/** Opcode 0x0f 0x8d. */
5132FNIEMOP_DEF(iemOp_jnl_Jv)
5133{
5134 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5135 IEMOP_HLP_MIN_386();
5136 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5137 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5138 {
5139 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5141
5142 IEM_MC_BEGIN(0, 0);
5143 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5144 IEM_MC_ADVANCE_RIP();
5145 } IEM_MC_ELSE() {
5146 IEM_MC_REL_JMP_S16(i16Imm);
5147 } IEM_MC_ENDIF();
5148 IEM_MC_END();
5149 }
5150 else
5151 {
5152 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5154
5155 IEM_MC_BEGIN(0, 0);
5156 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5157 IEM_MC_ADVANCE_RIP();
5158 } IEM_MC_ELSE() {
5159 IEM_MC_REL_JMP_S32(i32Imm);
5160 } IEM_MC_ENDIF();
5161 IEM_MC_END();
5162 }
5163 return VINF_SUCCESS;
5164}
5165
5166
5167/** Opcode 0x0f 0x8e. */
5168FNIEMOP_DEF(iemOp_jle_Jv)
5169{
5170 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5171 IEMOP_HLP_MIN_386();
5172 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5173 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5174 {
5175 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5177
5178 IEM_MC_BEGIN(0, 0);
5179 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5180 IEM_MC_REL_JMP_S16(i16Imm);
5181 } IEM_MC_ELSE() {
5182 IEM_MC_ADVANCE_RIP();
5183 } IEM_MC_ENDIF();
5184 IEM_MC_END();
5185 }
5186 else
5187 {
5188 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5190
5191 IEM_MC_BEGIN(0, 0);
5192 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5193 IEM_MC_REL_JMP_S32(i32Imm);
5194 } IEM_MC_ELSE() {
5195 IEM_MC_ADVANCE_RIP();
5196 } IEM_MC_ENDIF();
5197 IEM_MC_END();
5198 }
5199 return VINF_SUCCESS;
5200}
5201
5202
5203/** Opcode 0x0f 0x8f. */
5204FNIEMOP_DEF(iemOp_jnle_Jv)
5205{
5206 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5207 IEMOP_HLP_MIN_386();
5208 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5209 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5210 {
5211 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5213
5214 IEM_MC_BEGIN(0, 0);
5215 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5216 IEM_MC_ADVANCE_RIP();
5217 } IEM_MC_ELSE() {
5218 IEM_MC_REL_JMP_S16(i16Imm);
5219 } IEM_MC_ENDIF();
5220 IEM_MC_END();
5221 }
5222 else
5223 {
5224 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5226
5227 IEM_MC_BEGIN(0, 0);
5228 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5229 IEM_MC_ADVANCE_RIP();
5230 } IEM_MC_ELSE() {
5231 IEM_MC_REL_JMP_S32(i32Imm);
5232 } IEM_MC_ENDIF();
5233 IEM_MC_END();
5234 }
5235 return VINF_SUCCESS;
5236}
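
/* All the 0x0f 0x80..0x8f decoders above follow one pattern: fetch a 16-bit or
   32-bit displacement based on the effective operand size (64-bit mode defaults
   to 64-bit operand size but still uses a 32-bit displacement), then either
   IEM_MC_REL_JMP_S16/S32 when the condition holds or IEM_MC_ADVANCE_RIP when it
   does not. */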
5237
5238
5239/** Opcode 0x0f 0x90. */
5240FNIEMOP_DEF(iemOp_seto_Eb)
5241{
5242 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5243 IEMOP_HLP_MIN_386();
5244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5245
5246 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5247 * any way. AMD says it's "unused", whatever that means. We're
5248 * ignoring for now. */
5249 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5250 {
5251 /* register target */
5252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5253 IEM_MC_BEGIN(0, 0);
5254 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5255 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5256 } IEM_MC_ELSE() {
5257 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5258 } IEM_MC_ENDIF();
5259 IEM_MC_ADVANCE_RIP();
5260 IEM_MC_END();
5261 }
5262 else
5263 {
5264 /* memory target */
5265 IEM_MC_BEGIN(0, 1);
5266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5269 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5270 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5271 } IEM_MC_ELSE() {
5272 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5273 } IEM_MC_ENDIF();
5274 IEM_MC_ADVANCE_RIP();
5275 IEM_MC_END();
5276 }
5277 return VINF_SUCCESS;
5278}
5279
5280
5281/** Opcode 0x0f 0x91. */
5282FNIEMOP_DEF(iemOp_setno_Eb)
5283{
5284 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5285 IEMOP_HLP_MIN_386();
5286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5287
5288 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5289 * any way. AMD says it's "unused", whatever that means. We're
5290 * ignoring for now. */
5291 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5292 {
5293 /* register target */
5294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5295 IEM_MC_BEGIN(0, 0);
5296 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5297 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5298 } IEM_MC_ELSE() {
5299 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5300 } IEM_MC_ENDIF();
5301 IEM_MC_ADVANCE_RIP();
5302 IEM_MC_END();
5303 }
5304 else
5305 {
5306 /* memory target */
5307 IEM_MC_BEGIN(0, 1);
5308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5311 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5312 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5313 } IEM_MC_ELSE() {
5314 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5315 } IEM_MC_ENDIF();
5316 IEM_MC_ADVANCE_RIP();
5317 IEM_MC_END();
5318 }
5319 return VINF_SUCCESS;
5320}
5321
5322
5323/** Opcode 0x0f 0x92. */
5324FNIEMOP_DEF(iemOp_setc_Eb)
5325{
5326 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5327 IEMOP_HLP_MIN_386();
5328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5329
5330 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5331 * any way. AMD says it's "unused", whatever that means. We're
5332 * ignoring for now. */
5333 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5334 {
5335 /* register target */
5336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5337 IEM_MC_BEGIN(0, 0);
5338 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5339 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5340 } IEM_MC_ELSE() {
5341 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5342 } IEM_MC_ENDIF();
5343 IEM_MC_ADVANCE_RIP();
5344 IEM_MC_END();
5345 }
5346 else
5347 {
5348 /* memory target */
5349 IEM_MC_BEGIN(0, 1);
5350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5353 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5354 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5355 } IEM_MC_ELSE() {
5356 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5357 } IEM_MC_ENDIF();
5358 IEM_MC_ADVANCE_RIP();
5359 IEM_MC_END();
5360 }
5361 return VINF_SUCCESS;
5362}
5363
5364
5365/** Opcode 0x0f 0x93. */
5366FNIEMOP_DEF(iemOp_setnc_Eb)
5367{
5368 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5369 IEMOP_HLP_MIN_386();
5370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5371
5372 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5373 * any way. AMD says it's "unused", whatever that means. We're
5374 * ignoring for now. */
5375 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5376 {
5377 /* register target */
5378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5379 IEM_MC_BEGIN(0, 0);
5380 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5381 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5382 } IEM_MC_ELSE() {
5383 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5384 } IEM_MC_ENDIF();
5385 IEM_MC_ADVANCE_RIP();
5386 IEM_MC_END();
5387 }
5388 else
5389 {
5390 /* memory target */
5391 IEM_MC_BEGIN(0, 1);
5392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5395 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5396 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5397 } IEM_MC_ELSE() {
5398 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5399 } IEM_MC_ENDIF();
5400 IEM_MC_ADVANCE_RIP();
5401 IEM_MC_END();
5402 }
5403 return VINF_SUCCESS;
5404}
5405
5406
5407/** Opcode 0x0f 0x94. */
5408FNIEMOP_DEF(iemOp_sete_Eb)
5409{
5410 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5411 IEMOP_HLP_MIN_386();
5412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5413
5414 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5415 * any way. AMD says it's "unused", whatever that means. We're
5416 * ignoring for now. */
5417 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5418 {
5419 /* register target */
5420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5421 IEM_MC_BEGIN(0, 0);
5422 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5423 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5424 } IEM_MC_ELSE() {
5425 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5426 } IEM_MC_ENDIF();
5427 IEM_MC_ADVANCE_RIP();
5428 IEM_MC_END();
5429 }
5430 else
5431 {
5432 /* memory target */
5433 IEM_MC_BEGIN(0, 1);
5434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5437 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5438 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5439 } IEM_MC_ELSE() {
5440 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5441 } IEM_MC_ENDIF();
5442 IEM_MC_ADVANCE_RIP();
5443 IEM_MC_END();
5444 }
5445 return VINF_SUCCESS;
5446}


/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common 'push segment-register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
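    /** @todo Double-check this assertion: push fs/gs (0x0f 0xa0/0xa8) are
     *        valid in 64-bit mode, yet the condition above reads as if it
     *        would trigger for them there. */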
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
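
/* Note: the 32-bit case above uses IEM_MC_PUSH_U32_SREG rather than a plain
   IEM_MC_PUSH_U32.  The distinct MC exists because pushing a segment register
   with a 32-bit operand size does not behave like an ordinary 32-bit push on
   real hardware (several CPUs only write the low 16 bits of the stack slot);
   see the IEM_MC_PUSH_U32_SREG implementation for the exact behaviour. */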


/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}


/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}


/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
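        /*
         * Note: for a memory destination the bit offset is not limited to the
         * operand width.  The SAR + SHL pair in each case below turns the
         * signed bit offset into a byte displacement of whole operand-sized
         * units, which is added to the effective address, while the residual
         * bit index is masked (0x0f/0x1f/0x3f) so it stays within the operand
         * that is actually mapped.
         */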
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int16_t, i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int64_t, i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
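
/* Note: in the memory variant above the effective address is calculated with
   cbImm=1 (the last parameter to IEM_MC_CALC_RM_EFF_ADDR) so that RIP-relative
   addressing in 64-bit mode accounts for the immediate shift-count byte which
   is fetched after the ModR/M operand. */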


/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
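
/* Note: unlike the Ib variant, the CL variant fetches the shift count from
   the xCX register at execution time, so the memory form calculates its
   effective address with cbImm=0 and there is no extra opcode byte to read. */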



/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}


/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xaa. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC(rsm, "rsm");
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
    /** @todo rsm - for the regular case (above handles only the SVM nested-guest
     *        intercept). */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}


/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
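
/* Note: fxsave only needs the FPU state actualized for reading, whereas
   fxrstor overwrites it and therefore actualizes it for a change; the actual
   saving/loading is done by the iemCImpl_fxsave/iemCImpl_fxrstor workers. */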


/**
 * @opmaps      grp15
 * @opcode      !11/2
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_mxcsrsm
 * @opxcpttype  5
 * @optest      op1=0      -> mxcsr=0
 * @optest      op1=0x2083 -> mxcsr=0x2083
 * @optest      op1=0xfffffffe -> value.xcpt=0xd
 * @optest      op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest      op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest      op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
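    /** @todo ldmxcsr writes MXCSR, so (like fxrstor above) this might need to
     *        actualize the SSE state for a change rather than for reading. */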
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps      grp15
 * @opcode      !11/3
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_mxcsrsm
 * @opxcpttype  5
 * @optest      mxcsr=0      -> op1=0
 * @optest      mxcsr=0x2083 -> op1=0x2083
 * @optest      mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest      mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest      mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest      mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest      mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps      grp15
 * @opcode      !11/4
 * @oppfx       none
 * @opcpuid     xsave
 * @opgroup     og_system
 * @opxcpttype  none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps      grp15
 * @opcode      !11/5
 * @oppfx       none
 * @opcpuid     xsave
 * @opgroup     og_system
 * @opxcpttype  none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
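    /** @todo xrstor loads new state, so this probably ought to actualize the
     *        FPU state for a change, the way fxrstor does above. */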
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/**
 * @opmaps      grp15
 * @opcode      !11/7
 * @oppfx       none
 * @opcpuid     clfsh
 * @opgroup     og_cachectl
 * @optest      op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/**
 * @opmaps      grp15
 * @opcode      !11/7
 * @oppfx       0x66
 * @opcpuid     clflushopt
 * @opgroup     og_cachectl
 * @optest      op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
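
/* Note: when the host CPU itself has SSE2 the real fence instruction is
   executed (iemAImpl_lfence and friends); otherwise iemAImpl_alt_mem_fence
   provides an alternative memory fence so the guest-visible ordering still
   holds on older hosts.  The same pattern is used by mfence and sfence
   below. */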


/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);


/**
 * Group 15 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                066h,                0f3h,                 0f2h */
    /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,  iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,  iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,  iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);


/**
 * Group 15 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                  066h,                   0f3h,                0f2h */
    /* /0 */ iemOp_Grp15_fxsave,    iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,     iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,    iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,   iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);


/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                              + pVCpu->iem.s.idxPrefix], bRm);
}
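
/* Note: the group 15 tables above are indexed by modrm.reg * 4 + prefix
   index, where the prefix index is 0 for no prefix, 1 for 0x66, 2 for 0xf3
   and 3 for 0xf2, matching the column order given in the table comments. */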


/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}


/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
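
/* Note: in the memory variant above, AL is buffered in a local, passed to the
   cmpxchg worker by reference, and written back unconditionally; on a
   successful compare the buffered value is unchanged, so this is equivalent
   to updating AL only when the compare fails. */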

/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7159
7160
7161FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7162{
7163 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7164 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7165
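    /* The far pointer is stored in memory as the offset (2, 4 or 8 bytes,
       matching the operand size) followed by the 16-bit selector, which is
       why the selector is fetched at disp 2/4/8 below. */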
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);


/**
 * @opcode 0xb9
 * @opinvalid intel-modrm
 * @optest ->
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the
     * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to
     * iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}


/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
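    /* Note! For the Ib forms the bit offset wraps around within the operand
       width (the immediate is masked by 15, 31 or 63 below), so unlike the
       Ev,Gv forms no effective address adjustment is needed for the memory
       variants. */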

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}


/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}


/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);


/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}


/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);


/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
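    /* XADD exchanges the two operands and stores their sum in the destination,
       i.e. tmp = dst; dst += src; src = tmp. */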

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);


/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
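    /* Note! The non-temporal hint is advisory only, so emulating MOVNTI as an
             ordinary store is architecturally fine. */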
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */


/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

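    /* CMPXCHG8B compares EDX:EAX with the memory qword: if equal, ZF is set
       and ECX:EBX is written to memory; otherwise ZF is cleared and the memory
       value is loaded into EDX:EAX. */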
    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
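        /* A misaligned memory operand raises #GP(0) rather than #AC. */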
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 uses
               multiple accesses and is not at all atomic, which works fine in a
               uni-CPU guest configuration (ignoring DMA). If guest SMP is active
               we have no choice but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}

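/** Dispatches group 9 /1 between cmpxchg8b and cmpxchg16b based on REX.W. */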
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
{
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
    return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
}

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);


/**
 * Group 9 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
{ /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);


/**
 * Group 9 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
{ /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);


/** Opcode 0x0f 0xc7. */
FNIEMOP_DEF(iemOp_Grp9)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
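    /* Both tables are indexed by modrm.reg * 4 + the prefix index, which
       presumably follows the table columns above: none=0, 066h=1, 0f3h=2,
       0f2h=3. */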
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                             + pVCpu->iem.s.idxPrefix], bRm);
}


/**
 * Common 'bswap register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
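            /* Note! BSWAP with a 16-bit operand is documented as undefined;
                     see iemAImpl_bswap_u16 for the behaviour implemented here. */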
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! The Intel manuals state that R8-R15 can be accessed by using a
       REX.X prefix; it appears, however, that REX.B is the correct prefix.
       For a parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xca. */
FNIEMOP_DEF(iemOp_bswap_rDX_r10)
{
    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcb. */
FNIEMOP_DEF(iemOp_bswap_rBX_r11)
{
    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}


/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
FNIEMOP_STUB(iemOp_psrlw_Vx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
FNIEMOP_STUB(iemOp_paddq_Vx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */

/**
 * @opcode 0xd6
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype none
 * @optest op1=-1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f3
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
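        /* Reading an MMX register switches the FPU to MMX mode, i.e. TOS is
           cleared and all tags are set to valid, which is what the ftw=0xff
           assertions in the @optest lines above check. */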
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udf30fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f3
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f2
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
 * @optest op1=-42 op2=0xfedcba9876543210
 *     -> op1=0xfedcba9876543210 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udf20fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f2
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}

/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  REX.W only becomes necessary once VLMAX > 256
     *        and the opcode is modified to work with the whole width (not
     *        just 128). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
    /* The docs say register only. */
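    /* PMOVMSKB gathers the most significant bit of each source byte into the
       low bits of the destination general purpose register. */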
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
    {
        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  REX.W only becomes necessary once VLMAX > 256
     *        and the opcode is modified to work with the whole width (not
     *        just 128). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
    /* The docs say register only. */
8682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8683 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8684 {
8685 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8686 IEM_MC_BEGIN(2, 0);
8687 IEM_MC_ARG(uint64_t *, pDst, 0);
8688 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8689 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8690 IEM_MC_PREPARE_SSE_USAGE();
8691 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8692 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8693 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8694 IEM_MC_ADVANCE_RIP();
8695 IEM_MC_END();
8696 return VINF_SUCCESS;
8697 }
8698 return IEMOP_RAISE_INVALID_OPCODE();
8699}

/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */


/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_STUB(iemOp_psubusb_Vx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_STUB(iemOp_pand_Vx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_STUB(iemOp_psraw_Vx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);


/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
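
/*
 * Decode note (illustration only, kept out of the build): the mod-field test
 * above is how this file tells the two ModR/M forms apart -- mod == 3 (both
 * top bits set) encodes a register operand, anything else a memory operand,
 * which is why movntq only accepts the latter.  The helper name
 * iemRefModRmIsRegForm is hypothetical.
 */
#if 0
DECLINLINE(bool) iemRefModRmIsRegForm(uint8_t bRm)
{
    /* X86_MODRM_MOD_MASK is 0xc0 and X86_MODRM_MOD_SHIFT is 6. */
    return (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT);
}
#endif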

/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
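
/*
 * Alignment note (illustration only, kept out of the build):
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE above enforces the 16-byte alignment that
 * movntdq requires, raising #GP(0) for an unaligned effective address.  A
 * minimal sketch of that predicate, under the hypothetical name
 * iemRefIsSseStoreAligned:
 */
#if 0
DECLINLINE(bool) iemRefIsSseStoreAligned(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* 16-byte aligned? */
}
#endif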

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
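
/*
 * Reference sketch (illustration only, kept out of the build): both pxor
 * forms route to the common full-width workers via the g_iemAImpl_pxor
 * function table; the operation itself is a plain bitwise XOR across the
 * whole register.  A hypothetical 128-bit variant:
 */
#if 0
DECLINLINE(void) iemRefPXorU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] ^= puSrc->au64[0];
    puDst->au64[1] ^= puSrc->au64[1];
}
#endif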

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
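
/*
 * Decode note: Intel CPUs consume a ModR/M byte (plus whatever SIB byte and
 * displacement it implies) for ud0, while other vendors treat the two opcode
 * bytes alone as the whole instruction.  That is why the code above only
 * fetches the ModR/M byte and calculates the effective address for
 * CPUMCPUVENDOR_INTEL before raising #UD, keeping the reported instruction
 * length in line with the hardware.
 */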



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*         no prefix,                  066h prefix,               f3h prefix,               f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
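
/*
 * Lookup sketch (illustration only, kept out of the build): the map stores
 * four entries per opcode byte, one for each mandatory-prefix column (none,
 * 0x66, 0xf3, 0xf2), which is what the AssertCompile above pins at
 * 256 * 4 = 1024 entries.  A hypothetical dispatch would index it like this:
 */
#if 0
DECLINLINE(PFNIEMOP) iemRefLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix)
{
    Assert(idxPrefix < 4); /* 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif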

/** @} */