VirtualBox source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ r67012

Last change on this file was r67012 (2017-05-22), checked in by vboxsync:
IEM: Implemented vmovd Ed,Vd and vmovq Eq,Vq (VEX.66.0F 7e).

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 67012 2017-05-22 12:26:25Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.215389.xyz. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
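
/*
 * Worked example (illustrative): with LDTR = 0x0028, "sldt eax" yields
 * eax = 0x00000028, the 16-bit selector zero-extended to the operand size,
 * while "sldt word [mem]" always stores just the 16-bit 0x0028 -- which is
 * why the else-branch above has only the single IEM_MC_STORE_MEM_U16 path.
 */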


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR (0x0f 0x00 /4) and VERW (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
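
/*
 * Decode sketch (illustrative, assuming the usual x86 ModR/M layout of
 * mod(7:6), reg(5:3), rm(2:0) behind the X86_MODRM_* macros): for group 6
 * the reg field alone picks the handler, e.g.
 *      bRm=0xd8: mod=3 reg=3 rm=0 -> g_apfnGroup6[3] = iemOp_Grp6_ltr  (ltr ax)
 *      bRm=0x00: mod=0 reg=0 rm=0 -> g_apfnGroup6[0] = iemOp_Grp6_sldt (sldt word [eax])
 */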


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
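
/*
 * Background note (illustrative): the target-CPU dance above reproduces the
 * undefined upper MSW bits -- on a 286 only PE/MP/EM/TS (bits 0-3) exist, so
 * the upper bits are forced to ones (the 0xfff0 OR); the 386 additionally
 * implements ET (bit 4), hence its narrower 0xffe0 mask; later CPUs return
 * the live CR0 bits unmodified.
 */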


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
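
/*
 * Decode sketch (illustrative): with mod == 3, group 7 recycles the rm field
 * to encode operand-less instructions, which is where the familiar fixed
 * byte sequences come from, e.g.
 *      0f 01 c8: mod=3 reg=1 rm=0 -> monitor
 *      0f 01 c9: mod=3 reg=1 rm=1 -> mwait
 *      0f 01 d0: mod=3 reg=2 rm=0 -> xgetbv
 *      0f 01 f8: mod=3 reg=7 rm=0 -> swapgs (64-bit mode only)
 */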

/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
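
/*
 * Worked example (illustrative): a successful "lar eax, cx" sets ZF and
 * loads the access-rights bytes of the descriptor selected by CX into EAX
 * (masked with 0x00fxff00 per the SDM); if the selector fails the type or
 * privilege checks, ZF is cleared and the destination is left untouched.
 */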


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits); remember the IEMOP_HLP_MIN_486() check when doing so. */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
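
/*
 * Note the deliberate asymmetry above (matching the SSE rules): the register
 * form of movss merges, replacing only xmm[31:0], while the memory load form
 * zero-extends the 32-bit value through bit 127 -- hence the split between
 * IEM_MC_STORE_XREG_U32 and IEM_MC_STORE_XREG_U32_ZX_U128. The movsd handler
 * below follows the same pattern with 64-bit elements.
 */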


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); /* reg form merges, no zero extension */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
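
/*
 * Decode note: 0f 12 is really two instructions keyed on the mod field --
 * with mod == 3 there is no memory operand to load, so the encoding is
 * recycled as MOVHLPS (high qword of the source register into the low qword
 * of the destination); 0f 16 below mirrors this with MOVHPS/MOVLHPS.
 */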
1475
1476
1477/**
1478 * @opcode 0x12
1479 * @opcodesub !11 mr/reg
1480 * @oppfx 0x66
1481 * @opcpuid sse2
1482 * @opgroup og_sse2_pcksclr_datamove
1483 * @opxcpttype 5
1484 * @optest op1=1 op2=2 -> op1=2
1485 * @optest op1=0 op2=-42 -> op1=-42
1486 */
1487FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1488{
1489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1490 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1491 {
1492 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1493
1494 IEM_MC_BEGIN(0, 2);
1495 IEM_MC_LOCAL(uint64_t, uSrc);
1496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1497
1498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1500 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1502
1503 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1504 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1505
1506 IEM_MC_ADVANCE_RIP();
1507 IEM_MC_END();
1508 return VINF_SUCCESS;
1509 }
1510
1511 /**
1512 * @opdone
1513 * @opmnemonic ud660f12m3
1514 * @opcode 0x12
1515 * @opcodesub 11 mr/reg
1516 * @oppfx 0x66
1517 * @opunused immediate
1518 * @opcpuid sse
1519 * @optest ->
1520 */
1521 return IEMOP_RAISE_INVALID_OPCODE();
1522}
1523
1524
1525/**
1526 * @opcode 0x12
1527 * @oppfx 0xf3
1528 * @opcpuid sse3
1529 * @opgroup og_sse3_pcksclr_datamove
1530 * @opxcpttype 4
1531 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1532 * op1=0x00000002000000020000000100000001
1533 */
1534FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1535{
1536 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1538 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1539 {
1540 /*
1541 * Register, register.
1542 */
1543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1544 IEM_MC_BEGIN(2, 0);
1545 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1546 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1547
1548 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1549 IEM_MC_PREPARE_SSE_USAGE();
1550
1551 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1552 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1553 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1554
1555 IEM_MC_ADVANCE_RIP();
1556 IEM_MC_END();
1557 }
1558 else
1559 {
1560 /*
1561 * Register, memory.
1562 */
1563 IEM_MC_BEGIN(2, 2);
1564 IEM_MC_LOCAL(RTUINT128U, uSrc);
1565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1566 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1567 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1568
1569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1571 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1572 IEM_MC_PREPARE_SSE_USAGE();
1573
1574 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1575 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1576 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1577
1578 IEM_MC_ADVANCE_RIP();
1579 IEM_MC_END();
1580 }
1581 return VINF_SUCCESS;
1582}
1583
1584
1585/**
1586 * @opcode 0x12
1587 * @oppfx 0xf2
1588 * @opcpuid sse3
1589 * @opgroup og_sse3_pcksclr_datamove
1590 * @opxcpttype 5
1591 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1592 * op1=0x22222222111111112222222211111111
1593 */
1594FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1595{
1596 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1599 {
1600 /*
1601 * Register, register.
1602 */
1603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1604 IEM_MC_BEGIN(2, 0);
1605 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1606 IEM_MC_ARG(uint64_t, uSrc, 1);
1607
1608 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1609 IEM_MC_PREPARE_SSE_USAGE();
1610
1611 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1612 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1613 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1614
1615 IEM_MC_ADVANCE_RIP();
1616 IEM_MC_END();
1617 }
1618 else
1619 {
1620 /*
1621 * Register, memory.
1622 */
1623 IEM_MC_BEGIN(2, 2);
1624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1625 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1626 IEM_MC_ARG(uint64_t, uSrc, 1);
1627
1628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1630 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1631 IEM_MC_PREPARE_SSE_USAGE();
1632
1633 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1634 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1635 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1636
1637 IEM_MC_ADVANCE_RIP();
1638 IEM_MC_END();
1639 }
1640 return VINF_SUCCESS;
1641}
1642
1643
1644/**
1645 * @opcode 0x13
1646 * @opcodesub !11 mr/reg
1647 * @oppfx none
1648 * @opcpuid sse
1649 * @opgroup og_sse_simdfp_datamove
1650 * @opxcpttype 5
1651 * @optest op1=1 op2=2 -> op1=2
1652 * @optest op1=0 op2=-42 -> op1=-42
1653 */
1654FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1655{
1656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1657 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1658 {
1659 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1660
1661 IEM_MC_BEGIN(0, 2);
1662 IEM_MC_LOCAL(uint64_t, uSrc);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664
1665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1669
1670 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1671 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1672
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 return VINF_SUCCESS;
1676 }
1677
1678 /**
1679 * @opdone
1680 * @opmnemonic ud0f13m3
1681 * @opcode 0x13
1682 * @opcodesub 11 mr/reg
1683 * @oppfx none
1684 * @opunused immediate
1685 * @opcpuid sse
1686 * @optest ->
1687 */
1688 return IEMOP_RAISE_INVALID_OPCODE();
1689}
1690
1691
1692/**
1693 * @opcode 0x13
1694 * @opcodesub !11 mr/reg
1695 * @oppfx 0x66
1696 * @opcpuid sse2
1697 * @opgroup og_sse2_pcksclr_datamove
1698 * @opxcpttype 5
1699 * @optest op1=1 op2=2 -> op1=2
1700 * @optest op1=0 op2=-42 -> op1=-42
1701 */
1702FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1703{
1704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1705 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1706 {
1707 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1708 IEM_MC_BEGIN(0, 2);
1709 IEM_MC_LOCAL(uint64_t, uSrc);
1710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1711
1712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1716
1717 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1718 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1719
1720 IEM_MC_ADVANCE_RIP();
1721 IEM_MC_END();
1722 return VINF_SUCCESS;
1723 }
1724
1725 /**
1726 * @opdone
1727 * @opmnemonic ud660f13m3
1728 * @opcode 0x13
1729 * @opcodesub 11 mr/reg
1730 * @oppfx 0x66
1731 * @opunused immediate
1732 * @opcpuid sse
1733 * @optest ->
1734 */
1735 return IEMOP_RAISE_INVALID_OPCODE();
1736}
1737
1738
1739/**
1740 * @opmnemonic udf30f13
1741 * @opcode 0x13
1742 * @oppfx 0xf3
1743 * @opunused intel-modrm
1744 * @opcpuid sse
1745 * @optest ->
1746 * @opdone
1747 */
1748
1749/**
1750 * @opmnemonic udf20f13
1751 * @opcode 0x13
1752 * @oppfx 0xf2
1753 * @opunused intel-modrm
1754 * @opcpuid sse
1755 * @optest ->
1756 * @opdone
1757 */
1758
1759/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1760FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1761/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1762FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1763
1764/**
1765 * @opdone
1766 * @opmnemonic udf30f14
1767 * @opcode 0x14
1768 * @oppfx 0xf3
1769 * @opunused intel-modrm
1770 * @opcpuid sse
1771 * @optest ->
1772 * @opdone
1773 */
1774
1775/**
1776 * @opmnemonic udf20f14
1777 * @opcode 0x14
1778 * @oppfx 0xf2
1779 * @opunused intel-modrm
1780 * @opcpuid sse
1781 * @optest ->
1782 * @opdone
1783 */
1784
1785/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1786FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1787/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1788FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1789/* Opcode 0xf3 0x0f 0x15 - invalid */
1790/* Opcode 0xf2 0x0f 0x15 - invalid */
1791
1792/**
1793 * @opdone
1794 * @opmnemonic udf30f15
1795 * @opcode 0x15
1796 * @oppfx 0xf3
1797 * @opunused intel-modrm
1798 * @opcpuid sse
1799 * @optest ->
1800 * @opdone
1801 */
1802
1803/**
1804 * @opmnemonic udf20f15
1805 * @opcode 0x15
1806 * @oppfx 0xf2
1807 * @opunused intel-modrm
1808 * @opcpuid sse
1809 * @optest ->
1810 * @opdone
1811 */
1812
1813FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1814{
1815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1817 {
1818 /**
1819 * @opcode 0x16
1820 * @opcodesub 11 mr/reg
1821 * @oppfx none
1822 * @opcpuid sse
1823 * @opgroup og_sse_simdfp_datamove
1824 * @opxcpttype 5
1825 * @optest op1=1 op2=2 -> op1=2
1826 * @optest op1=0 op2=-42 -> op1=-42
1827 */
1828 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1829
1830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1831 IEM_MC_BEGIN(0, 1);
1832 IEM_MC_LOCAL(uint64_t, uSrc);
1833
1834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1836 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1837 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 /**
1845 * @opdone
1846 * @opcode 0x16
1847 * @opcodesub !11 mr/reg
1848 * @oppfx none
1849 * @opcpuid sse
1850 * @opgroup og_sse_simdfp_datamove
1851 * @opxcpttype 5
1852 * @optest op1=1 op2=2 -> op1=2
1853 * @optest op1=0 op2=-42 -> op1=-42
1854 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1855 */
1856 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1857
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(uint64_t, uSrc);
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}
1875
1876
1877/**
1878 * @opcode 0x16
1879 * @opcodesub !11 mr/reg
1880 * @oppfx 0x66
1881 * @opcpuid sse2
1882 * @opgroup og_sse2_pcksclr_datamove
1883 * @opxcpttype 5
1884 * @optest op1=1 op2=2 -> op1=2
1885 * @optest op1=0 op2=-42 -> op1=-42
1886 */
1887FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1888{
1889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1890 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1891 {
1892 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1893 IEM_MC_BEGIN(0, 2);
1894 IEM_MC_LOCAL(uint64_t, uSrc);
1895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1896
1897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1899 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1900 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1901
1902 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1903 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1904
1905 IEM_MC_ADVANCE_RIP();
1906 IEM_MC_END();
1907 return VINF_SUCCESS;
1908 }
1909
1910 /**
1911 * @opdone
1912 * @opmnemonic ud660f16m3
1913 * @opcode 0x16
1914 * @opcodesub 11 mr/reg
1915 * @oppfx 0x66
1916 * @opunused immediate
1917 * @opcpuid sse
1918 * @optest ->
1919 */
1920 return IEMOP_RAISE_INVALID_OPCODE();
1921}
1922
1923
1924/**
1925 * @opcode 0x16
1926 * @oppfx 0xf3
1927 * @opcpuid sse3
1928 * @opgroup og_sse3_pcksclr_datamove
1929 * @opxcpttype 4
1930 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1931 * op1=0x00000002000000020000000100000001
1932 */
1933FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1934{
1935 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1938 {
1939 /*
1940 * Register, register.
1941 */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_BEGIN(2, 0);
1944 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1945 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1946
1947 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1948 IEM_MC_PREPARE_SSE_USAGE();
1949
1950 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1951 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1952 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1953
1954 IEM_MC_ADVANCE_RIP();
1955 IEM_MC_END();
1956 }
1957 else
1958 {
1959 /*
1960 * Register, memory.
1961 */
1962 IEM_MC_BEGIN(2, 2);
1963 IEM_MC_LOCAL(RTUINT128U, uSrc);
1964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1965 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1966 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1967
1968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1971 IEM_MC_PREPARE_SSE_USAGE();
1972
1973 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1974 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1975 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1976
1977 IEM_MC_ADVANCE_RIP();
1978 IEM_MC_END();
1979 }
1980 return VINF_SUCCESS;
1981}
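
/*
 * Illustrative sketch of the MOVSHDUP result computed by iemAImpl_movshdup
 * (plain C over the four source dwords, matching the @optest values above;
 * not the actual implementation):
 *
 *      static void sketchMovshdup(uint32_t aDst[4], uint32_t const aSrc[4])
 *      {
 *          aDst[0] = aSrc[1];  // each odd source dword is duplicated
 *          aDst[1] = aSrc[1];  //  into the even slot below it
 *          aDst[2] = aSrc[3];
 *          aDst[3] = aSrc[3];
 *      }
 */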
1982
1983/**
1984 * @opdone
1985 * @opmnemonic udf30f16
1986 * @opcode 0x16
1987 * @oppfx 0xf2
1988 * @opunused intel-modrm
1989 * @opcpuid sse
1990 * @optest ->
1991 * @opdone
1992 */
1993
1994
1995/**
1996 * @opcode 0x17
1997 * @opcodesub !11 mr/reg
1998 * @oppfx none
1999 * @opcpuid sse
2000 * @opgroup og_sse_simdfp_datamove
2001 * @opxcpttype 5
2002 * @optest op1=1 op2=2 -> op1=2
2003 * @optest op1=0 op2=-42 -> op1=-42
2004 */
2005FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2006{
2007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2008 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2009 {
2010 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2011
2012 IEM_MC_BEGIN(0, 2);
2013 IEM_MC_LOCAL(uint64_t, uSrc);
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015
2016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2020
2021 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2022 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 return VINF_SUCCESS;
2027 }
2028
2029 /**
2030 * @opdone
2031 * @opmnemonic ud0f17m3
2032 * @opcode 0x17
2033 * @opcodesub 11 mr/reg
2034 * @oppfx none
2035 * @opunused immediate
2036 * @opcpuid sse
2037 * @optest ->
2038 */
2039 return IEMOP_RAISE_INVALID_OPCODE();
2040}
2041
2042
2043/**
2044 * @opcode 0x17
2045 * @opcodesub !11 mr/reg
2046 * @oppfx 0x66
2047 * @opcpuid sse2
2048 * @opgroup og_sse2_pcksclr_datamove
2049 * @opxcpttype 5
2050 * @optest op1=1 op2=2 -> op1=2
2051 * @optest op1=0 op2=-42 -> op1=-42
2052 */
2053FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2054{
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2059
2060 IEM_MC_BEGIN(0, 2);
2061 IEM_MC_LOCAL(uint64_t, uSrc);
2062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2063
2064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2068
2069 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2070 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2071
2072 IEM_MC_ADVANCE_RIP();
2073 IEM_MC_END();
2074 return VINF_SUCCESS;
2075 }
2076
2077 /**
2078 * @opdone
2079 * @opmnemonic ud660f17m3
2080 * @opcode 0x17
2081 * @opcodesub 11 mr/reg
2082 * @oppfx 0x66
2083 * @opunused immediate
2084 * @opcpuid sse
2085 * @optest ->
2086 */
2087 return IEMOP_RAISE_INVALID_OPCODE();
2088}
2089
2090
2091/**
2092 * @opdone
2093 * @opmnemonic udf30f17
2094 * @opcode 0x17
2095 * @oppfx 0xf3
2096 * @opunused intel-modrm
2097 * @opcpuid sse
2098 * @optest ->
2099 * @opdone
2100 */
2101
2102/**
2103 * @opmnemonic udf20f17
2104 * @opcode 0x17
2105 * @oppfx 0xf2
2106 * @opunused intel-modrm
2107 * @opcpuid sse
2108 * @optest ->
2109 * @opdone
2110 */
2111
2112
2113/** Opcode 0x0f 0x18. */
2114FNIEMOP_DEF(iemOp_prefetch_Grp16)
2115{
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2118 {
2119 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2120 {
2121 case 4: /* Aliased to /0 for the time being according to AMD. */
2122 case 5: /* Aliased to /0 for the time being according to AMD. */
2123 case 6: /* Aliased to /0 for the time being according to AMD. */
2124 case 7: /* Aliased to /0 for the time being according to AMD. */
2125 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2126 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2127 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2128 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2130 }
2131
2132 IEM_MC_BEGIN(0, 1);
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2136 /* Currently a NOP. */
2137 NOREF(GCPtrEffSrc);
2138 IEM_MC_ADVANCE_RIP();
2139 IEM_MC_END();
2140 return VINF_SUCCESS;
2141 }
2142
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144}
2145
2146
2147/** Opcode 0x0f 0x19..0x1f. */
2148FNIEMOP_DEF(iemOp_nop_Ev)
2149{
2150 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2153 {
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155 IEM_MC_BEGIN(0, 0);
2156 IEM_MC_ADVANCE_RIP();
2157 IEM_MC_END();
2158 }
2159 else
2160 {
2161 IEM_MC_BEGIN(0, 1);
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 /* Currently a NOP. */
2166 NOREF(GCPtrEffSrc);
2167 IEM_MC_ADVANCE_RIP();
2168 IEM_MC_END();
2169 }
2170 return VINF_SUCCESS;
2171}
2172
2173
2174/** Opcode 0x0f 0x20. */
2175FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2176{
2177 /* mod is ignored, as are operand size overrides. */
2178 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2179 IEMOP_HLP_MIN_386();
2180 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2181 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2182 else
2183 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2184
2185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2186 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2187 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2188 {
2189 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2190 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2191 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2192 iCrReg |= 8;
2193 }
2194 switch (iCrReg)
2195 {
2196 case 0: case 2: case 3: case 4: case 8:
2197 break;
2198 default:
2199 return IEMOP_RAISE_INVALID_OPCODE();
2200 }
2201 IEMOP_HLP_DONE_DECODING();
2202
2203 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2204}
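
/*
 * Note on the LOCK handling above (a sketch of the encoding, assuming the
 * alternative-CR8 behaviour that fMovCr8In32Bit models): with the feature
 * present, the byte sequence f0 0f 20 c0 (LOCK + mov eax,cr0) reads CR8
 * instead of CR0, which is what the iCrReg |= 8 adjustment decodes; without
 * the feature the same bytes raise #UD, as the check above does first.
 */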
2205
2206
2207/** Opcode 0x0f 0x21. */
2208FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2209{
2210 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2211 IEMOP_HLP_MIN_386();
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2215 return IEMOP_RAISE_INVALID_OPCODE();
2216 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2217 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2218 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2219}
2220
2221
2222/** Opcode 0x0f 0x22. */
2223FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2224{
2225 /* mod is ignored, as are operand size overrides. */
2226 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2227 IEMOP_HLP_MIN_386();
2228 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2229 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2230 else
2231 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2232
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2235 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2236 {
2237 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2238 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2239 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2240 iCrReg |= 8;
2241 }
2242 switch (iCrReg)
2243 {
2244 case 0: case 2: case 3: case 4: case 8:
2245 break;
2246 default:
2247 return IEMOP_RAISE_INVALID_OPCODE();
2248 }
2249 IEMOP_HLP_DONE_DECODING();
2250
2251 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2252}
2253
2254
2255/** Opcode 0x0f 0x23. */
2256FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2257{
2258 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2259 IEMOP_HLP_MIN_386();
2260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2263 return IEMOP_RAISE_INVALID_OPCODE();
2264 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2265 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2266 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2267}
2268
2269
2270/** Opcode 0x0f 0x24. */
2271FNIEMOP_DEF(iemOp_mov_Rd_Td)
2272{
2273 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2274 /** @todo works on 386 and 486. */
2275 /* The RM byte is not considered, see testcase. */
2276 return IEMOP_RAISE_INVALID_OPCODE();
2277}
2278
2279
2280/** Opcode 0x0f 0x26. */
2281FNIEMOP_DEF(iemOp_mov_Td_Rd)
2282{
2283 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2284 /** @todo works on 386 and 486. */
2285 /* The RM byte is not considered, see testcase. */
2286 return IEMOP_RAISE_INVALID_OPCODE();
2287}
2288
2289
2290/**
2291 * @opcode 0x28
2292 * @oppfx none
2293 * @opcpuid sse
2294 * @opgroup og_sse_simdfp_datamove
2295 * @opxcpttype 1
2296 * @optest op1=1 op2=2 -> op1=2
2297 * @optest op1=0 op2=-42 -> op1=-42
2298 */
2299FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2300{
2301 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2304 {
2305 /*
2306 * Register, register.
2307 */
2308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2309 IEM_MC_BEGIN(0, 0);
2310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2313 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2314 IEM_MC_ADVANCE_RIP();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 IEM_MC_BEGIN(0, 2);
2323 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2328 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2330
2331 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2332 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2333
2334 IEM_MC_ADVANCE_RIP();
2335 IEM_MC_END();
2336 }
2337 return VINF_SUCCESS;
2338}
2339
2340/**
2341 * @opcode 0x28
2342 * @oppfx 66
2343 * @opcpuid sse2
2344 * @opgroup og_sse2_pcksclr_datamove
2345 * @opxcpttype 1
2346 * @optest op1=1 op2=2 -> op1=2
2347 * @optest op1=0 op2=-42 -> op1=-42
2348 */
2349FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2350{
2351 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2354 {
2355 /*
2356 * Register, register.
2357 */
2358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2359 IEM_MC_BEGIN(0, 0);
2360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2361 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2362 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2363 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2364 IEM_MC_ADVANCE_RIP();
2365 IEM_MC_END();
2366 }
2367 else
2368 {
2369 /*
2370 * Register, memory.
2371 */
2372 IEM_MC_BEGIN(0, 2);
2373 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2375
2376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2378 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2380
2381 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2382 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2383
2384 IEM_MC_ADVANCE_RIP();
2385 IEM_MC_END();
2386 }
2387 return VINF_SUCCESS;
2388}
2389
2390/* Opcode 0xf3 0x0f 0x28 - invalid */
2391/* Opcode 0xf2 0x0f 0x28 - invalid */
2392
2393/**
2394 * @opcode 0x29
2395 * @oppfx none
2396 * @opcpuid sse
2397 * @opgroup og_sse_simdfp_datamove
2398 * @opxcpttype 1
2399 * @optest op1=1 op2=2 -> op1=2
2400 * @optest op1=0 op2=-42 -> op1=-42
2401 */
2402FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2403{
2404 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2407 {
2408 /*
2409 * Register, register.
2410 */
2411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2412 IEM_MC_BEGIN(0, 0);
2413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2415 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2416 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2417 IEM_MC_ADVANCE_RIP();
2418 IEM_MC_END();
2419 }
2420 else
2421 {
2422 /*
2423 * Memory, register.
2424 */
2425 IEM_MC_BEGIN(0, 2);
2426 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428
2429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2431 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2433
2434 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2435 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 return VINF_SUCCESS;
2441}
2442
2443/**
2444 * @opcode 0x29
2445 * @oppfx 66
2446 * @opcpuid sse2
2447 * @opgroup og_sse2_pcksclr_datamove
2448 * @opxcpttype 1
2449 * @optest op1=1 op2=2 -> op1=2
2450 * @optest op1=0 op2=-42 -> op1=-42
2451 */
2452FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2453{
2454 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2457 {
2458 /*
2459 * Register, register.
2460 */
2461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2462 IEM_MC_BEGIN(0, 0);
2463 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2464 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2465 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2466 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2467 IEM_MC_ADVANCE_RIP();
2468 IEM_MC_END();
2469 }
2470 else
2471 {
2472 /*
2473 * Memory, register.
2474 */
2475 IEM_MC_BEGIN(0, 2);
2476 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2478
2479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2483
2484 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2485 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP();
2488 IEM_MC_END();
2489 }
2490 return VINF_SUCCESS;
2491}
2492
2493/* Opcode 0xf3 0x0f 0x29 - invalid */
2494/* Opcode 0xf2 0x0f 0x29 - invalid */
2495
2496
2497/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2498FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2499/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2500FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2501/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2502FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2503/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2504FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2505
2506
2507/**
2508 * @opcode 0x2b
2509 * @opcodesub !11 mr/reg
2510 * @oppfx none
2511 * @opcpuid sse
2512 * @opgroup og_sse1_cachect
2513 * @opxcpttype 1
2514 * @optest op1=1 op2=2 -> op1=2
2515 * @optest op1=0 op2=-42 -> op1=-42
2516 */
2517FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2518{
2519 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2522 {
2523 /*
2524 * memory, register.
2525 */
2526 IEM_MC_BEGIN(0, 2);
2527 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529
2530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2534
2535 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2536 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2537
2538 IEM_MC_ADVANCE_RIP();
2539 IEM_MC_END();
2540 }
2541 /* The register, register encoding is invalid. */
2542 else
2543 return IEMOP_RAISE_INVALID_OPCODE();
2544 return VINF_SUCCESS;
2545}
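
/*
 * Usage note (illustrative, no new behaviour): MOVNTPS is the non-temporal
 * variant of the aligned 128-bit store, typically paired with SFENCE by the
 * guest when store ordering matters. The cache hint has no architectural
 * effect, so the emulation above can treat it like the sketch below
 * (made-up helper, plain aligned copy):
 *
 *      static void sketchMovntps(void *pvMem16Aligned, uint64_t const au64Src[2])
 *      {
 *          memcpy(pvMem16Aligned, au64Src, 16);    // aligned store, NT hint dropped
 *      }
 */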
2546
2547/**
2548 * @opcode 0x2b
2549 * @opcodesub !11 mr/reg
2550 * @oppfx 0x66
2551 * @opcpuid sse2
2552 * @opgroup og_sse2_cachect
2553 * @opxcpttype 1
2554 * @optest op1=1 op2=2 -> op1=2
2555 * @optest op1=0 op2=-42 -> op1=-42
2556 */
2557FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2558{
2559 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2561 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2562 {
2563 /*
2564 * memory, register.
2565 */
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2576 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 }
2581 /* The register, register encoding is invalid. */
2582 else
2583 return IEMOP_RAISE_INVALID_OPCODE();
2584 return VINF_SUCCESS;
2585}
2586/* Opcode 0xf3 0x0f 0x2b - invalid */
2587/* Opcode 0xf2 0x0f 0x2b - invalid */
2588
2589
2590/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2591FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2592/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2593FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2594/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2595FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2596/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2597FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2598
2599/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2600FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2601/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2602FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2603/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2604FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2605/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2606FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2607
2608/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2609FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2610/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2611FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2612/* Opcode 0xf3 0x0f 0x2e - invalid */
2613/* Opcode 0xf2 0x0f 0x2e - invalid */
2614
2615/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2616FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2617/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2618FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2619/* Opcode 0xf3 0x0f 0x2f - invalid */
2620/* Opcode 0xf2 0x0f 0x2f - invalid */
2621
2622/** Opcode 0x0f 0x30. */
2623FNIEMOP_DEF(iemOp_wrmsr)
2624{
2625 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2627 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2628}
2629
2630
2631/** Opcode 0x0f 0x31. */
2632FNIEMOP_DEF(iemOp_rdtsc)
2633{
2634 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2636 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2637}
2638
2639
2640/** Opcode 0x0f 0x32. */
2641FNIEMOP_DEF(iemOp_rdmsr)
2642{
2643 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2646}
2647
2648
2649/** Opcode 0x0f 0x33. */
2650FNIEMOP_DEF(iemOp_rdpmc)
2651{
2652 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2655}
2656
2657
2658/** Opcode 0x0f 0x34. */
2659FNIEMOP_STUB(iemOp_sysenter);
2660/** Opcode 0x0f 0x35. */
2661FNIEMOP_STUB(iemOp_sysexit);
2662/** Opcode 0x0f 0x37. */
2663FNIEMOP_STUB(iemOp_getsec);
2664
2665
2666/** Opcode 0x0f 0x38. */
2667FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2668{
2669#ifdef IEM_WITH_THREE_0F_38
2670 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2671 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2672#else
2673 IEMOP_BITCH_ABOUT_STUB();
2674 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2675#endif
2676}
2677
2678
2679/** Opcode 0x0f 0x3a. */
2680FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2681{
2682#ifdef IEM_WITH_THREE_0F_3A
2683 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2684 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2685#else
2686 IEMOP_BITCH_ABOUT_STUB();
2687 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2688#endif
2689}
2690
2691
2692/**
2693 * Implements a conditional move.
2694 *
2695 * Wish there was an obvious way to do this where we could share and reduce
2696 * code bloat.
2697 *
2698 * @param a_Cnd The conditional "microcode" operation.
2699 */
2700#define CMOV_X(a_Cnd) \
2701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2702 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2703 { \
2704 switch (pVCpu->iem.s.enmEffOpSize) \
2705 { \
2706 case IEMMODE_16BIT: \
2707 IEM_MC_BEGIN(0, 1); \
2708 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2709 a_Cnd { \
2710 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2711 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2712 } IEM_MC_ENDIF(); \
2713 IEM_MC_ADVANCE_RIP(); \
2714 IEM_MC_END(); \
2715 return VINF_SUCCESS; \
2716 \
2717 case IEMMODE_32BIT: \
2718 IEM_MC_BEGIN(0, 1); \
2719 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2720 a_Cnd { \
2721 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2722 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2723 } IEM_MC_ELSE() { \
2724 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2725 } IEM_MC_ENDIF(); \
2726 IEM_MC_ADVANCE_RIP(); \
2727 IEM_MC_END(); \
2728 return VINF_SUCCESS; \
2729 \
2730 case IEMMODE_64BIT: \
2731 IEM_MC_BEGIN(0, 1); \
2732 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2733 a_Cnd { \
2734 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2735 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2736 } IEM_MC_ENDIF(); \
2737 IEM_MC_ADVANCE_RIP(); \
2738 IEM_MC_END(); \
2739 return VINF_SUCCESS; \
2740 \
2741 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2742 } \
2743 } \
2744 else \
2745 { \
2746 switch (pVCpu->iem.s.enmEffOpSize) \
2747 { \
2748 case IEMMODE_16BIT: \
2749 IEM_MC_BEGIN(0, 2); \
2750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2751 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2753 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2754 a_Cnd { \
2755 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2756 } IEM_MC_ENDIF(); \
2757 IEM_MC_ADVANCE_RIP(); \
2758 IEM_MC_END(); \
2759 return VINF_SUCCESS; \
2760 \
2761 case IEMMODE_32BIT: \
2762 IEM_MC_BEGIN(0, 2); \
2763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2764 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2766 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2767 a_Cnd { \
2768 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2769 } IEM_MC_ELSE() { \
2770 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2771 } IEM_MC_ENDIF(); \
2772 IEM_MC_ADVANCE_RIP(); \
2773 IEM_MC_END(); \
2774 return VINF_SUCCESS; \
2775 \
2776 case IEMMODE_64BIT: \
2777 IEM_MC_BEGIN(0, 2); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2781 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2782 a_Cnd { \
2783 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2784 } IEM_MC_ENDIF(); \
2785 IEM_MC_ADVANCE_RIP(); \
2786 IEM_MC_END(); \
2787 return VINF_SUCCESS; \
2788 \
2789 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2790 } \
2791 } do {} while (0)
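
/*
 * Illustrative expansion of CMOV_X (what the 32-bit register form of CMOVO
 * boils down to; plain C over a simple register/flags view, not the IEM_MC
 * microcode):
 *
 *      static void sketchCmovo32(uint64_t *pDst, uint32_t uSrc, uint32_t fEfl)
 *      {
 *          if (fEfl & X86_EFL_OF)
 *              *pDst = uSrc;               // condition met: copy the source
 *          else
 *              *pDst = (uint32_t)*pDst;    // not met: bits 63:32 are still cleared
 *      }
 */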
2792
2793
2794
2795/** Opcode 0x0f 0x40. */
2796FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2797{
2798 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2799 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2800}
2801
2802
2803/** Opcode 0x0f 0x41. */
2804FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2805{
2806 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2807 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2808}
2809
2810
2811/** Opcode 0x0f 0x42. */
2812FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2813{
2814 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2815 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2816}
2817
2818
2819/** Opcode 0x0f 0x43. */
2820FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2821{
2822 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2823 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2824}
2825
2826
2827/** Opcode 0x0f 0x44. */
2828FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2829{
2830 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2831 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2832}
2833
2834
2835/** Opcode 0x0f 0x45. */
2836FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2837{
2838 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2839 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2840}
2841
2842
2843/** Opcode 0x0f 0x46. */
2844FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2845{
2846 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2847 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2848}
2849
2850
2851/** Opcode 0x0f 0x47. */
2852FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2853{
2854 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2855 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2856}
2857
2858
2859/** Opcode 0x0f 0x48. */
2860FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2861{
2862 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2863 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2864}
2865
2866
2867/** Opcode 0x0f 0x49. */
2868FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2869{
2870 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2871 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2872}
2873
2874
2875/** Opcode 0x0f 0x4a. */
2876FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2877{
2878 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2879 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2880}
2881
2882
2883/** Opcode 0x0f 0x4b. */
2884FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2885{
2886 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2887 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2888}
2889
2890
2891/** Opcode 0x0f 0x4c. */
2892FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2893{
2894 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2895 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2896}
2897
2898
2899/** Opcode 0x0f 0x4d. */
2900FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2901{
2902 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2903 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2904}
2905
2906
2907/** Opcode 0x0f 0x4e. */
2908FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2909{
2910 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2911 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2912}
2913
2914
2915/** Opcode 0x0f 0x4f. */
2916FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2917{
2918 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2919 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2920}
2921
2922#undef CMOV_X
2923
2924/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2925FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2926/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2927FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2928/* Opcode 0xf3 0x0f 0x50 - invalid */
2929/* Opcode 0xf2 0x0f 0x50 - invalid */
2930
2931/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2932FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2933/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2934FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2935/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2936FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2937/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2938FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2939
2940/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2941FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2942/* Opcode 0x66 0x0f 0x52 - invalid */
2943/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2944FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2945/* Opcode 0xf2 0x0f 0x52 - invalid */
2946
2947/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2948FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2949/* Opcode 0x66 0x0f 0x53 - invalid */
2950/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2951FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2952/* Opcode 0xf2 0x0f 0x53 - invalid */
2953
2954/** Opcode 0x0f 0x54 - andps Vps, Wps */
2955FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2958/* Opcode 0xf3 0x0f 0x54 - invalid */
2959/* Opcode 0xf2 0x0f 0x54 - invalid */
2960
2961/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2962FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2963/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2964FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2965/* Opcode 0xf3 0x0f 0x55 - invalid */
2966/* Opcode 0xf2 0x0f 0x55 - invalid */
2967
2968/** Opcode 0x0f 0x56 - orps Vps, Wps */
2969FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2970/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2971FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2972/* Opcode 0xf3 0x0f 0x56 - invalid */
2973/* Opcode 0xf2 0x0f 0x56 - invalid */
2974
2975/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2976FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2977/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2978FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2979/* Opcode 0xf3 0x0f 0x57 - invalid */
2980/* Opcode 0xf2 0x0f 0x57 - invalid */
2981
2982/** Opcode 0x0f 0x58 - addps Vps, Wps */
2983FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2984/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2985FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2986/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2987FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2988/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2989FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2990
2991/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2992FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2993/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2994FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2995/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2996FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2997/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2998FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2999
3000/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3001FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3002/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3003FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3004/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3005FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3006/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3007FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3008
3009/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3010FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3011/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3012FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3013/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3014FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3015/* Opcode 0xf2 0x0f 0x5b - invalid */
3016
3017/** Opcode 0x0f 0x5c - subps Vps, Wps */
3018FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3019/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3020FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3021/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3022FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3023/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3024FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3025
3026/** Opcode 0x0f 0x5d - minps Vps, Wps */
3027FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3028/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3029FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3030/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3031FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3032/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3033FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3034
3035/** Opcode 0x0f 0x5e - divps Vps, Wps */
3036FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3037/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3038FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3039/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3040FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3041/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3042FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3043
3044/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3045FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3046/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3047FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3048/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3049FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3050/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3051FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3052
3053/**
3054 * Common worker for SSE2 instructions on the forms:
3055 * pxxxx xmm1, xmm2/mem128
3056 *
3057 * The 2nd operand is the first half of a register, which in the memory case
3058 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned access
3059 * where it may read the full 128 bits or only the lower 64 bits.
3060 *
3061 * Exceptions type 4.
3062 */
3063FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3064{
3065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3067 {
3068 /*
3069 * Register, register.
3070 */
3071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3072 IEM_MC_BEGIN(2, 0);
3073 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3074 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3075 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3076 IEM_MC_PREPARE_SSE_USAGE();
3077 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3078 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3079 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3080 IEM_MC_ADVANCE_RIP();
3081 IEM_MC_END();
3082 }
3083 else
3084 {
3085 /*
3086 * Register, memory.
3087 */
3088 IEM_MC_BEGIN(2, 2);
3089 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3090 IEM_MC_LOCAL(uint64_t, uSrc);
3091 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3093
3094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3096 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3097 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3098
3099 IEM_MC_PREPARE_SSE_USAGE();
3100 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3101 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3102
3103 IEM_MC_ADVANCE_RIP();
3104 IEM_MC_END();
3105 }
3106 return VINF_SUCCESS;
3107}
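
/*
 * Illustrative sketch of a typical user of this worker, PUNPCKLBW, which
 * interleaves the low halves of the two operands (plain C bytes view, not
 * the real g_iemAImpl_* implementation):
 *
 *      static void sketchPunpcklbw(uint8_t abDst[16], uint8_t const abSrc[16])
 *      {
 *          uint8_t abRes[16];
 *          for (unsigned i = 0; i < 8; i++)
 *          {
 *              abRes[i * 2]     = abDst[i];    // destination supplies the even bytes
 *              abRes[i * 2 + 1] = abSrc[i];    // source supplies the odd bytes
 *          }
 *          memcpy(abDst, abRes, sizeof(abRes));
 *      }
 */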
3108
3109
3110/**
3111 * Common worker for MMX instructions on the forms:
3112 * pxxxx mm1, mm2/mem32
3113 *
3114 * The 2nd operand is the first half of a register, which in the memory case
3115 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned access
3116 * where it may read the full 128 bits or only the lower 64 bits.
3117 *
3118 * Exceptions type 4.
3119 */
3120FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3121{
3122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3123 if (!pImpl->pfnU64)
3124 return IEMOP_RAISE_INVALID_OPCODE();
3125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3126 {
3127 /*
3128 * Register, register.
3129 */
3130 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3131 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3133 IEM_MC_BEGIN(2, 0);
3134 IEM_MC_ARG(uint64_t *, pDst, 0);
3135 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3136 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3137 IEM_MC_PREPARE_FPU_USAGE();
3138 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3139 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3140 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3141 IEM_MC_ADVANCE_RIP();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /*
3147 * Register, memory.
3148 */
3149 IEM_MC_BEGIN(2, 2);
3150 IEM_MC_ARG(uint64_t *, pDst, 0);
3151 IEM_MC_LOCAL(uint32_t, uSrc);
3152 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3154
3155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3158 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3159
3160 IEM_MC_PREPARE_FPU_USAGE();
3161 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3162 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3163
3164 IEM_MC_ADVANCE_RIP();
3165 IEM_MC_END();
3166 }
3167 return VINF_SUCCESS;
3168}
3169
3170
3171/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3172FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3173{
3174 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3175 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3176}
3177
3178/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3179FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3180{
3181 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3182 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3183}
3184
3185/* Opcode 0xf3 0x0f 0x60 - invalid */
3186
3187
3188/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3189FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3190{
3191 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
3192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3193}
3194
3195/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3196FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3197{
3198 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3199 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3200}
3201
3202/* Opcode 0xf3 0x0f 0x61 - invalid */
3203
3204
3205/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3206FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3207{
3208 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3210}
3211
3212/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3213FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3214{
3215 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3217}
3218
3219/* Opcode 0xf3 0x0f 0x62 - invalid */
3220
3221
3222
3223/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3224FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3225/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3226FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3227/* Opcode 0xf3 0x0f 0x63 - invalid */
3228
3229/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3230FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3231/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3232FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3233/* Opcode 0xf3 0x0f 0x64 - invalid */
3234
3235/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3236FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3237/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3238FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3239/* Opcode 0xf3 0x0f 0x65 - invalid */
3240
3241/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3242FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3243/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3244FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3245/* Opcode 0xf3 0x0f 0x66 - invalid */
3246
3247/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3248FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3249/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3250FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3251/* Opcode 0xf3 0x0f 0x67 - invalid */
3252
3253
3254/**
3255 * Common worker for MMX instructions on the form:
3256 * pxxxx mm1, mm2/mem64
3257 *
3258 * The 2nd operand is the second half of a register, which in the memory case
3259 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3260 * where it may read the full 128 bits or only the upper 64 bits.
3261 *
3262 * Exceptions type 4.
3263 */
3264FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3265{
3266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3267 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3269 {
3270 /*
3271 * Register, register.
3272 */
3273 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3274 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_BEGIN(2, 0);
3277 IEM_MC_ARG(uint64_t *, pDst, 0);
3278 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3279 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3280 IEM_MC_PREPARE_FPU_USAGE();
3281 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3282 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3283 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3284 IEM_MC_ADVANCE_RIP();
3285 IEM_MC_END();
3286 }
3287 else
3288 {
3289 /*
3290 * Register, memory.
3291 */
3292 IEM_MC_BEGIN(2, 2);
3293 IEM_MC_ARG(uint64_t *, pDst, 0);
3294 IEM_MC_LOCAL(uint64_t, uSrc);
3295 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3300 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3301 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3302
3303 IEM_MC_PREPARE_FPU_USAGE();
3304 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3305 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3306
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 return VINF_SUCCESS;
3311}
3312
3313
3314/**
3315 * Common worker for SSE2 instructions on the form:
3316 * pxxxx xmm1, xmm2/mem128
3317 *
3318 * The 2nd operand is the second half of a register, which in the memory case
3319 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3320 * where it may read the full 128 bits or only the upper 64 bits.
3321 *
3322 * Exceptions type 4.
3323 */
3324FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3325{
3326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3327 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3328 {
3329 /*
3330 * Register, register.
3331 */
3332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3333 IEM_MC_BEGIN(2, 0);
3334 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3335 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3336 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3337 IEM_MC_PREPARE_SSE_USAGE();
3338 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3339 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3340 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3341 IEM_MC_ADVANCE_RIP();
3342 IEM_MC_END();
3343 }
3344 else
3345 {
3346 /*
3347 * Register, memory.
3348 */
3349 IEM_MC_BEGIN(2, 2);
3350 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3351 IEM_MC_LOCAL(RTUINT128U, uSrc);
3352 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3354
3355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3358 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3359
3360 IEM_MC_PREPARE_SSE_USAGE();
3361 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3362 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3363
3364 IEM_MC_ADVANCE_RIP();
3365 IEM_MC_END();
3366 }
3367 return VINF_SUCCESS;
3368}
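
/*
 * Illustrative counterpart to the low-low sketch further up: the high-high
 * workers feed instructions like PUNPCKHBW, which interleave the upper halves
 * of the operands instead (plain C, not the real implementation):
 *
 *      static void sketchPunpckhbw(uint8_t abDst[16], uint8_t const abSrc[16])
 *      {
 *          uint8_t abRes[16];
 *          for (unsigned i = 0; i < 8; i++)
 *          {
 *              abRes[i * 2]     = abDst[8 + i];
 *              abRes[i * 2 + 1] = abSrc[8 + i];
 *          }
 *          memcpy(abDst, abRes, sizeof(abRes));
 *      }
 */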
3369
3370
3371/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3372FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3373{
3374 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3375 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3376}
3377
3378/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3379FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3380{
3381 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3382 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3383}
3384/* Opcode 0xf3 0x0f 0x68 - invalid */
3385
3386
3387/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3388FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3389{
3390 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3391 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3392}
3393
3394/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3395FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3396{
3397 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3398 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3399
3400}
3401/* Opcode 0xf3 0x0f 0x69 - invalid */
3402
3403
3404/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3405FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3406{
3407 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3409}
3410
3411/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3412FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3413{
3414 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3415 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3416}
3417/* Opcode 0xf3 0x0f 0x6a - invalid */
3418
3419
3420/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3421FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3422/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3423FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3424/* Opcode 0xf3 0x0f 0x6b - invalid */
3425
3426
3427/* Opcode 0x0f 0x6c - invalid */
3428
3429/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3430FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3431{
3432 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3433 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3434}
3435
3436/* Opcode 0xf3 0x0f 0x6c - invalid */
3437/* Opcode 0xf2 0x0f 0x6c - invalid */
3438
3439
3440/* Opcode 0x0f 0x6d - invalid */
3441
3442/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3443FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3444{
3445 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, W");
3446 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3447}
3448
3449/* Opcode 0xf3 0x0f 0x6d - invalid */
3450
3451
3452FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3453{
3454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3455 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3456 {
3457 /**
3458 * @opcode 0x6e
3459 * @opcodesub rex.w=1
3460 * @oppfx none
3461 * @opcpuid mmx
3462 * @opgroup og_mmx_datamove
3463 * @opxcpttype 5
3464 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3465 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3466 */
3467 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /* MMX, greg64 */
3471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3472 IEM_MC_BEGIN(0, 1);
3473 IEM_MC_LOCAL(uint64_t, u64Tmp);
3474
3475 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3476 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3477
3478 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3479 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3480 IEM_MC_FPU_TO_MMX_MODE();
3481
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /* MMX, [mem64] */
3488 IEM_MC_BEGIN(0, 2);
3489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3490 IEM_MC_LOCAL(uint64_t, u64Tmp);
3491
3492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3494 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3495 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3496
3497 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3498 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3499 IEM_MC_FPU_TO_MMX_MODE();
3500
3501 IEM_MC_ADVANCE_RIP();
3502 IEM_MC_END();
3503 }
3504 }
3505 else
3506 {
3507 /**
3508 * @opdone
3509 * @opcode 0x6e
3510 * @opcodesub rex.w=0
3511 * @oppfx none
3512 * @opcpuid mmx
3513 * @opgroup og_mmx_datamove
3514 * @opxcpttype 5
3515 * @opfunction iemOp_movd_q_Pd_Ey
3516 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3517 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3518 */
3519 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3521 {
3522 /* MMX, greg */
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524 IEM_MC_BEGIN(0, 1);
3525 IEM_MC_LOCAL(uint64_t, u64Tmp);
3526
3527 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3528 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3529
3530 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3531 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3532 IEM_MC_FPU_TO_MMX_MODE();
3533
3534 IEM_MC_ADVANCE_RIP();
3535 IEM_MC_END();
3536 }
3537 else
3538 {
3539 /* MMX, [mem] */
3540 IEM_MC_BEGIN(0, 2);
3541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3542 IEM_MC_LOCAL(uint32_t, u32Tmp);
3543
3544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3546 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3547 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3548
3549 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3550 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3551 IEM_MC_FPU_TO_MMX_MODE();
3552
3553 IEM_MC_ADVANCE_RIP();
3554 IEM_MC_END();
3555 }
3556 }
3557 return VINF_SUCCESS;
3558}
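
/*
 * Illustrative summary of the two decode paths above (plain C view of the
 * 64-bit MMX destination, not the emulation itself):
 *
 *      static uint64_t sketchMovdToMmx(uint32_t uSrc)  // rex.w=0: movd Pd,Ed
 *      {
 *          return uSrc;                                // zero-extended to 64 bits
 *      }
 *      static uint64_t sketchMovqToMmx(uint64_t uSrc)  // rex.w=1: movq Pq,Eq
 *      {
 *          return uSrc;
 *      }
 *
 * Both paths also switch the x87 unit into MMX mode (the ftw=0xff in the
 * @optest lines), which is what the IEM_MC_FPU_TO_MMX_MODE() calls do.
 */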
3559
3560/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3561FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3562{
3563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3564 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3565 {
3566 /**
3567 * @opcode 0x6e
3568 * @opcodesub rex.w=1
3569 * @oppfx 0x66
3570 * @opcpuid sse2
3571 * @opgroup og_sse2_simdint_datamove
3572 * @opxcpttype 5
3573 * @optest 64-bit / op1=1 op2=2 -> op1=2
3574 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3575 */
3576 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3577 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3578 {
3579 /* XMM, greg64 */
3580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3581 IEM_MC_BEGIN(0, 1);
3582 IEM_MC_LOCAL(uint64_t, u64Tmp);
3583
3584 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3585 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3586
3587 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3588 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3589
3590 IEM_MC_ADVANCE_RIP();
3591 IEM_MC_END();
3592 }
3593 else
3594 {
3595 /* XMM, [mem64] */
3596 IEM_MC_BEGIN(0, 2);
3597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3598 IEM_MC_LOCAL(uint64_t, u64Tmp);
3599
3600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3602 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3603 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3604
3605 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3606 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3607
3608 IEM_MC_ADVANCE_RIP();
3609 IEM_MC_END();
3610 }
3611 }
3612 else
3613 {
3614 /**
3615 * @opdone
3616 * @opcode 0x6e
3617 * @opcodesub rex.w=0
3618 * @oppfx 0x66
3619 * @opcpuid sse2
3620 * @opgroup og_sse2_simdint_datamove
3621 * @opxcpttype 5
3622 * @opfunction iemOp_movd_q_Vy_Ey
3623 * @optest op1=1 op2=2 -> op1=2
3624 * @optest op1=0 op2=-42 -> op1=-42
3625 */
3626 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3628 {
3629 /* XMM, greg32 */
3630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3631 IEM_MC_BEGIN(0, 1);
3632 IEM_MC_LOCAL(uint32_t, u32Tmp);
3633
3634 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3635 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3636
3637 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3638 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3639
3640 IEM_MC_ADVANCE_RIP();
3641 IEM_MC_END();
3642 }
3643 else
3644 {
3645 /* XMM, [mem32] */
3646 IEM_MC_BEGIN(0, 2);
3647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3648 IEM_MC_LOCAL(uint32_t, u32Tmp);
3649
3650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3652 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3653 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3654
3655 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3656 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3657
3658 IEM_MC_ADVANCE_RIP();
3659 IEM_MC_END();
3660 }
3661 }
3662 return VINF_SUCCESS;
3663}
3664
3665/* Opcode 0xf3 0x0f 0x6e - invalid */
3666
3667
3668/**
3669 * @opcode 0x6f
3670 * @oppfx none
3671 * @opcpuid mmx
3672 * @opgroup og_mmx_datamove
3673 * @opxcpttype 5
3674 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3675 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3676 */
3677FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3678{
3679 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3682 {
3683 /*
3684 * Register, register.
3685 */
3686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3687 IEM_MC_BEGIN(0, 1);
3688 IEM_MC_LOCAL(uint64_t, u64Tmp);
3689
3690 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3691 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3692
3693 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3694 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3695 IEM_MC_FPU_TO_MMX_MODE();
3696
3697 IEM_MC_ADVANCE_RIP();
3698 IEM_MC_END();
3699 }
3700 else
3701 {
3702 /*
3703 * Register, memory.
3704 */
3705 IEM_MC_BEGIN(0, 2);
3706 IEM_MC_LOCAL(uint64_t, u64Tmp);
3707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3708
3709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3711 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3712 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3713
3714 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3715 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3716 IEM_MC_FPU_TO_MMX_MODE();
3717
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 }
3721 return VINF_SUCCESS;
3722}
3723
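/* Note: movdqa requires its 128-bit memory operand to be 16 byte aligned; the
   _ALIGN_SSE fetch below raises #GP(0) on a misaligned address, whereas the
   0xf3-prefixed movdqu accepts any alignment. */
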
3724/**
3725 * @opcode 0x6f
3726 * @oppfx 0x66
3727 * @opcpuid sse2
3728 * @opgroup og_sse2_simdint_datamove
3729 * @opxcpttype 1
3730 * @optest op1=1 op2=2 -> op1=2
3731 * @optest op1=0 op2=-42 -> op1=-42
3732 */
3733FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3734{
3735 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3738 {
3739 /*
3740 * Register, register.
3741 */
3742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3743 IEM_MC_BEGIN(0, 0);
3744
3745 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3746 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3747
3748 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3749 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3750 IEM_MC_ADVANCE_RIP();
3751 IEM_MC_END();
3752 }
3753 else
3754 {
3755 /*
3756 * Register, memory.
3757 */
3758 IEM_MC_BEGIN(0, 2);
3759 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3761
3762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3764 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3765 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3766
3767 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3768 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3769
3770 IEM_MC_ADVANCE_RIP();
3771 IEM_MC_END();
3772 }
3773 return VINF_SUCCESS;
3774}
3775
3776/**
3777 * @opcode 0x6f
3778 * @oppfx 0xf3
3779 * @opcpuid sse2
3780 * @opgroup og_sse2_simdint_datamove
3781 * @opxcpttype 4UA
3782 * @optest op1=1 op2=2 -> op1=2
3783 * @optest op1=0 op2=-42 -> op1=-42
3784 */
3785FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3786{
3787 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3789 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3790 {
3791 /*
3792 * Register, register.
3793 */
3794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3795 IEM_MC_BEGIN(0, 0);
3796 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3798 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3799 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3800 IEM_MC_ADVANCE_RIP();
3801 IEM_MC_END();
3802 }
3803 else
3804 {
3805 /*
3806 * Register, memory.
3807 */
3808 IEM_MC_BEGIN(0, 2);
3809 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3811
3812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3815 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3816 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3817 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3818
3819 IEM_MC_ADVANCE_RIP();
3820 IEM_MC_END();
3821 }
3822 return VINF_SUCCESS;
3823}
3824
3825
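#if 0 /* Illustration only, excluded from the build: a scalar model of the
         pshufw shuffle (hypothetical helper, not IEM API). Each 2-bit field
         of the immediate selects one source word, so e.g. an immediate of
         0x1b reverses the four words. */
static uint64_t iemExamplePshufW(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uDst = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bImm >> (iWord * 2)) & 3; /* source word index */
        uDst |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
    }
    return uDst;
}
#endif
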
3826/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3827FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3828{
3829 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3831 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3832 {
3833 /*
3834 * Register, register.
3835 */
3836 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3838
3839 IEM_MC_BEGIN(3, 0);
3840 IEM_MC_ARG(uint64_t *, pDst, 0);
3841 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3842 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3843 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3844 IEM_MC_PREPARE_FPU_USAGE();
3845 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3846 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3847 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3848 IEM_MC_ADVANCE_RIP();
3849 IEM_MC_END();
3850 }
3851 else
3852 {
3853 /*
3854 * Register, memory.
3855 */
3856 IEM_MC_BEGIN(3, 2);
3857 IEM_MC_ARG(uint64_t *, pDst, 0);
3858 IEM_MC_LOCAL(uint64_t, uSrc);
3859 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3861
3862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3863 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3864 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3866 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3867
3868 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3869 IEM_MC_PREPARE_FPU_USAGE();
3870 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3871 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3872
3873 IEM_MC_ADVANCE_RIP();
3874 IEM_MC_END();
3875 }
3876 return VINF_SUCCESS;
3877}
3878
3879/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3880FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3881{
3882 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3885 {
3886 /*
3887 * Register, register.
3888 */
3889 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3891
3892 IEM_MC_BEGIN(3, 0);
3893 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3894 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3895 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3896 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3897 IEM_MC_PREPARE_SSE_USAGE();
3898 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3899 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3900 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3901 IEM_MC_ADVANCE_RIP();
3902 IEM_MC_END();
3903 }
3904 else
3905 {
3906 /*
3907 * Register, memory.
3908 */
3909 IEM_MC_BEGIN(3, 2);
3910 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3911 IEM_MC_LOCAL(RTUINT128U, uSrc);
3912 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3914
3915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3916 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3917 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3919 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3920
3921 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3922 IEM_MC_PREPARE_SSE_USAGE();
3923 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3924 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3925
3926 IEM_MC_ADVANCE_RIP();
3927 IEM_MC_END();
3928 }
3929 return VINF_SUCCESS;
3930}
3931
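/* pshufhw shuffles only the four high words of the source, using the same
   2-bit immediate fields; the low quadword is copied through unchanged.
   pshuflw (0xf2 prefix, below) is the mirror image on the low words. */
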
3932/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3933FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3934{
3935 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3938 {
3939 /*
3940 * Register, register.
3941 */
3942 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3944
3945 IEM_MC_BEGIN(3, 0);
3946 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3947 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3948 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3949 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3950 IEM_MC_PREPARE_SSE_USAGE();
3951 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3952 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3953 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3954 IEM_MC_ADVANCE_RIP();
3955 IEM_MC_END();
3956 }
3957 else
3958 {
3959 /*
3960 * Register, memory.
3961 */
3962 IEM_MC_BEGIN(3, 2);
3963 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3964 IEM_MC_LOCAL(RTUINT128U, uSrc);
3965 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3967
3968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3969 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3970 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3972 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3973
3974 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3975 IEM_MC_PREPARE_SSE_USAGE();
3976 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3977 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3978
3979 IEM_MC_ADVANCE_RIP();
3980 IEM_MC_END();
3981 }
3982 return VINF_SUCCESS;
3983}
3984
3985/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3986FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3987{
3988 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3990 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3991 {
3992 /*
3993 * Register, register.
3994 */
3995 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3997
3998 IEM_MC_BEGIN(3, 0);
3999 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4000 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4001 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4002 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4003 IEM_MC_PREPARE_SSE_USAGE();
4004 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4005 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4006 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4007 IEM_MC_ADVANCE_RIP();
4008 IEM_MC_END();
4009 }
4010 else
4011 {
4012 /*
4013 * Register, memory.
4014 */
4015 IEM_MC_BEGIN(3, 2);
4016 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4017 IEM_MC_LOCAL(RTUINT128U, uSrc);
4018 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4020
4021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4022 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4023 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4025 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4026
4027 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4028 IEM_MC_PREPARE_SSE_USAGE();
4029 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4030 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4031
4032 IEM_MC_ADVANCE_RIP();
4033 IEM_MC_END();
4034 }
4035 return VINF_SUCCESS;
4036}
4037
4038
4039/** Opcode 0x0f 0x71 11/2. */
4040FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4041
4042/** Opcode 0x66 0x0f 0x71 11/2. */
4043FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4044
4045/** Opcode 0x0f 0x71 11/4. */
4046FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4047
4048/** Opcode 0x66 0x0f 0x71 11/4. */
4049FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4050
4051/** Opcode 0x0f 0x71 11/6. */
4052FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4053
4054/** Opcode 0x66 0x0f 0x71 11/6. */
4055FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4056
4057
4058/**
4059 * Group 12 jump table for register variant.
4060 */
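/* Each /reg row has four columns selected by pVCpu->iem.s.idxPrefix
   (0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2); the dispatcher below indexes
   the table as reg * 4 + idxPrefix.  Groups 13 and 14 use the same layout. */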
4061IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4062{
4063 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4064 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4065 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4066 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4067 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4068 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4069 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4070 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4071};
4072AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4073
4074
4075/** Opcode 0x0f 0x71. */
4076FNIEMOP_DEF(iemOp_Grp12)
4077{
4078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4080 /* register, register */
4081 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4082 + pVCpu->iem.s.idxPrefix], bRm);
4083 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4084}
4085
4086
4087/** Opcode 0x0f 0x72 11/2. */
4088FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4089
4090/** Opcode 0x66 0x0f 0x72 11/2. */
4091FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4092
4093/** Opcode 0x0f 0x72 11/4. */
4094FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4095
4096/** Opcode 0x66 0x0f 0x72 11/4. */
4097FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4098
4099/** Opcode 0x0f 0x72 11/6. */
4100FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4101
4102/** Opcode 0x66 0x0f 0x72 11/6. */
4103FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4104
4105
4106/**
4107 * Group 13 jump table for register variant.
4108 */
4109IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4110{
4111 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4112 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4113 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4114 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4115 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4116 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4117 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4118 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4119};
4120AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4121
4122/** Opcode 0x0f 0x72. */
4123FNIEMOP_DEF(iemOp_Grp13)
4124{
4125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4127 /* register, register */
4128 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4129 + pVCpu->iem.s.idxPrefix], bRm);
4130 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4131}
4132
4133
4134/** Opcode 0x0f 0x73 11/2. */
4135FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4136
4137/** Opcode 0x66 0x0f 0x73 11/2. */
4138FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4139
4140/** Opcode 0x66 0x0f 0x73 11/3. */
4141FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4142
4143/** Opcode 0x0f 0x73 11/6. */
4144FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4145
4146/** Opcode 0x66 0x0f 0x73 11/6. */
4147FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4148
4149/** Opcode 0x66 0x0f 0x73 11/7. */
4150FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4151
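/* Unlike the other group 14 shifts, psrldq/pslldq shift the entire 128-bit
   register by the immediate number of *bytes* and exist only with the 0x66
   prefix, hence the extra /3 and /7 entries in the table below. */
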
4152/**
4153 * Group 14 jump table for register variant.
4154 */
4155IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4156{
4157 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4158 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4159 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4160 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4161 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4162 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4163 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4164 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4165};
4166AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4167
4168
4169/** Opcode 0x0f 0x73. */
4170FNIEMOP_DEF(iemOp_Grp14)
4171{
4172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4173 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4174 /* register, register */
4175 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4176 + pVCpu->iem.s.idxPrefix], bRm);
4177 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4178}
4179
4180
4181/**
4182 * Common worker for MMX instructions of the form:
4183 * pxxx mm1, mm2/mem64
4184 */
4185FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4186{
4187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4188 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4189 {
4190 /*
4191 * Register, register.
4192 */
4193 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4194 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4196 IEM_MC_BEGIN(2, 0);
4197 IEM_MC_ARG(uint64_t *, pDst, 0);
4198 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4199 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4200 IEM_MC_PREPARE_FPU_USAGE();
4201 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4202 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4203 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4204 IEM_MC_ADVANCE_RIP();
4205 IEM_MC_END();
4206 }
4207 else
4208 {
4209 /*
4210 * Register, memory.
4211 */
4212 IEM_MC_BEGIN(2, 2);
4213 IEM_MC_ARG(uint64_t *, pDst, 0);
4214 IEM_MC_LOCAL(uint64_t, uSrc);
4215 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4217
4218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4220 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4221 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4222
4223 IEM_MC_PREPARE_FPU_USAGE();
4224 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4225 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4226
4227 IEM_MC_ADVANCE_RIP();
4228 IEM_MC_END();
4229 }
4230 return VINF_SUCCESS;
4231}
4232
4233
4234/**
4235 * Common worker for SSE2 instructions of the form:
4236 * pxxx xmm1, xmm2/mem128
4237 *
4238 * Proper alignment of the 128-bit operand is enforced.
4239 * Exceptions type 4. SSE2 cpuid checks.
4240 */
4241FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4242{
4243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4245 {
4246 /*
4247 * Register, register.
4248 */
4249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4250 IEM_MC_BEGIN(2, 0);
4251 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4252 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4253 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4254 IEM_MC_PREPARE_SSE_USAGE();
4255 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4256 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4257 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4258 IEM_MC_ADVANCE_RIP();
4259 IEM_MC_END();
4260 }
4261 else
4262 {
4263 /*
4264 * Register, memory.
4265 */
4266 IEM_MC_BEGIN(2, 2);
4267 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4268 IEM_MC_LOCAL(RTUINT128U, uSrc);
4269 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4271
4272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4274 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4275 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4276
4277 IEM_MC_PREPARE_SSE_USAGE();
4278 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4279 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4280
4281 IEM_MC_ADVANCE_RIP();
4282 IEM_MC_END();
4283 }
4284 return VINF_SUCCESS;
4285}
4286
4287
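#if 0 /* Illustration only, excluded from the build: a scalar model of the
         byte-wise compare (hypothetical helper, not IEM API). Each destination
         byte becomes all ones when the operand bytes are equal, zero
         otherwise. */
static uint64_t iemExamplePcmpeqb(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        if ((uint8_t)(uDst >> (iByte * 8)) == (uint8_t)(uSrc >> (iByte * 8)))
            uResult |= UINT64_C(0xff) << (iByte * 8);
    return uResult;
}
#endif
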
4288/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4289FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4290{
4291 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4292 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4293}
4294
4295/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4296FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4297{
4298 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4299 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4300}
4301
4302/* Opcode 0xf3 0x0f 0x74 - invalid */
4303/* Opcode 0xf2 0x0f 0x74 - invalid */
4304
4305
4306/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4307FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4308{
4309 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4310 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4311}
4312
4313/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4314FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4315{
4316 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4317 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4318}
4319
4320/* Opcode 0xf3 0x0f 0x75 - invalid */
4321/* Opcode 0xf2 0x0f 0x75 - invalid */
4322
4323
4324/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4325FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4326{
4327 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4328 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4329}
4330
4331/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4332FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4333{
4334 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4335 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4336}
4337
4338/* Opcode 0xf3 0x0f 0x76 - invalid */
4339/* Opcode 0xf2 0x0f 0x76 - invalid */
4340
4341
4342/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4343FNIEMOP_STUB(iemOp_emms);
4344/* Opcode 0x66 0x0f 0x77 - invalid */
4345/* Opcode 0xf3 0x0f 0x77 - invalid */
4346/* Opcode 0xf2 0x0f 0x77 - invalid */
4347
4348/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4349FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4350/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4351FNIEMOP_STUB(iemOp_AmdGrp17);
4352/* Opcode 0xf3 0x0f 0x78 - invalid */
4353/* Opcode 0xf2 0x0f 0x78 - invalid */
4354
4355/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4356FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4357/* Opcode 0x66 0x0f 0x79 - invalid */
4358/* Opcode 0xf3 0x0f 0x79 - invalid */
4359/* Opcode 0xf2 0x0f 0x79 - invalid */
4360
4361/* Opcode 0x0f 0x7a - invalid */
4362/* Opcode 0x66 0x0f 0x7a - invalid */
4363/* Opcode 0xf3 0x0f 0x7a - invalid */
4364/* Opcode 0xf2 0x0f 0x7a - invalid */
4365
4366/* Opcode 0x0f 0x7b - invalid */
4367/* Opcode 0x66 0x0f 0x7b - invalid */
4368/* Opcode 0xf3 0x0f 0x7b - invalid */
4369/* Opcode 0xf2 0x0f 0x7b - invalid */
4370
4371/* Opcode 0x0f 0x7c - invalid */
4372/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4373FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4374/* Opcode 0xf3 0x0f 0x7c - invalid */
4375/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4376FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4377
4378/* Opcode 0x0f 0x7d - invalid */
4379/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4380FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4381/* Opcode 0xf3 0x0f 0x7d - invalid */
4382/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4383FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4384
4385
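/* 0x0f 0x7e moves from an MMX register to a GPR or to memory; REX.W selects
   between storing the full 64-bit register (movq) and only its low 32 bits
   (movd). */
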
4386/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4387FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4388{
4389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4390 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4391 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4392 else
4393 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4395 {
4396 /* greg, MMX */
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398 IEM_MC_BEGIN(0, 1);
4399 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4400 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4401 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4402 {
4403 IEM_MC_LOCAL(uint64_t, u64Tmp);
4404 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4405 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4406 }
4407 else
4408 {
4409 IEM_MC_LOCAL(uint32_t, u32Tmp);
4410 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4411 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4412 }
4413 IEM_MC_ADVANCE_RIP();
4414 IEM_MC_END();
4415 }
4416 else
4417 {
4418 /* [mem], MMX */
4419 IEM_MC_BEGIN(0, 2);
4420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4423 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4424 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4425 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4426 {
4427 IEM_MC_LOCAL(uint64_t, u64Tmp);
4428 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4429 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4430 }
4431 else
4432 {
4433 IEM_MC_LOCAL(uint32_t, u32Tmp);
4434 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4435 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4436 }
4437 IEM_MC_ADVANCE_RIP();
4438 IEM_MC_END();
4439 }
4440 return VINF_SUCCESS;
4441}
4442
4443/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4444FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4445{
4446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4447 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4448 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4449 else
4450 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4451 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4452 {
4453 /* greg, XMM */
4454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4455 IEM_MC_BEGIN(0, 1);
4456 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4457 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4458 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4459 {
4460 IEM_MC_LOCAL(uint64_t, u64Tmp);
4461 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4462 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4463 }
4464 else
4465 {
4466 IEM_MC_LOCAL(uint32_t, u32Tmp);
4467 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4468 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4469 }
4470 IEM_MC_ADVANCE_RIP();
4471 IEM_MC_END();
4472 }
4473 else
4474 {
4475 /* [mem], XMM */
4476 IEM_MC_BEGIN(0, 2);
4477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4480 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4481 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4482 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4483 {
4484 IEM_MC_LOCAL(uint64_t, u64Tmp);
4485 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4486 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4487 }
4488 else
4489 {
4490 IEM_MC_LOCAL(uint32_t, u32Tmp);
4491 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4492 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4493 }
4494 IEM_MC_ADVANCE_RIP();
4495 IEM_MC_END();
4496 }
4497 return VINF_SUCCESS;
4498}
4499
4500
4501/**
4502 * @opcode 0x7e
4503 * @opcodesub !11 mr/reg
4504 * @oppfx 0xf3
4505 * @opcpuid sse2
4506 * @opgroup og_sse2_pcksclr_datamove
4507 * @opxcpttype 5
4508 * @optest op1=1 op2=2 -> op1=2
4509 * @optest op1=0 op2=-42 -> op1=-42
4510 */
4511FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4512{
4513 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4515 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4516 {
4517 /*
4518 * Register, register.
4519 */
4520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4521 IEM_MC_BEGIN(0, 2);
4522 IEM_MC_LOCAL(uint64_t, uSrc);
4523
4524 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4525 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4526
4527 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4528 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4529
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 }
4533 else
4534 {
4535 /*
4536 * Register, memory.
4537 */
4538 IEM_MC_BEGIN(0, 2);
4539 IEM_MC_LOCAL(uint64_t, uSrc);
4540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4541
4542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4544 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4546
4547 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4548 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4549
4550 IEM_MC_ADVANCE_RIP();
4551 IEM_MC_END();
4552 }
4553 return VINF_SUCCESS;
4554}
4555
4556/* Opcode 0xf2 0x0f 0x7e - invalid */
4557
4558
4559/** Opcode 0x0f 0x7f - movq Qq, Pq */
4560FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4561{
4562 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4564 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4565 {
4566 /*
4567 * Register, register.
4568 */
4569 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4570 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4572 IEM_MC_BEGIN(0, 1);
4573 IEM_MC_LOCAL(uint64_t, u64Tmp);
4574 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4575 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4576 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4577 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4578 IEM_MC_ADVANCE_RIP();
4579 IEM_MC_END();
4580 }
4581 else
4582 {
4583 /*
4584 * Register, memory.
4585 */
4586 IEM_MC_BEGIN(0, 2);
4587 IEM_MC_LOCAL(uint64_t, u64Tmp);
4588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4589
4590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4592 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4593 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4594
4595 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4596 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4597
4598 IEM_MC_ADVANCE_RIP();
4599 IEM_MC_END();
4600 }
4601 return VINF_SUCCESS;
4602}
4603
4604/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4605FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4606{
4607 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4609 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4610 {
4611 /*
4612 * Register, register.
4613 */
4614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4615 IEM_MC_BEGIN(0, 0);
4616 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4617 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4618 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4619 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4620 IEM_MC_ADVANCE_RIP();
4621 IEM_MC_END();
4622 }
4623 else
4624 {
4625 /*
4626 * Register, memory.
4627 */
4628 IEM_MC_BEGIN(0, 2);
4629 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4631
4632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4634 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4635 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4636
4637 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4638 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4639
4640 IEM_MC_ADVANCE_RIP();
4641 IEM_MC_END();
4642 }
4643 return VINF_SUCCESS;
4644}
4645
4646/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4647FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4648{
4649 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4651 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4652 {
4653 /*
4654 * Register, register.
4655 */
4656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4657 IEM_MC_BEGIN(0, 0);
4658 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4659 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4660 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4661 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4662 IEM_MC_ADVANCE_RIP();
4663 IEM_MC_END();
4664 }
4665 else
4666 {
4667 /*
4668 * Register, memory.
4669 */
4670 IEM_MC_BEGIN(0, 2);
4671 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4673
4674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4676 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4678
4679 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4680 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4681
4682 IEM_MC_ADVANCE_RIP();
4683 IEM_MC_END();
4684 }
4685 return VINF_SUCCESS;
4686}
4687
4688/* Opcode 0xf2 0x0f 0x7f - invalid */
4689
4690
4691
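/* The Jcc Jv handlers below all follow one pattern: the operand size defaults
   to 64 bits in long mode, so only the 16-bit form fetches a 16-bit
   displacement; the 32-bit and 64-bit forms both use a sign-extended 32-bit
   displacement. */
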
4692/** Opcode 0x0f 0x80. */
4693FNIEMOP_DEF(iemOp_jo_Jv)
4694{
4695 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4696 IEMOP_HLP_MIN_386();
4697 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4698 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4699 {
4700 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4702
4703 IEM_MC_BEGIN(0, 0);
4704 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4705 IEM_MC_REL_JMP_S16(i16Imm);
4706 } IEM_MC_ELSE() {
4707 IEM_MC_ADVANCE_RIP();
4708 } IEM_MC_ENDIF();
4709 IEM_MC_END();
4710 }
4711 else
4712 {
4713 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4715
4716 IEM_MC_BEGIN(0, 0);
4717 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4718 IEM_MC_REL_JMP_S32(i32Imm);
4719 } IEM_MC_ELSE() {
4720 IEM_MC_ADVANCE_RIP();
4721 } IEM_MC_ENDIF();
4722 IEM_MC_END();
4723 }
4724 return VINF_SUCCESS;
4725}
4726
4727
4728/** Opcode 0x0f 0x81. */
4729FNIEMOP_DEF(iemOp_jno_Jv)
4730{
4731 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4732 IEMOP_HLP_MIN_386();
4733 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4734 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4735 {
4736 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4738
4739 IEM_MC_BEGIN(0, 0);
4740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4741 IEM_MC_ADVANCE_RIP();
4742 } IEM_MC_ELSE() {
4743 IEM_MC_REL_JMP_S16(i16Imm);
4744 } IEM_MC_ENDIF();
4745 IEM_MC_END();
4746 }
4747 else
4748 {
4749 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4751
4752 IEM_MC_BEGIN(0, 0);
4753 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4754 IEM_MC_ADVANCE_RIP();
4755 } IEM_MC_ELSE() {
4756 IEM_MC_REL_JMP_S32(i32Imm);
4757 } IEM_MC_ENDIF();
4758 IEM_MC_END();
4759 }
4760 return VINF_SUCCESS;
4761}
4762
4763
4764/** Opcode 0x0f 0x82. */
4765FNIEMOP_DEF(iemOp_jc_Jv)
4766{
4767 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4768 IEMOP_HLP_MIN_386();
4769 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4770 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4771 {
4772 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4774
4775 IEM_MC_BEGIN(0, 0);
4776 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4777 IEM_MC_REL_JMP_S16(i16Imm);
4778 } IEM_MC_ELSE() {
4779 IEM_MC_ADVANCE_RIP();
4780 } IEM_MC_ENDIF();
4781 IEM_MC_END();
4782 }
4783 else
4784 {
4785 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4787
4788 IEM_MC_BEGIN(0, 0);
4789 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4790 IEM_MC_REL_JMP_S32(i32Imm);
4791 } IEM_MC_ELSE() {
4792 IEM_MC_ADVANCE_RIP();
4793 } IEM_MC_ENDIF();
4794 IEM_MC_END();
4795 }
4796 return VINF_SUCCESS;
4797}
4798
4799
4800/** Opcode 0x0f 0x83. */
4801FNIEMOP_DEF(iemOp_jnc_Jv)
4802{
4803 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4804 IEMOP_HLP_MIN_386();
4805 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4806 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4807 {
4808 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4810
4811 IEM_MC_BEGIN(0, 0);
4812 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4813 IEM_MC_ADVANCE_RIP();
4814 } IEM_MC_ELSE() {
4815 IEM_MC_REL_JMP_S16(i16Imm);
4816 } IEM_MC_ENDIF();
4817 IEM_MC_END();
4818 }
4819 else
4820 {
4821 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4823
4824 IEM_MC_BEGIN(0, 0);
4825 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4826 IEM_MC_ADVANCE_RIP();
4827 } IEM_MC_ELSE() {
4828 IEM_MC_REL_JMP_S32(i32Imm);
4829 } IEM_MC_ENDIF();
4830 IEM_MC_END();
4831 }
4832 return VINF_SUCCESS;
4833}
4834
4835
4836/** Opcode 0x0f 0x84. */
4837FNIEMOP_DEF(iemOp_je_Jv)
4838{
4839 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4840 IEMOP_HLP_MIN_386();
4841 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4842 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4843 {
4844 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4846
4847 IEM_MC_BEGIN(0, 0);
4848 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4849 IEM_MC_REL_JMP_S16(i16Imm);
4850 } IEM_MC_ELSE() {
4851 IEM_MC_ADVANCE_RIP();
4852 } IEM_MC_ENDIF();
4853 IEM_MC_END();
4854 }
4855 else
4856 {
4857 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4859
4860 IEM_MC_BEGIN(0, 0);
4861 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4862 IEM_MC_REL_JMP_S32(i32Imm);
4863 } IEM_MC_ELSE() {
4864 IEM_MC_ADVANCE_RIP();
4865 } IEM_MC_ENDIF();
4866 IEM_MC_END();
4867 }
4868 return VINF_SUCCESS;
4869}
4870
4871
4872/** Opcode 0x0f 0x85. */
4873FNIEMOP_DEF(iemOp_jne_Jv)
4874{
4875 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4876 IEMOP_HLP_MIN_386();
4877 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4878 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4879 {
4880 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4882
4883 IEM_MC_BEGIN(0, 0);
4884 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4885 IEM_MC_ADVANCE_RIP();
4886 } IEM_MC_ELSE() {
4887 IEM_MC_REL_JMP_S16(i16Imm);
4888 } IEM_MC_ENDIF();
4889 IEM_MC_END();
4890 }
4891 else
4892 {
4893 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4895
4896 IEM_MC_BEGIN(0, 0);
4897 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4898 IEM_MC_ADVANCE_RIP();
4899 } IEM_MC_ELSE() {
4900 IEM_MC_REL_JMP_S32(i32Imm);
4901 } IEM_MC_ENDIF();
4902 IEM_MC_END();
4903 }
4904 return VINF_SUCCESS;
4905}
4906
4907
4908/** Opcode 0x0f 0x86. */
4909FNIEMOP_DEF(iemOp_jbe_Jv)
4910{
4911 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4912 IEMOP_HLP_MIN_386();
4913 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4914 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4915 {
4916 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4918
4919 IEM_MC_BEGIN(0, 0);
4920 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4921 IEM_MC_REL_JMP_S16(i16Imm);
4922 } IEM_MC_ELSE() {
4923 IEM_MC_ADVANCE_RIP();
4924 } IEM_MC_ENDIF();
4925 IEM_MC_END();
4926 }
4927 else
4928 {
4929 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4931
4932 IEM_MC_BEGIN(0, 0);
4933 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4934 IEM_MC_REL_JMP_S32(i32Imm);
4935 } IEM_MC_ELSE() {
4936 IEM_MC_ADVANCE_RIP();
4937 } IEM_MC_ENDIF();
4938 IEM_MC_END();
4939 }
4940 return VINF_SUCCESS;
4941}
4942
4943
4944/** Opcode 0x0f 0x87. */
4945FNIEMOP_DEF(iemOp_jnbe_Jv)
4946{
4947 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4948 IEMOP_HLP_MIN_386();
4949 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4950 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4951 {
4952 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4954
4955 IEM_MC_BEGIN(0, 0);
4956 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4957 IEM_MC_ADVANCE_RIP();
4958 } IEM_MC_ELSE() {
4959 IEM_MC_REL_JMP_S16(i16Imm);
4960 } IEM_MC_ENDIF();
4961 IEM_MC_END();
4962 }
4963 else
4964 {
4965 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4967
4968 IEM_MC_BEGIN(0, 0);
4969 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4970 IEM_MC_ADVANCE_RIP();
4971 } IEM_MC_ELSE() {
4972 IEM_MC_REL_JMP_S32(i32Imm);
4973 } IEM_MC_ENDIF();
4974 IEM_MC_END();
4975 }
4976 return VINF_SUCCESS;
4977}
4978
4979
4980/** Opcode 0x0f 0x88. */
4981FNIEMOP_DEF(iemOp_js_Jv)
4982{
4983 IEMOP_MNEMONIC(js_Jv, "js Jv");
4984 IEMOP_HLP_MIN_386();
4985 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4986 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4987 {
4988 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4990
4991 IEM_MC_BEGIN(0, 0);
4992 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4993 IEM_MC_REL_JMP_S16(i16Imm);
4994 } IEM_MC_ELSE() {
4995 IEM_MC_ADVANCE_RIP();
4996 } IEM_MC_ENDIF();
4997 IEM_MC_END();
4998 }
4999 else
5000 {
5001 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5003
5004 IEM_MC_BEGIN(0, 0);
5005 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5006 IEM_MC_REL_JMP_S32(i32Imm);
5007 } IEM_MC_ELSE() {
5008 IEM_MC_ADVANCE_RIP();
5009 } IEM_MC_ENDIF();
5010 IEM_MC_END();
5011 }
5012 return VINF_SUCCESS;
5013}
5014
5015
5016/** Opcode 0x0f 0x89. */
5017FNIEMOP_DEF(iemOp_jns_Jv)
5018{
5019 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5020 IEMOP_HLP_MIN_386();
5021 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5022 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5023 {
5024 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5026
5027 IEM_MC_BEGIN(0, 0);
5028 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5029 IEM_MC_ADVANCE_RIP();
5030 } IEM_MC_ELSE() {
5031 IEM_MC_REL_JMP_S16(i16Imm);
5032 } IEM_MC_ENDIF();
5033 IEM_MC_END();
5034 }
5035 else
5036 {
5037 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5039
5040 IEM_MC_BEGIN(0, 0);
5041 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5042 IEM_MC_ADVANCE_RIP();
5043 } IEM_MC_ELSE() {
5044 IEM_MC_REL_JMP_S32(i32Imm);
5045 } IEM_MC_ENDIF();
5046 IEM_MC_END();
5047 }
5048 return VINF_SUCCESS;
5049}
5050
5051
5052/** Opcode 0x0f 0x8a. */
5053FNIEMOP_DEF(iemOp_jp_Jv)
5054{
5055 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5056 IEMOP_HLP_MIN_386();
5057 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5058 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5059 {
5060 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5062
5063 IEM_MC_BEGIN(0, 0);
5064 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5065 IEM_MC_REL_JMP_S16(i16Imm);
5066 } IEM_MC_ELSE() {
5067 IEM_MC_ADVANCE_RIP();
5068 } IEM_MC_ENDIF();
5069 IEM_MC_END();
5070 }
5071 else
5072 {
5073 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5075
5076 IEM_MC_BEGIN(0, 0);
5077 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5078 IEM_MC_REL_JMP_S32(i32Imm);
5079 } IEM_MC_ELSE() {
5080 IEM_MC_ADVANCE_RIP();
5081 } IEM_MC_ENDIF();
5082 IEM_MC_END();
5083 }
5084 return VINF_SUCCESS;
5085}
5086
5087
5088/** Opcode 0x0f 0x8b. */
5089FNIEMOP_DEF(iemOp_jnp_Jv)
5090{
5091 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5092 IEMOP_HLP_MIN_386();
5093 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5094 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5095 {
5096 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5098
5099 IEM_MC_BEGIN(0, 0);
5100 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5101 IEM_MC_ADVANCE_RIP();
5102 } IEM_MC_ELSE() {
5103 IEM_MC_REL_JMP_S16(i16Imm);
5104 } IEM_MC_ENDIF();
5105 IEM_MC_END();
5106 }
5107 else
5108 {
5109 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5111
5112 IEM_MC_BEGIN(0, 0);
5113 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5114 IEM_MC_ADVANCE_RIP();
5115 } IEM_MC_ELSE() {
5116 IEM_MC_REL_JMP_S32(i32Imm);
5117 } IEM_MC_ENDIF();
5118 IEM_MC_END();
5119 }
5120 return VINF_SUCCESS;
5121}
5122
5123
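/* The signed conditions below compare SF against OF: after a cmp, SF != OF
   exactly when the first operand is (signed) less than the second. */
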
5124/** Opcode 0x0f 0x8c. */
5125FNIEMOP_DEF(iemOp_jl_Jv)
5126{
5127 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5128 IEMOP_HLP_MIN_386();
5129 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5130 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5131 {
5132 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5134
5135 IEM_MC_BEGIN(0, 0);
5136 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5137 IEM_MC_REL_JMP_S16(i16Imm);
5138 } IEM_MC_ELSE() {
5139 IEM_MC_ADVANCE_RIP();
5140 } IEM_MC_ENDIF();
5141 IEM_MC_END();
5142 }
5143 else
5144 {
5145 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5147
5148 IEM_MC_BEGIN(0, 0);
5149 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5150 IEM_MC_REL_JMP_S32(i32Imm);
5151 } IEM_MC_ELSE() {
5152 IEM_MC_ADVANCE_RIP();
5153 } IEM_MC_ENDIF();
5154 IEM_MC_END();
5155 }
5156 return VINF_SUCCESS;
5157}
5158
5159
5160/** Opcode 0x0f 0x8d. */
5161FNIEMOP_DEF(iemOp_jnl_Jv)
5162{
5163 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5164 IEMOP_HLP_MIN_386();
5165 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5166 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5167 {
5168 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5170
5171 IEM_MC_BEGIN(0, 0);
5172 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5173 IEM_MC_ADVANCE_RIP();
5174 } IEM_MC_ELSE() {
5175 IEM_MC_REL_JMP_S16(i16Imm);
5176 } IEM_MC_ENDIF();
5177 IEM_MC_END();
5178 }
5179 else
5180 {
5181 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5183
5184 IEM_MC_BEGIN(0, 0);
5185 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5186 IEM_MC_ADVANCE_RIP();
5187 } IEM_MC_ELSE() {
5188 IEM_MC_REL_JMP_S32(i32Imm);
5189 } IEM_MC_ENDIF();
5190 IEM_MC_END();
5191 }
5192 return VINF_SUCCESS;
5193}
5194
5195
5196/** Opcode 0x0f 0x8e. */
5197FNIEMOP_DEF(iemOp_jle_Jv)
5198{
5199 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5200 IEMOP_HLP_MIN_386();
5201 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5202 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5203 {
5204 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5206
5207 IEM_MC_BEGIN(0, 0);
5208 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5209 IEM_MC_REL_JMP_S16(i16Imm);
5210 } IEM_MC_ELSE() {
5211 IEM_MC_ADVANCE_RIP();
5212 } IEM_MC_ENDIF();
5213 IEM_MC_END();
5214 }
5215 else
5216 {
5217 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5219
5220 IEM_MC_BEGIN(0, 0);
5221 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5222 IEM_MC_REL_JMP_S32(i32Imm);
5223 } IEM_MC_ELSE() {
5224 IEM_MC_ADVANCE_RIP();
5225 } IEM_MC_ENDIF();
5226 IEM_MC_END();
5227 }
5228 return VINF_SUCCESS;
5229}
5230
5231
5232/** Opcode 0x0f 0x8f. */
5233FNIEMOP_DEF(iemOp_jnle_Jv)
5234{
5235 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5236 IEMOP_HLP_MIN_386();
5237 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5238 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5239 {
5240 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5242
5243 IEM_MC_BEGIN(0, 0);
5244 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5245 IEM_MC_ADVANCE_RIP();
5246 } IEM_MC_ELSE() {
5247 IEM_MC_REL_JMP_S16(i16Imm);
5248 } IEM_MC_ENDIF();
5249 IEM_MC_END();
5250 }
5251 else
5252 {
5253 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5255
5256 IEM_MC_BEGIN(0, 0);
5257 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5258 IEM_MC_ADVANCE_RIP();
5259 } IEM_MC_ELSE() {
5260 IEM_MC_REL_JMP_S32(i32Imm);
5261 } IEM_MC_ENDIF();
5262 IEM_MC_END();
5263 }
5264 return VINF_SUCCESS;
5265}
5266
5267
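/* The SETcc Eb handlers below share one pattern: evaluate the condition and
   store a single byte -- 1 when it holds, 0 otherwise -- to the byte-sized
   register or memory operand. */
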
5268/** Opcode 0x0f 0x90. */
5269FNIEMOP_DEF(iemOp_seto_Eb)
5270{
5271 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5272 IEMOP_HLP_MIN_386();
5273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5274
5275 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5276 * any way. AMD says it's "unused", whatever that means. We're
5277 * ignoring for now. */
5278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5279 {
5280 /* register target */
5281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5282 IEM_MC_BEGIN(0, 0);
5283 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5284 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5285 } IEM_MC_ELSE() {
5286 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5287 } IEM_MC_ENDIF();
5288 IEM_MC_ADVANCE_RIP();
5289 IEM_MC_END();
5290 }
5291 else
5292 {
5293 /* memory target */
5294 IEM_MC_BEGIN(0, 1);
5295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5298 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5299 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5300 } IEM_MC_ELSE() {
5301 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5302 } IEM_MC_ENDIF();
5303 IEM_MC_ADVANCE_RIP();
5304 IEM_MC_END();
5305 }
5306 return VINF_SUCCESS;
5307}
5308
5309
5310/** Opcode 0x0f 0x91. */
5311FNIEMOP_DEF(iemOp_setno_Eb)
5312{
5313 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5314 IEMOP_HLP_MIN_386();
5315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5316
5317 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5318 * any way. AMD says it's "unused", whatever that means. We're
5319 * ignoring for now. */
5320 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5321 {
5322 /* register target */
5323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5324 IEM_MC_BEGIN(0, 0);
5325 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5326 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5327 } IEM_MC_ELSE() {
5328 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5329 } IEM_MC_ENDIF();
5330 IEM_MC_ADVANCE_RIP();
5331 IEM_MC_END();
5332 }
5333 else
5334 {
5335 /* memory target */
5336 IEM_MC_BEGIN(0, 1);
5337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5340 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5341 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5342 } IEM_MC_ELSE() {
5343 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5344 } IEM_MC_ENDIF();
5345 IEM_MC_ADVANCE_RIP();
5346 IEM_MC_END();
5347 }
5348 return VINF_SUCCESS;
5349}
5350
5351
5352/** Opcode 0x0f 0x92. */
5353FNIEMOP_DEF(iemOp_setc_Eb)
5354{
5355 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5356 IEMOP_HLP_MIN_386();
5357 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5358
5359 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5360 * any way. AMD says it's "unused", whatever that means. We're
5361 * ignoring for now. */
5362 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5363 {
5364 /* register target */
5365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5366 IEM_MC_BEGIN(0, 0);
5367 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5368 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5369 } IEM_MC_ELSE() {
5370 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5371 } IEM_MC_ENDIF();
5372 IEM_MC_ADVANCE_RIP();
5373 IEM_MC_END();
5374 }
5375 else
5376 {
5377 /* memory target */
5378 IEM_MC_BEGIN(0, 1);
5379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5382 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5383 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5384 } IEM_MC_ELSE() {
5385 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5386 } IEM_MC_ENDIF();
5387 IEM_MC_ADVANCE_RIP();
5388 IEM_MC_END();
5389 }
5390 return VINF_SUCCESS;
5391}
5392
5393
5394/** Opcode 0x0f 0x93. */
5395FNIEMOP_DEF(iemOp_setnc_Eb)
5396{
5397 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5398 IEMOP_HLP_MIN_386();
5399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5400
5401 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5402 * any way. AMD says it's "unused", whatever that means. We're
5403 * ignoring for now. */
5404 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5405 {
5406 /* register target */
5407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5408 IEM_MC_BEGIN(0, 0);
5409 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5410 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5411 } IEM_MC_ELSE() {
5412 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5413 } IEM_MC_ENDIF();
5414 IEM_MC_ADVANCE_RIP();
5415 IEM_MC_END();
5416 }
5417 else
5418 {
5419 /* memory target */
5420 IEM_MC_BEGIN(0, 1);
5421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5424 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5425 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5426 } IEM_MC_ELSE() {
5427 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5428 } IEM_MC_ENDIF();
5429 IEM_MC_ADVANCE_RIP();
5430 IEM_MC_END();
5431 }
5432 return VINF_SUCCESS;
5433}
5434
5435
5436/** Opcode 0x0f 0x94. */
5437FNIEMOP_DEF(iemOp_sete_Eb)
5438{
5439 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5440 IEMOP_HLP_MIN_386();
5441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5442
5443 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5444 * any way. AMD says it's "unused", whatever that means. We're
5445 * ignoring for now. */
5446 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5447 {
5448 /* register target */
5449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5450 IEM_MC_BEGIN(0, 0);
5451 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5452 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5453 } IEM_MC_ELSE() {
5454 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5455 } IEM_MC_ENDIF();
5456 IEM_MC_ADVANCE_RIP();
5457 IEM_MC_END();
5458 }
5459 else
5460 {
5461 /* memory target */
5462 IEM_MC_BEGIN(0, 1);
5463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5466 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5467 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5468 } IEM_MC_ELSE() {
5469 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5470 } IEM_MC_ENDIF();
5471 IEM_MC_ADVANCE_RIP();
5472 IEM_MC_END();
5473 }
5474 return VINF_SUCCESS;
5475}
5476
5477
5478/** Opcode 0x0f 0x95. */
5479FNIEMOP_DEF(iemOp_setne_Eb)
5480{
5481 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5482 IEMOP_HLP_MIN_386();
5483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5484
5485 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5486 * any way. AMD says it's "unused", whatever that means. We're
5487 * ignoring for now. */
5488 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5489 {
5490 /* register target */
5491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5492 IEM_MC_BEGIN(0, 0);
5493 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5494 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5495 } IEM_MC_ELSE() {
5496 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5497 } IEM_MC_ENDIF();
5498 IEM_MC_ADVANCE_RIP();
5499 IEM_MC_END();
5500 }
5501 else
5502 {
5503 /* memory target */
5504 IEM_MC_BEGIN(0, 1);
5505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5508 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5509 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5510 } IEM_MC_ELSE() {
5511 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5512 } IEM_MC_ENDIF();
5513 IEM_MC_ADVANCE_RIP();
5514 IEM_MC_END();
5515 }
5516 return VINF_SUCCESS;
5517}
5518
5519
5520/** Opcode 0x0f 0x96. */
5521FNIEMOP_DEF(iemOp_setbe_Eb)
5522{
5523 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5524 IEMOP_HLP_MIN_386();
5525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5526
5527 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5528 * any way. AMD says it's "unused", whatever that means. We're
5529 * ignoring for now. */
5530 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5531 {
5532 /* register target */
5533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5534 IEM_MC_BEGIN(0, 0);
5535 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5536 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5537 } IEM_MC_ELSE() {
5538 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5539 } IEM_MC_ENDIF();
5540 IEM_MC_ADVANCE_RIP();
5541 IEM_MC_END();
5542 }
5543 else
5544 {
5545 /* memory target */
5546 IEM_MC_BEGIN(0, 1);
5547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5550 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5551 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5552 } IEM_MC_ELSE() {
5553 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5554 } IEM_MC_ENDIF();
5555 IEM_MC_ADVANCE_RIP();
5556 IEM_MC_END();
5557 }
5558 return VINF_SUCCESS;
5559}
5560
5561
5562/** Opcode 0x0f 0x97. */
5563FNIEMOP_DEF(iemOp_setnbe_Eb)
5564{
5565 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5566 IEMOP_HLP_MIN_386();
5567 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5568
5569 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5570 * any way. AMD says it's "unused", whatever that means. We're
 5571 * ignoring it for now. */
5572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5573 {
5574 /* register target */
5575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5576 IEM_MC_BEGIN(0, 0);
5577 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5578 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5579 } IEM_MC_ELSE() {
5580 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5581 } IEM_MC_ENDIF();
5582 IEM_MC_ADVANCE_RIP();
5583 IEM_MC_END();
5584 }
5585 else
5586 {
5587 /* memory target */
5588 IEM_MC_BEGIN(0, 1);
5589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5592 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5593 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5594 } IEM_MC_ELSE() {
5595 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5596 } IEM_MC_ENDIF();
5597 IEM_MC_ADVANCE_RIP();
5598 IEM_MC_END();
5599 }
5600 return VINF_SUCCESS;
5601}
5602
5603
5604/** Opcode 0x0f 0x98. */
5605FNIEMOP_DEF(iemOp_sets_Eb)
5606{
5607 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5608 IEMOP_HLP_MIN_386();
5609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5610
5611 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5612 * any way. AMD says it's "unused", whatever that means. We're
 5613 * ignoring it for now. */
5614 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5615 {
5616 /* register target */
5617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5618 IEM_MC_BEGIN(0, 0);
5619 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5620 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5621 } IEM_MC_ELSE() {
5622 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5623 } IEM_MC_ENDIF();
5624 IEM_MC_ADVANCE_RIP();
5625 IEM_MC_END();
5626 }
5627 else
5628 {
5629 /* memory target */
5630 IEM_MC_BEGIN(0, 1);
5631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5635 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5636 } IEM_MC_ELSE() {
5637 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5638 } IEM_MC_ENDIF();
5639 IEM_MC_ADVANCE_RIP();
5640 IEM_MC_END();
5641 }
5642 return VINF_SUCCESS;
5643}
5644
5645
5646/** Opcode 0x0f 0x99. */
5647FNIEMOP_DEF(iemOp_setns_Eb)
5648{
5649 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5650 IEMOP_HLP_MIN_386();
5651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5652
5653 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5654 * any way. AMD says it's "unused", whatever that means. We're
 5655 * ignoring it for now. */
5656 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5657 {
5658 /* register target */
5659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5660 IEM_MC_BEGIN(0, 0);
5661 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5662 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5663 } IEM_MC_ELSE() {
5664 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5665 } IEM_MC_ENDIF();
5666 IEM_MC_ADVANCE_RIP();
5667 IEM_MC_END();
5668 }
5669 else
5670 {
5671 /* memory target */
5672 IEM_MC_BEGIN(0, 1);
5673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5677 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5678 } IEM_MC_ELSE() {
5679 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5680 } IEM_MC_ENDIF();
5681 IEM_MC_ADVANCE_RIP();
5682 IEM_MC_END();
5683 }
5684 return VINF_SUCCESS;
5685}
5686
5687
5688/** Opcode 0x0f 0x9a. */
5689FNIEMOP_DEF(iemOp_setp_Eb)
5690{
5691 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5692 IEMOP_HLP_MIN_386();
5693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5694
5695 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5696 * any way. AMD says it's "unused", whatever that means. We're
 5697 * ignoring it for now. */
5698 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5699 {
5700 /* register target */
5701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5702 IEM_MC_BEGIN(0, 0);
5703 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5704 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5705 } IEM_MC_ELSE() {
5706 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5707 } IEM_MC_ENDIF();
5708 IEM_MC_ADVANCE_RIP();
5709 IEM_MC_END();
5710 }
5711 else
5712 {
5713 /* memory target */
5714 IEM_MC_BEGIN(0, 1);
5715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5719 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5720 } IEM_MC_ELSE() {
5721 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5722 } IEM_MC_ENDIF();
5723 IEM_MC_ADVANCE_RIP();
5724 IEM_MC_END();
5725 }
5726 return VINF_SUCCESS;
5727}
5728
5729
5730/** Opcode 0x0f 0x9b. */
5731FNIEMOP_DEF(iemOp_setnp_Eb)
5732{
5733 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5734 IEMOP_HLP_MIN_386();
5735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5736
5737 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5738 * any way. AMD says it's "unused", whatever that means. We're
 5739 * ignoring it for now. */
5740 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5741 {
5742 /* register target */
5743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5744 IEM_MC_BEGIN(0, 0);
5745 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5746 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5747 } IEM_MC_ELSE() {
5748 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5749 } IEM_MC_ENDIF();
5750 IEM_MC_ADVANCE_RIP();
5751 IEM_MC_END();
5752 }
5753 else
5754 {
5755 /* memory target */
5756 IEM_MC_BEGIN(0, 1);
5757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5761 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5762 } IEM_MC_ELSE() {
5763 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5764 } IEM_MC_ENDIF();
5765 IEM_MC_ADVANCE_RIP();
5766 IEM_MC_END();
5767 }
5768 return VINF_SUCCESS;
5769}
5770
5771
5772/** Opcode 0x0f 0x9c. */
5773FNIEMOP_DEF(iemOp_setl_Eb)
5774{
5775 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5776 IEMOP_HLP_MIN_386();
5777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5778
5779 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5780 * any way. AMD says it's "unused", whatever that means. We're
 5781 * ignoring it for now. */
5782 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5783 {
5784 /* register target */
5785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5786 IEM_MC_BEGIN(0, 0);
5787 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5788 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5789 } IEM_MC_ELSE() {
5790 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5791 } IEM_MC_ENDIF();
5792 IEM_MC_ADVANCE_RIP();
5793 IEM_MC_END();
5794 }
5795 else
5796 {
5797 /* memory target */
5798 IEM_MC_BEGIN(0, 1);
5799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5802 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5803 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5804 } IEM_MC_ELSE() {
5805 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5806 } IEM_MC_ENDIF();
5807 IEM_MC_ADVANCE_RIP();
5808 IEM_MC_END();
5809 }
5810 return VINF_SUCCESS;
5811}
5812
5813
5814/** Opcode 0x0f 0x9d. */
5815FNIEMOP_DEF(iemOp_setnl_Eb)
5816{
5817 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5818 IEMOP_HLP_MIN_386();
5819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5820
5821 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5822 * any way. AMD says it's "unused", whatever that means. We're
 5823 * ignoring it for now. */
5824 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5825 {
5826 /* register target */
5827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5828 IEM_MC_BEGIN(0, 0);
5829 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5830 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5831 } IEM_MC_ELSE() {
5832 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5833 } IEM_MC_ENDIF();
5834 IEM_MC_ADVANCE_RIP();
5835 IEM_MC_END();
5836 }
5837 else
5838 {
5839 /* memory target */
5840 IEM_MC_BEGIN(0, 1);
5841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5844 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5845 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5846 } IEM_MC_ELSE() {
5847 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5848 } IEM_MC_ENDIF();
5849 IEM_MC_ADVANCE_RIP();
5850 IEM_MC_END();
5851 }
5852 return VINF_SUCCESS;
5853}
5854
5855
5856/** Opcode 0x0f 0x9e. */
5857FNIEMOP_DEF(iemOp_setle_Eb)
5858{
5859 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5860 IEMOP_HLP_MIN_386();
5861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5862
5863 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5864 * any way. AMD says it's "unused", whatever that means. We're
 5865 * ignoring it for now. */
5866 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5867 {
5868 /* register target */
5869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5870 IEM_MC_BEGIN(0, 0);
5871 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5872 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5873 } IEM_MC_ELSE() {
5874 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5875 } IEM_MC_ENDIF();
5876 IEM_MC_ADVANCE_RIP();
5877 IEM_MC_END();
5878 }
5879 else
5880 {
5881 /* memory target */
5882 IEM_MC_BEGIN(0, 1);
5883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5886 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5887 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5888 } IEM_MC_ELSE() {
5889 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5890 } IEM_MC_ENDIF();
5891 IEM_MC_ADVANCE_RIP();
5892 IEM_MC_END();
5893 }
5894 return VINF_SUCCESS;
5895}
5896
5897
5898/** Opcode 0x0f 0x9f. */
5899FNIEMOP_DEF(iemOp_setnle_Eb)
5900{
5901 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5902 IEMOP_HLP_MIN_386();
5903 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5904
5905 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5906 * any way. AMD says it's "unused", whatever that means. We're
 5907 * ignoring it for now. */
5908 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5909 {
5910 /* register target */
5911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5912 IEM_MC_BEGIN(0, 0);
5913 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5914 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5915 } IEM_MC_ELSE() {
5916 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5917 } IEM_MC_ENDIF();
5918 IEM_MC_ADVANCE_RIP();
5919 IEM_MC_END();
5920 }
5921 else
5922 {
5923 /* memory target */
5924 IEM_MC_BEGIN(0, 1);
5925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5928 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5929 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5930 } IEM_MC_ELSE() {
5931 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5932 } IEM_MC_ENDIF();
5933 IEM_MC_ADVANCE_RIP();
5934 IEM_MC_END();
5935 }
5936 return VINF_SUCCESS;
5937}
5938
5939
5940/**
5941 * Common 'push segment-register' helper.
5942 */
5943FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5944{
5945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 5946 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS pushes reach this in 64-bit mode. */
5947 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5948
5949 switch (pVCpu->iem.s.enmEffOpSize)
5950 {
5951 case IEMMODE_16BIT:
5952 IEM_MC_BEGIN(0, 1);
5953 IEM_MC_LOCAL(uint16_t, u16Value);
5954 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5955 IEM_MC_PUSH_U16(u16Value);
5956 IEM_MC_ADVANCE_RIP();
5957 IEM_MC_END();
5958 break;
5959
5960 case IEMMODE_32BIT:
5961 IEM_MC_BEGIN(0, 1);
5962 IEM_MC_LOCAL(uint32_t, u32Value);
5963 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
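    /* A dedicated SREG push is used here, presumably to model the
       documented behavior where a 32-bit push of a segment register may
       write only the low 16 bits of the stack slot, leaving the upper
       word untouched. */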
5964 IEM_MC_PUSH_U32_SREG(u32Value);
5965 IEM_MC_ADVANCE_RIP();
5966 IEM_MC_END();
5967 break;
5968
5969 case IEMMODE_64BIT:
5970 IEM_MC_BEGIN(0, 1);
5971 IEM_MC_LOCAL(uint64_t, u64Value);
5972 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5973 IEM_MC_PUSH_U64(u64Value);
5974 IEM_MC_ADVANCE_RIP();
5975 IEM_MC_END();
5976 break;
5977 }
5978
5979 return VINF_SUCCESS;
5980}
5981
5982
5983/** Opcode 0x0f 0xa0. */
5984FNIEMOP_DEF(iemOp_push_fs)
5985{
5986 IEMOP_MNEMONIC(push_fs, "push fs");
5987 IEMOP_HLP_MIN_386();
5988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5989 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5990}
5991
5992
5993/** Opcode 0x0f 0xa1. */
5994FNIEMOP_DEF(iemOp_pop_fs)
5995{
5996 IEMOP_MNEMONIC(pop_fs, "pop fs");
5997 IEMOP_HLP_MIN_386();
5998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5999 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6000}
6001
6002
6003/** Opcode 0x0f 0xa2. */
6004FNIEMOP_DEF(iemOp_cpuid)
6005{
6006 IEMOP_MNEMONIC(cpuid, "cpuid");
 6007 IEMOP_HLP_MIN_486(); /* not on all 486s. */
6008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6009 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6010}
6011
6012
6013/**
6014 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6015 * iemOp_bts_Ev_Gv.
6016 */
6017FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6018{
6019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6020 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6021
6022 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6023 {
6024 /* register destination. */
6025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6026 switch (pVCpu->iem.s.enmEffOpSize)
6027 {
6028 case IEMMODE_16BIT:
6029 IEM_MC_BEGIN(3, 0);
6030 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6031 IEM_MC_ARG(uint16_t, u16Src, 1);
6032 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6033
6034 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6035 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6036 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6037 IEM_MC_REF_EFLAGS(pEFlags);
6038 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6039
6040 IEM_MC_ADVANCE_RIP();
6041 IEM_MC_END();
6042 return VINF_SUCCESS;
6043
6044 case IEMMODE_32BIT:
6045 IEM_MC_BEGIN(3, 0);
6046 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6047 IEM_MC_ARG(uint32_t, u32Src, 1);
6048 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6049
6050 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6051 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6052 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6053 IEM_MC_REF_EFLAGS(pEFlags);
6054 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6055
6056 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6057 IEM_MC_ADVANCE_RIP();
6058 IEM_MC_END();
6059 return VINF_SUCCESS;
6060
6061 case IEMMODE_64BIT:
6062 IEM_MC_BEGIN(3, 0);
6063 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6064 IEM_MC_ARG(uint64_t, u64Src, 1);
6065 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6066
6067 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6068 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6069 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6070 IEM_MC_REF_EFLAGS(pEFlags);
6071 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6072
6073 IEM_MC_ADVANCE_RIP();
6074 IEM_MC_END();
6075 return VINF_SUCCESS;
6076
6077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6078 }
6079 }
6080 else
6081 {
6082 /* memory destination. */
6083
6084 uint32_t fAccess;
6085 if (pImpl->pfnLockedU16)
6086 fAccess = IEM_ACCESS_DATA_RW;
6087 else /* BT */
6088 fAccess = IEM_ACCESS_DATA_R;
6089
6090 /** @todo test negative bit offsets! */
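    /* The memory forms treat the bit offset in Gv as a signed offset
       from the memory operand: the adjustment below arithmetically
       shifts the offset right by 4/5/6 and scales it by 2/4/8 to move
       GCPtrEffDst to the right 16/32/64-bit unit, keeping the low
       4/5/6 bits as the bit number within that unit. E.g. 'bt word
       [m], 17' tests bit 1 of the word at m+2, while an offset of -1
       tests the top bit of the word at m-2. */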
6091 switch (pVCpu->iem.s.enmEffOpSize)
6092 {
6093 case IEMMODE_16BIT:
6094 IEM_MC_BEGIN(3, 2);
6095 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6096 IEM_MC_ARG(uint16_t, u16Src, 1);
6097 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6099 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6100
6101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6102 if (pImpl->pfnLockedU16)
6103 IEMOP_HLP_DONE_DECODING();
6104 else
6105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6106 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6107 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6108 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6109 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6110 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6111 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6112 IEM_MC_FETCH_EFLAGS(EFlags);
6113
6114 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6115 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6116 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6117 else
6118 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6119 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6120
6121 IEM_MC_COMMIT_EFLAGS(EFlags);
6122 IEM_MC_ADVANCE_RIP();
6123 IEM_MC_END();
6124 return VINF_SUCCESS;
6125
6126 case IEMMODE_32BIT:
6127 IEM_MC_BEGIN(3, 2);
6128 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6129 IEM_MC_ARG(uint32_t, u32Src, 1);
6130 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6132 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6133
6134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6135 if (pImpl->pfnLockedU16)
6136 IEMOP_HLP_DONE_DECODING();
6137 else
6138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6139 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6140 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6141 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6142 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6143 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6144 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6145 IEM_MC_FETCH_EFLAGS(EFlags);
6146
6147 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6148 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6149 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6150 else
6151 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6152 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6153
6154 IEM_MC_COMMIT_EFLAGS(EFlags);
6155 IEM_MC_ADVANCE_RIP();
6156 IEM_MC_END();
6157 return VINF_SUCCESS;
6158
6159 case IEMMODE_64BIT:
6160 IEM_MC_BEGIN(3, 2);
6161 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6162 IEM_MC_ARG(uint64_t, u64Src, 1);
6163 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6165 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6166
6167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6168 if (pImpl->pfnLockedU16)
6169 IEMOP_HLP_DONE_DECODING();
6170 else
6171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6172 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6173 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6174 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6175 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6176 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6177 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6178 IEM_MC_FETCH_EFLAGS(EFlags);
6179
6180 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6181 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6182 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6183 else
6184 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6185 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6186
6187 IEM_MC_COMMIT_EFLAGS(EFlags);
6188 IEM_MC_ADVANCE_RIP();
6189 IEM_MC_END();
6190 return VINF_SUCCESS;
6191
6192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6193 }
6194 }
6195}
6196
6197
6198/** Opcode 0x0f 0xa3. */
6199FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6200{
6201 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6202 IEMOP_HLP_MIN_386();
6203 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6204}
6205
6206
6207/**
6208 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6209 */
6210FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6211{
6212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6213 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6214
6215 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6216 {
6217 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6219
6220 switch (pVCpu->iem.s.enmEffOpSize)
6221 {
6222 case IEMMODE_16BIT:
6223 IEM_MC_BEGIN(4, 0);
6224 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6225 IEM_MC_ARG(uint16_t, u16Src, 1);
6226 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6227 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6228
6229 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6230 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6231 IEM_MC_REF_EFLAGS(pEFlags);
6232 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6233
6234 IEM_MC_ADVANCE_RIP();
6235 IEM_MC_END();
6236 return VINF_SUCCESS;
6237
6238 case IEMMODE_32BIT:
6239 IEM_MC_BEGIN(4, 0);
6240 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6241 IEM_MC_ARG(uint32_t, u32Src, 1);
6242 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6243 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6244
6245 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6246 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6247 IEM_MC_REF_EFLAGS(pEFlags);
6248 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6249
6250 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6251 IEM_MC_ADVANCE_RIP();
6252 IEM_MC_END();
6253 return VINF_SUCCESS;
6254
6255 case IEMMODE_64BIT:
6256 IEM_MC_BEGIN(4, 0);
6257 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6258 IEM_MC_ARG(uint64_t, u64Src, 1);
6259 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6260 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6261
6262 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6263 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6264 IEM_MC_REF_EFLAGS(pEFlags);
6265 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6266
6267 IEM_MC_ADVANCE_RIP();
6268 IEM_MC_END();
6269 return VINF_SUCCESS;
6270
6271 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6272 }
6273 }
6274 else
6275 {
6276 switch (pVCpu->iem.s.enmEffOpSize)
6277 {
6278 case IEMMODE_16BIT:
6279 IEM_MC_BEGIN(4, 2);
6280 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6281 IEM_MC_ARG(uint16_t, u16Src, 1);
6282 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6283 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6285
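    /* Note: the effective address is calculated with a trailing
       immediate size of 1 (the third argument) so that RIP-relative
       addressing accounts for the Ib byte still to be fetched. */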
6286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6287 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6288 IEM_MC_ASSIGN(cShiftArg, cShift);
6289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6290 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6291 IEM_MC_FETCH_EFLAGS(EFlags);
6292 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6293 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6294
6295 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6296 IEM_MC_COMMIT_EFLAGS(EFlags);
6297 IEM_MC_ADVANCE_RIP();
6298 IEM_MC_END();
6299 return VINF_SUCCESS;
6300
6301 case IEMMODE_32BIT:
6302 IEM_MC_BEGIN(4, 2);
6303 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6304 IEM_MC_ARG(uint32_t, u32Src, 1);
6305 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6308
6309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6310 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6311 IEM_MC_ASSIGN(cShiftArg, cShift);
6312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6313 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6314 IEM_MC_FETCH_EFLAGS(EFlags);
6315 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6316 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6317
6318 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6319 IEM_MC_COMMIT_EFLAGS(EFlags);
6320 IEM_MC_ADVANCE_RIP();
6321 IEM_MC_END();
6322 return VINF_SUCCESS;
6323
6324 case IEMMODE_64BIT:
6325 IEM_MC_BEGIN(4, 2);
6326 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6327 IEM_MC_ARG(uint64_t, u64Src, 1);
6328 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6329 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6331
6332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6333 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6334 IEM_MC_ASSIGN(cShiftArg, cShift);
6335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6336 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6337 IEM_MC_FETCH_EFLAGS(EFlags);
6338 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6339 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6340
6341 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6342 IEM_MC_COMMIT_EFLAGS(EFlags);
6343 IEM_MC_ADVANCE_RIP();
6344 IEM_MC_END();
6345 return VINF_SUCCESS;
6346
6347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6348 }
6349 }
6350}
6351
6352
6353/**
6354 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6355 */
6356FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6357{
6358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6359 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6360
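    /* The shift count is read from CL; masking it to the operand width
       (mod 32, or mod 64 for 64-bit operands) is presumably left to
       the assembly workers, as no masking is done here. */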
6361 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6362 {
6363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6364
6365 switch (pVCpu->iem.s.enmEffOpSize)
6366 {
6367 case IEMMODE_16BIT:
6368 IEM_MC_BEGIN(4, 0);
6369 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6370 IEM_MC_ARG(uint16_t, u16Src, 1);
6371 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6372 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6373
6374 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6375 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6376 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6377 IEM_MC_REF_EFLAGS(pEFlags);
6378 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6379
6380 IEM_MC_ADVANCE_RIP();
6381 IEM_MC_END();
6382 return VINF_SUCCESS;
6383
6384 case IEMMODE_32BIT:
6385 IEM_MC_BEGIN(4, 0);
6386 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6387 IEM_MC_ARG(uint32_t, u32Src, 1);
6388 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6389 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6390
6391 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6392 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6393 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6394 IEM_MC_REF_EFLAGS(pEFlags);
6395 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6396
6397 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6398 IEM_MC_ADVANCE_RIP();
6399 IEM_MC_END();
6400 return VINF_SUCCESS;
6401
6402 case IEMMODE_64BIT:
6403 IEM_MC_BEGIN(4, 0);
6404 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6405 IEM_MC_ARG(uint64_t, u64Src, 1);
6406 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6407 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6408
6409 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6410 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6411 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6412 IEM_MC_REF_EFLAGS(pEFlags);
6413 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6414
6415 IEM_MC_ADVANCE_RIP();
6416 IEM_MC_END();
6417 return VINF_SUCCESS;
6418
6419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6420 }
6421 }
6422 else
6423 {
6424 switch (pVCpu->iem.s.enmEffOpSize)
6425 {
6426 case IEMMODE_16BIT:
6427 IEM_MC_BEGIN(4, 2);
6428 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6429 IEM_MC_ARG(uint16_t, u16Src, 1);
6430 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6431 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6433
6434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6436 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6437 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6438 IEM_MC_FETCH_EFLAGS(EFlags);
6439 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6440 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6441
6442 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6443 IEM_MC_COMMIT_EFLAGS(EFlags);
6444 IEM_MC_ADVANCE_RIP();
6445 IEM_MC_END();
6446 return VINF_SUCCESS;
6447
6448 case IEMMODE_32BIT:
6449 IEM_MC_BEGIN(4, 2);
6450 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6451 IEM_MC_ARG(uint32_t, u32Src, 1);
6452 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6453 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6455
6456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6458 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6459 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6460 IEM_MC_FETCH_EFLAGS(EFlags);
6461 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6462 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6463
6464 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6465 IEM_MC_COMMIT_EFLAGS(EFlags);
6466 IEM_MC_ADVANCE_RIP();
6467 IEM_MC_END();
6468 return VINF_SUCCESS;
6469
6470 case IEMMODE_64BIT:
6471 IEM_MC_BEGIN(4, 2);
6472 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6473 IEM_MC_ARG(uint64_t, u64Src, 1);
6474 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6475 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6477
6478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6480 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6481 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6482 IEM_MC_FETCH_EFLAGS(EFlags);
6483 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6484 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6485
6486 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6487 IEM_MC_COMMIT_EFLAGS(EFlags);
6488 IEM_MC_ADVANCE_RIP();
6489 IEM_MC_END();
6490 return VINF_SUCCESS;
6491
6492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6493 }
6494 }
6495}
6496
6497
6499/** Opcode 0x0f 0xa4. */
6500FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6501{
6502 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6503 IEMOP_HLP_MIN_386();
6504 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6505}
6506
6507
6508/** Opcode 0x0f 0xa5. */
6509FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6510{
6511 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6512 IEMOP_HLP_MIN_386();
6513 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6514}
6515
6516
6517/** Opcode 0x0f 0xa8. */
6518FNIEMOP_DEF(iemOp_push_gs)
6519{
6520 IEMOP_MNEMONIC(push_gs, "push gs");
6521 IEMOP_HLP_MIN_386();
6522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6523 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6524}
6525
6526
6527/** Opcode 0x0f 0xa9. */
6528FNIEMOP_DEF(iemOp_pop_gs)
6529{
6530 IEMOP_MNEMONIC(pop_gs, "pop gs");
6531 IEMOP_HLP_MIN_386();
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6533 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6534}
6535
6536
6537/** Opcode 0x0f 0xaa. */
6538FNIEMOP_DEF(iemOp_rsm)
6539{
6540 IEMOP_MNEMONIC(rsm, "rsm");
6541 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
 6542 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
 6543 * intercept); remember the IEMOP_HLP_MIN_386() check when implementing. */
6544 IEMOP_BITCH_ABOUT_STUB();
6545 return IEMOP_RAISE_INVALID_OPCODE();
6546}
 6547
6550
6551/** Opcode 0x0f 0xab. */
6552FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6553{
6554 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6555 IEMOP_HLP_MIN_386();
6556 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6557}
6558
6559
6560/** Opcode 0x0f 0xac. */
6561FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6562{
6563 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6564 IEMOP_HLP_MIN_386();
6565 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6566}
6567
6568
6569/** Opcode 0x0f 0xad. */
6570FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6571{
6572 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6573 IEMOP_HLP_MIN_386();
6574 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6575}
6576
6577
6578/** Opcode 0x0f 0xae mem/0. */
6579FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6580{
6581 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6582 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6583 return IEMOP_RAISE_INVALID_OPCODE();
6584
6585 IEM_MC_BEGIN(3, 1);
6586 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6587 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6588 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6591 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6592 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6593 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6594 IEM_MC_END();
6595 return VINF_SUCCESS;
6596}
6597
6598
6599/** Opcode 0x0f 0xae mem/1. */
6600FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6601{
6602 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6603 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6604 return IEMOP_RAISE_INVALID_OPCODE();
6605
6606 IEM_MC_BEGIN(3, 1);
6607 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6608 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6609 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6612 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6613 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6614 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6615 IEM_MC_END();
6616 return VINF_SUCCESS;
6617}
6618
6619
6620/**
6621 * @opmaps grp15
6622 * @opcode !11/2
6623 * @oppfx none
6624 * @opcpuid sse
6625 * @opgroup og_sse_mxcsrsm
6626 * @opxcpttype 5
6627 * @optest op1=0 -> mxcsr=0
6628 * @optest op1=0x2083 -> mxcsr=0x2083
6629 * @optest op1=0xfffffffe -> value.xcpt=0xd
6630 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6631 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6632 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6633 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6634 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6635 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6636 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6637 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6638 */
6639FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6640{
6641 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6642 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6643 return IEMOP_RAISE_INVALID_OPCODE();
6644
6645 IEM_MC_BEGIN(2, 0);
6646 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6647 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 6650 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR. */
6651 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6652 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6653 IEM_MC_END();
6654 return VINF_SUCCESS;
6655}
6656
6657
6658/**
6659 * @opmaps grp15
6660 * @opcode !11/3
6661 * @oppfx none
6662 * @opcpuid sse
6663 * @opgroup og_sse_mxcsrsm
6664 * @opxcpttype 5
6665 * @optest mxcsr=0 -> op1=0
6666 * @optest mxcsr=0x2083 -> op1=0x2083
6667 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6668 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6669 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6670 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6671 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6672 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6673 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6674 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6675 */
6676FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6677{
6678 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6679 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6680 return IEMOP_RAISE_INVALID_OPCODE();
6681
6682 IEM_MC_BEGIN(2, 0);
6683 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6684 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6687 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6688 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6689 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6690 IEM_MC_END();
6691 return VINF_SUCCESS;
6692}
6693
6694
6695/**
6696 * @opmaps grp15
6697 * @opcode !11/4
6698 * @oppfx none
6699 * @opcpuid xsave
6700 * @opgroup og_system
6701 * @opxcpttype none
6702 */
6703FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6704{
6705 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6706 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6707 return IEMOP_RAISE_INVALID_OPCODE();
6708
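    /* The EDX:EAX component mask and its interaction with XCR0 are not
       decoded here; they are left to iemCImpl_xsave. */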
6709 IEM_MC_BEGIN(3, 0);
6710 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6711 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6712 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6715 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6716 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6717 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6718 IEM_MC_END();
6719 return VINF_SUCCESS;
6720}
6721
6722
6723/**
6724 * @opmaps grp15
6725 * @opcode !11/5
6726 * @oppfx none
6727 * @opcpuid xsave
6728 * @opgroup og_system
6729 * @opxcpttype none
6730 */
6731FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6732{
6733 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6734 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6735 return IEMOP_RAISE_INVALID_OPCODE();
6736
6737 IEM_MC_BEGIN(3, 0);
6738 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6739 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6740 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 6743 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, like fxrstor above. */
6744 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6745 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6746 IEM_MC_END();
6747 return VINF_SUCCESS;
6748}
 6749

6750/** Opcode 0x0f 0xae mem/6. */
6751FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6752
6753/**
6754 * @opmaps grp15
6755 * @opcode !11/7
6756 * @oppfx none
6757 * @opcpuid clfsh
6758 * @opgroup og_cachectl
6759 * @optest op1=1 ->
6760 */
6761FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6762{
6763 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6764 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6765 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6766
6767 IEM_MC_BEGIN(2, 0);
6768 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6769 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6772 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6773 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6774 IEM_MC_END();
6775 return VINF_SUCCESS;
6776}
 6777

6778/**
6779 * @opmaps grp15
6780 * @opcode !11/7
6781 * @oppfx 0x66
6782 * @opcpuid clflushopt
6783 * @opgroup og_cachectl
6784 * @optest op1=1 ->
6785 */
6786FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6787{
6788 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6789 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6790 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6791
6792 IEM_MC_BEGIN(2, 0);
6793 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6794 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6797 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6798 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6799 IEM_MC_END();
6800 return VINF_SUCCESS;
6801}
6802
6803
6804/** Opcode 0x0f 0xae 11b/5. */
6805FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6806{
6807 RT_NOREF_PV(bRm);
6808 IEMOP_MNEMONIC(lfence, "lfence");
6809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6810 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6811 return IEMOP_RAISE_INVALID_OPCODE();
6812
6813 IEM_MC_BEGIN(0, 0);
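    /* If the host CPU supports SSE2 the real LFENCE instruction is
       executed; otherwise iemAImpl_alt_mem_fence supplies a substitute
       fence (presumably a locked operation). The same pattern is used
       for MFENCE and SFENCE below. */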
6814 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6815 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6816 else
6817 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6818 IEM_MC_ADVANCE_RIP();
6819 IEM_MC_END();
6820 return VINF_SUCCESS;
6821}
6822
6823
6824/** Opcode 0x0f 0xae 11b/6. */
6825FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6826{
6827 RT_NOREF_PV(bRm);
6828 IEMOP_MNEMONIC(mfence, "mfence");
6829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6830 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6831 return IEMOP_RAISE_INVALID_OPCODE();
6832
6833 IEM_MC_BEGIN(0, 0);
6834 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6835 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6836 else
6837 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6838 IEM_MC_ADVANCE_RIP();
6839 IEM_MC_END();
6840 return VINF_SUCCESS;
6841}
6842
6843
6844/** Opcode 0x0f 0xae 11b/7. */
6845FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6846{
6847 RT_NOREF_PV(bRm);
6848 IEMOP_MNEMONIC(sfence, "sfence");
6849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6850 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6851 return IEMOP_RAISE_INVALID_OPCODE();
6852
6853 IEM_MC_BEGIN(0, 0);
6854 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6855 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6856 else
6857 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6858 IEM_MC_ADVANCE_RIP();
6859 IEM_MC_END();
6860 return VINF_SUCCESS;
6861}
6862
6863
6864/** Opcode 0xf3 0x0f 0xae 11b/0. */
6865FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6866
6867/** Opcode 0xf3 0x0f 0xae 11b/1. */
6868FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6869
6870/** Opcode 0xf3 0x0f 0xae 11b/2. */
6871FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6872
6873/** Opcode 0xf3 0x0f 0xae 11b/3. */
6874FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6875
6876
6877/**
6878 * Group 15 jump table for register variant.
6879 */
6880IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6881{ /* pfx: none, 066h, 0f3h, 0f2h */
6882 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6883 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6884 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6885 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6886 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6887 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6888 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6889 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6890};
6891AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6892
6893
6894/**
6895 * Group 15 jump table for memory variant.
6896 */
6897IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6898{ /* pfx: none, 066h, 0f3h, 0f2h */
6899 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6900 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6901 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6902 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6903 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6904 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6905 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6906 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6907};
6908AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6909
6910
6911/** Opcode 0x0f 0xae. */
6912FNIEMOP_DEF(iemOp_Grp15)
6913{
 6914 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor strictly needed, but useful when debugging 286 code. */
6915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
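    /* Each jump table row holds four entries (no prefix, 066h, 0f3h,
       0f2h), so the index is the ModR/M reg field times four plus the
       prefix index. */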
6916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6917 /* register, register */
6918 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6919 + pVCpu->iem.s.idxPrefix], bRm);
6920 /* memory, register */
6921 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6922 + pVCpu->iem.s.idxPrefix], bRm);
6923}
6924
6925
6926/** Opcode 0x0f 0xaf. */
6927FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6928{
6929 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6930 IEMOP_HLP_MIN_386();
6931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6932 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6933}
6934
6935
6936/** Opcode 0x0f 0xb0. */
6937FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6938{
6939 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6940 IEMOP_HLP_MIN_486();
6941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6942
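    /* CMPXCHG: if AL equals the destination, ZF is set and the source
       is stored in the destination; otherwise ZF is cleared and the
       destination is loaded into AL. The destination is written back
       even when the comparison fails (hence the RW mapping in the
       memory form below), which is what makes the LOCK variant
       meaningful. */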
6943 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6944 {
6945 IEMOP_HLP_DONE_DECODING();
6946 IEM_MC_BEGIN(4, 0);
6947 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6948 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6949 IEM_MC_ARG(uint8_t, u8Src, 2);
6950 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6951
6952 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6953 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6954 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6955 IEM_MC_REF_EFLAGS(pEFlags);
6956 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6957 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6958 else
6959 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6960
6961 IEM_MC_ADVANCE_RIP();
6962 IEM_MC_END();
6963 }
6964 else
6965 {
6966 IEM_MC_BEGIN(4, 3);
6967 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6968 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6969 IEM_MC_ARG(uint8_t, u8Src, 2);
6970 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6972 IEM_MC_LOCAL(uint8_t, u8Al);
6973
6974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6975 IEMOP_HLP_DONE_DECODING();
6976 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6977 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6978 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6979 IEM_MC_FETCH_EFLAGS(EFlags);
6980 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6981 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6982 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6983 else
6984 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6985
6986 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6987 IEM_MC_COMMIT_EFLAGS(EFlags);
6988 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6989 IEM_MC_ADVANCE_RIP();
6990 IEM_MC_END();
6991 }
6992 return VINF_SUCCESS;
6993}
 6994

6995/** Opcode 0x0f 0xb1. */
6996FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6997{
6998 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6999 IEMOP_HLP_MIN_486();
7000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7001
7002 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7003 {
7004 IEMOP_HLP_DONE_DECODING();
7005 switch (pVCpu->iem.s.enmEffOpSize)
7006 {
7007 case IEMMODE_16BIT:
7008 IEM_MC_BEGIN(4, 0);
7009 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7010 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7011 IEM_MC_ARG(uint16_t, u16Src, 2);
7012 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7013
7014 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7015 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7016 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7017 IEM_MC_REF_EFLAGS(pEFlags);
7018 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7019 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7020 else
7021 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7022
7023 IEM_MC_ADVANCE_RIP();
7024 IEM_MC_END();
7025 return VINF_SUCCESS;
7026
7027 case IEMMODE_32BIT:
7028 IEM_MC_BEGIN(4, 0);
7029 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7030 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7031 IEM_MC_ARG(uint32_t, u32Src, 2);
7032 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7033
7034 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7035 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7036 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7037 IEM_MC_REF_EFLAGS(pEFlags);
7038 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7039 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7040 else
7041 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7042
7043 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7044 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7045 IEM_MC_ADVANCE_RIP();
7046 IEM_MC_END();
7047 return VINF_SUCCESS;
7048
7049 case IEMMODE_64BIT:
7050 IEM_MC_BEGIN(4, 0);
7051 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7052 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
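            /* On 32-bit hosts the 64-bit source operand is passed by
               reference, presumably because the helper's calling
               convention cannot take a 64-bit value in a single
               register argument there. */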
7053#ifdef RT_ARCH_X86
7054 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7055#else
7056 IEM_MC_ARG(uint64_t, u64Src, 2);
7057#endif
7058 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7059
7060 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7061 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7062 IEM_MC_REF_EFLAGS(pEFlags);
7063#ifdef RT_ARCH_X86
7064 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7065 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7066 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7067 else
7068 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7069#else
7070 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7071 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7072 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7073 else
7074 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7075#endif
7076
7077 IEM_MC_ADVANCE_RIP();
7078 IEM_MC_END();
7079 return VINF_SUCCESS;
7080
7081 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7082 }
7083 }
7084 else
7085 {
7086 switch (pVCpu->iem.s.enmEffOpSize)
7087 {
7088 case IEMMODE_16BIT:
7089 IEM_MC_BEGIN(4, 3);
7090 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7091 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7092 IEM_MC_ARG(uint16_t, u16Src, 2);
7093 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7095 IEM_MC_LOCAL(uint16_t, u16Ax);
7096
7097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7098 IEMOP_HLP_DONE_DECODING();
7099 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7100 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7101 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7102 IEM_MC_FETCH_EFLAGS(EFlags);
7103 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7104 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7105 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7106 else
7107 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7108
7109 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7110 IEM_MC_COMMIT_EFLAGS(EFlags);
7111 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7112 IEM_MC_ADVANCE_RIP();
7113 IEM_MC_END();
7114 return VINF_SUCCESS;
7115
7116 case IEMMODE_32BIT:
7117 IEM_MC_BEGIN(4, 3);
7118 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7119 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7120 IEM_MC_ARG(uint32_t, u32Src, 2);
7121 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7123 IEM_MC_LOCAL(uint32_t, u32Eax);
7124
7125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7126 IEMOP_HLP_DONE_DECODING();
7127 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7128 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7129 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7130 IEM_MC_FETCH_EFLAGS(EFlags);
7131 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7132 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7133 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7134 else
7135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7136
7137 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7138 IEM_MC_COMMIT_EFLAGS(EFlags);
7139 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7140 IEM_MC_ADVANCE_RIP();
7141 IEM_MC_END();
7142 return VINF_SUCCESS;
7143
7144 case IEMMODE_64BIT:
7145 IEM_MC_BEGIN(4, 3);
7146 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7147 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7148#ifdef RT_ARCH_X86
7149 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7150#else
7151 IEM_MC_ARG(uint64_t, u64Src, 2);
7152#endif
7153 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7155 IEM_MC_LOCAL(uint64_t, u64Rax);
7156
7157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7158 IEMOP_HLP_DONE_DECODING();
7159 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7160 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7161 IEM_MC_FETCH_EFLAGS(EFlags);
7162 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7163#ifdef RT_ARCH_X86
7164 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7165 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7166 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7167 else
7168 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7169#else
7170 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7171 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7172 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7173 else
7174 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7175#endif
7176
7177 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7178 IEM_MC_COMMIT_EFLAGS(EFlags);
7179 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7180 IEM_MC_ADVANCE_RIP();
7181 IEM_MC_END();
7182 return VINF_SUCCESS;
7183
7184 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7185 }
7186 }
7187}
7188
7189
7190FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7191{
7192 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7193 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7194
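 /* Operand layout note (mirrors the fetches below): the far pointer in memory
  * stores the offset first (2/4/8 bytes according to the operand size) with
  * the 16-bit selector immediately after it, so e.g. lss esp, [mem] reads the
  * dword offset at [mem] and the selector word at [mem+4]. */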
7195 switch (pVCpu->iem.s.enmEffOpSize)
7196 {
7197 case IEMMODE_16BIT:
7198 IEM_MC_BEGIN(5, 1);
7199 IEM_MC_ARG(uint16_t, uSel, 0);
7200 IEM_MC_ARG(uint16_t, offSeg, 1);
7201 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7202 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7203 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7204 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7207 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7208 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7209 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7210 IEM_MC_END();
7211 return VINF_SUCCESS;
7212
7213 case IEMMODE_32BIT:
7214 IEM_MC_BEGIN(5, 1);
7215 IEM_MC_ARG(uint16_t, uSel, 0);
7216 IEM_MC_ARG(uint32_t, offSeg, 1);
7217 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7218 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7219 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7220 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7223 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7224 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7225 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7226 IEM_MC_END();
7227 return VINF_SUCCESS;
7228
7229 case IEMMODE_64BIT:
7230 IEM_MC_BEGIN(5, 1);
7231 IEM_MC_ARG(uint16_t, uSel, 0);
7232 IEM_MC_ARG(uint64_t, offSeg, 1);
7233 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7234 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7235 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7236 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7239 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7240 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7241 else
7242 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7243 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7244 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7245 IEM_MC_END();
7246 return VINF_SUCCESS;
7247
7248 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7249 }
7250}
7251
7252
7253/** Opcode 0x0f 0xb2. */
7254FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7255{
7256 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7257 IEMOP_HLP_MIN_386();
7258 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7259 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7260 return IEMOP_RAISE_INVALID_OPCODE();
7261 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7262}
7263
7264
7265/** Opcode 0x0f 0xb3. */
7266FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7267{
7268 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7269 IEMOP_HLP_MIN_386();
7270 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7271}
7272
7273
7274/** Opcode 0x0f 0xb4. */
7275FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7276{
7277 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7278 IEMOP_HLP_MIN_386();
7279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7280 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7281 return IEMOP_RAISE_INVALID_OPCODE();
7282 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7283}
7284
7285
7286/** Opcode 0x0f 0xb5. */
7287FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7288{
7289 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7290 IEMOP_HLP_MIN_386();
7291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7292 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7293 return IEMOP_RAISE_INVALID_OPCODE();
7294 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7295}
7296
7297
7298/** Opcode 0x0f 0xb6. */
7299FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7300{
7301 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7302 IEMOP_HLP_MIN_386();
7303
7304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7305
7306 /*
7307 * If rm is denoting a register, no more instruction bytes.
7308 */
7309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7310 {
7311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7312 switch (pVCpu->iem.s.enmEffOpSize)
7313 {
7314 case IEMMODE_16BIT:
7315 IEM_MC_BEGIN(0, 1);
7316 IEM_MC_LOCAL(uint16_t, u16Value);
7317 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7318 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7319 IEM_MC_ADVANCE_RIP();
7320 IEM_MC_END();
7321 return VINF_SUCCESS;
7322
7323 case IEMMODE_32BIT:
7324 IEM_MC_BEGIN(0, 1);
7325 IEM_MC_LOCAL(uint32_t, u32Value);
7326 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7327 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7328 IEM_MC_ADVANCE_RIP();
7329 IEM_MC_END();
7330 return VINF_SUCCESS;
7331
7332 case IEMMODE_64BIT:
7333 IEM_MC_BEGIN(0, 1);
7334 IEM_MC_LOCAL(uint64_t, u64Value);
7335 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7336 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7337 IEM_MC_ADVANCE_RIP();
7338 IEM_MC_END();
7339 return VINF_SUCCESS;
7340
7341 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7342 }
7343 }
7344 else
7345 {
7346 /*
7347 * We're loading a register from memory.
7348 */
7349 switch (pVCpu->iem.s.enmEffOpSize)
7350 {
7351 case IEMMODE_16BIT:
7352 IEM_MC_BEGIN(0, 2);
7353 IEM_MC_LOCAL(uint16_t, u16Value);
7354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7357 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7358 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7359 IEM_MC_ADVANCE_RIP();
7360 IEM_MC_END();
7361 return VINF_SUCCESS;
7362
7363 case IEMMODE_32BIT:
7364 IEM_MC_BEGIN(0, 2);
7365 IEM_MC_LOCAL(uint32_t, u32Value);
7366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7369 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7370 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7371 IEM_MC_ADVANCE_RIP();
7372 IEM_MC_END();
7373 return VINF_SUCCESS;
7374
7375 case IEMMODE_64BIT:
7376 IEM_MC_BEGIN(0, 2);
7377 IEM_MC_LOCAL(uint64_t, u64Value);
7378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7381 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7382 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7383 IEM_MC_ADVANCE_RIP();
7384 IEM_MC_END();
7385 return VINF_SUCCESS;
7386
7387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7388 }
7389 }
7390}
7391
7392
7393/** Opcode 0x0f 0xb7. */
7394FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7395{
7396 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7397 IEMOP_HLP_MIN_386();
7398
7399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7400
7401 /** @todo Not entirely sure how the operand size prefix is handled here,
7402 * assuming that it will be ignored. Would be nice to have a few
7403 * tests for this. */
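 /* Illustration of that assumption (not verified against hardware): with a
  * 66h prefix in 32-bit code enmEffOpSize is IEMMODE_16BIT, which still takes
  * the !64-bit path below, so 66 0F B7 C3 acts like movzx eax, bx and writes
  * the full 32-bit destination register. */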
7404 /*
7405 * If rm is denoting a register, no more instruction bytes.
7406 */
7407 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7408 {
7409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7410 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7411 {
7412 IEM_MC_BEGIN(0, 1);
7413 IEM_MC_LOCAL(uint32_t, u32Value);
7414 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7415 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7416 IEM_MC_ADVANCE_RIP();
7417 IEM_MC_END();
7418 }
7419 else
7420 {
7421 IEM_MC_BEGIN(0, 1);
7422 IEM_MC_LOCAL(uint64_t, u64Value);
7423 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7424 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7425 IEM_MC_ADVANCE_RIP();
7426 IEM_MC_END();
7427 }
7428 }
7429 else
7430 {
7431 /*
7432 * We're loading a register from memory.
7433 */
7434 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7435 {
7436 IEM_MC_BEGIN(0, 2);
7437 IEM_MC_LOCAL(uint32_t, u32Value);
7438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7441 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7442 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7443 IEM_MC_ADVANCE_RIP();
7444 IEM_MC_END();
7445 }
7446 else
7447 {
7448 IEM_MC_BEGIN(0, 2);
7449 IEM_MC_LOCAL(uint64_t, u64Value);
7450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7453 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7454 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7455 IEM_MC_ADVANCE_RIP();
7456 IEM_MC_END();
7457 }
7458 }
7459 return VINF_SUCCESS;
7460}
7461
7462
7463/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7464FNIEMOP_UD_STUB(iemOp_jmpe);
7465/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7466FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7467
7468
7469/**
7470 * @opcode 0xb9
7471 * @opinvalid intel-modrm
7472 * @optest ->
7473 */
7474FNIEMOP_DEF(iemOp_Grp10)
7475{
7476 /*
7477 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
7478 * the ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7479 */
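 /* Illustration (hypothetical bytes, not from the testcase): in 32-bit code
  * 0F B9 05 44 33 22 11 is a 7-byte instruction on Intel, since ModR/M 05h
  * pulls in a disp32, while AMD raises #UD after just the two opcode bytes. */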
7480 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7481 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7482 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7483}
7484
7485
7486/** Opcode 0x0f 0xba. */
7487FNIEMOP_DEF(iemOp_Grp8)
7488{
7489 IEMOP_HLP_MIN_386();
7490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7491 PCIEMOPBINSIZES pImpl;
7492 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7493 {
7494 case 0: case 1: case 2: case 3:
7495 /* Both AMD and Intel want full modr/m decoding and imm8. */
7496 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7497 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7498 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7499 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7500 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7502 }
7503 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7504
7505 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7506 {
7507 /* register destination. */
7508 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7510
7511 switch (pVCpu->iem.s.enmEffOpSize)
7512 {
7513 case IEMMODE_16BIT:
7514 IEM_MC_BEGIN(3, 0);
7515 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7516 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7517 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7518
7519 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7520 IEM_MC_REF_EFLAGS(pEFlags);
7521 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7522
7523 IEM_MC_ADVANCE_RIP();
7524 IEM_MC_END();
7525 return VINF_SUCCESS;
7526
7527 case IEMMODE_32BIT:
7528 IEM_MC_BEGIN(3, 0);
7529 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7530 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7531 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7532
7533 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7534 IEM_MC_REF_EFLAGS(pEFlags);
7535 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7536
7537 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7538 IEM_MC_ADVANCE_RIP();
7539 IEM_MC_END();
7540 return VINF_SUCCESS;
7541
7542 case IEMMODE_64BIT:
7543 IEM_MC_BEGIN(3, 0);
7544 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7545 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7546 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7547
7548 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7549 IEM_MC_REF_EFLAGS(pEFlags);
7550 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7551
7552 IEM_MC_ADVANCE_RIP();
7553 IEM_MC_END();
7554 return VINF_SUCCESS;
7555
7556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7557 }
7558 }
7559 else
7560 {
7561 /* memory destination. */
7562
7563 uint32_t fAccess;
7564 if (pImpl->pfnLockedU16)
7565 fAccess = IEM_ACCESS_DATA_RW;
7566 else /* BT */
7567 fAccess = IEM_ACCESS_DATA_R;
7568
7569 /** @todo test negative bit offsets! */
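 /* The immediate is masked to the operand width below (u8Bit & 0x0f/0x1f/0x3f),
  * so e.g. bt word [mem], 11h tests bit 1 of the word at [mem]; unlike the
  * Ev,Gv forms, the Ev,Ib forms never address memory outside the operand. */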
7570 switch (pVCpu->iem.s.enmEffOpSize)
7571 {
7572 case IEMMODE_16BIT:
7573 IEM_MC_BEGIN(3, 1);
7574 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7575 IEM_MC_ARG(uint16_t, u16Src, 1);
7576 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7578
7579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7580 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7581 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7582 if (pImpl->pfnLockedU16)
7583 IEMOP_HLP_DONE_DECODING();
7584 else
7585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7586 IEM_MC_FETCH_EFLAGS(EFlags);
7587 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7588 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7589 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7590 else
7591 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7592 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7593
7594 IEM_MC_COMMIT_EFLAGS(EFlags);
7595 IEM_MC_ADVANCE_RIP();
7596 IEM_MC_END();
7597 return VINF_SUCCESS;
7598
7599 case IEMMODE_32BIT:
7600 IEM_MC_BEGIN(3, 1);
7601 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7602 IEM_MC_ARG(uint32_t, u32Src, 1);
7603 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7605
7606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7607 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7608 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7609 if (pImpl->pfnLockedU16)
7610 IEMOP_HLP_DONE_DECODING();
7611 else
7612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7613 IEM_MC_FETCH_EFLAGS(EFlags);
7614 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7615 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7616 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7617 else
7618 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7619 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7620
7621 IEM_MC_COMMIT_EFLAGS(EFlags);
7622 IEM_MC_ADVANCE_RIP();
7623 IEM_MC_END();
7624 return VINF_SUCCESS;
7625
7626 case IEMMODE_64BIT:
7627 IEM_MC_BEGIN(3, 1);
7628 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7629 IEM_MC_ARG(uint64_t, u64Src, 1);
7630 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7632
7633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7634 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7635 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7636 if (pImpl->pfnLockedU16)
7637 IEMOP_HLP_DONE_DECODING();
7638 else
7639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7640 IEM_MC_FETCH_EFLAGS(EFlags);
7641 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7642 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7643 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7644 else
7645 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7646 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7647
7648 IEM_MC_COMMIT_EFLAGS(EFlags);
7649 IEM_MC_ADVANCE_RIP();
7650 IEM_MC_END();
7651 return VINF_SUCCESS;
7652
7653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7654 }
7655 }
7656}
7657
7658
7659/** Opcode 0x0f 0xbb. */
7660FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7661{
7662 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7663 IEMOP_HLP_MIN_386();
7664 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7665}
7666
7667
7668/** Opcode 0x0f 0xbc. */
7669FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7670{
7671 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7672 IEMOP_HLP_MIN_386();
7673 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7674 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7675}
7676
7677
7678/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7679FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7680
7681
7682/** Opcode 0x0f 0xbd. */
7683FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7684{
7685 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7686 IEMOP_HLP_MIN_386();
7687 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7688 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7689}
7690
7691
7692/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7693FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7694
7695
7696/** Opcode 0x0f 0xbe. */
7697FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7698{
7699 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7700 IEMOP_HLP_MIN_386();
7701
7702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7703
7704 /*
7705 * If rm is denoting a register, no more instruction bytes.
7706 */
7707 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7708 {
7709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7710 switch (pVCpu->iem.s.enmEffOpSize)
7711 {
7712 case IEMMODE_16BIT:
7713 IEM_MC_BEGIN(0, 1);
7714 IEM_MC_LOCAL(uint16_t, u16Value);
7715 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7716 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7717 IEM_MC_ADVANCE_RIP();
7718 IEM_MC_END();
7719 return VINF_SUCCESS;
7720
7721 case IEMMODE_32BIT:
7722 IEM_MC_BEGIN(0, 1);
7723 IEM_MC_LOCAL(uint32_t, u32Value);
7724 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7725 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7726 IEM_MC_ADVANCE_RIP();
7727 IEM_MC_END();
7728 return VINF_SUCCESS;
7729
7730 case IEMMODE_64BIT:
7731 IEM_MC_BEGIN(0, 1);
7732 IEM_MC_LOCAL(uint64_t, u64Value);
7733 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7734 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7735 IEM_MC_ADVANCE_RIP();
7736 IEM_MC_END();
7737 return VINF_SUCCESS;
7738
7739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7740 }
7741 }
7742 else
7743 {
7744 /*
7745 * We're loading a register from memory.
7746 */
7747 switch (pVCpu->iem.s.enmEffOpSize)
7748 {
7749 case IEMMODE_16BIT:
7750 IEM_MC_BEGIN(0, 2);
7751 IEM_MC_LOCAL(uint16_t, u16Value);
7752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7755 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7756 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7757 IEM_MC_ADVANCE_RIP();
7758 IEM_MC_END();
7759 return VINF_SUCCESS;
7760
7761 case IEMMODE_32BIT:
7762 IEM_MC_BEGIN(0, 2);
7763 IEM_MC_LOCAL(uint32_t, u32Value);
7764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7767 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7768 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7769 IEM_MC_ADVANCE_RIP();
7770 IEM_MC_END();
7771 return VINF_SUCCESS;
7772
7773 case IEMMODE_64BIT:
7774 IEM_MC_BEGIN(0, 2);
7775 IEM_MC_LOCAL(uint64_t, u64Value);
7776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7779 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7780 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7781 IEM_MC_ADVANCE_RIP();
7782 IEM_MC_END();
7783 return VINF_SUCCESS;
7784
7785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7786 }
7787 }
7788}
7789
7790
7791/** Opcode 0x0f 0xbf. */
7792FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7793{
7794 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7795 IEMOP_HLP_MIN_386();
7796
7797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7798
7799 /** @todo Not entirely sure how the operand size prefix is handled here,
7800 * assuming that it will be ignored. Would be nice to have a few
7801 * tests for this. */
7802 /*
7803 * If rm is denoting a register, no more instruction bytes.
7804 */
7805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7806 {
7807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7808 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7809 {
7810 IEM_MC_BEGIN(0, 1);
7811 IEM_MC_LOCAL(uint32_t, u32Value);
7812 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7813 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7814 IEM_MC_ADVANCE_RIP();
7815 IEM_MC_END();
7816 }
7817 else
7818 {
7819 IEM_MC_BEGIN(0, 1);
7820 IEM_MC_LOCAL(uint64_t, u64Value);
7821 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7822 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7823 IEM_MC_ADVANCE_RIP();
7824 IEM_MC_END();
7825 }
7826 }
7827 else
7828 {
7829 /*
7830 * We're loading a register from memory.
7831 */
7832 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7833 {
7834 IEM_MC_BEGIN(0, 2);
7835 IEM_MC_LOCAL(uint32_t, u32Value);
7836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7839 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7840 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7841 IEM_MC_ADVANCE_RIP();
7842 IEM_MC_END();
7843 }
7844 else
7845 {
7846 IEM_MC_BEGIN(0, 2);
7847 IEM_MC_LOCAL(uint64_t, u64Value);
7848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7851 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7852 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7853 IEM_MC_ADVANCE_RIP();
7854 IEM_MC_END();
7855 }
7856 }
7857 return VINF_SUCCESS;
7858}
7859
7860
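/* Why this sketch: the two xadd forms below just wire registers/memory into
   iemAImpl_xadd_u8 and friends. A minimal C model of the architectural
   semantics (xaddU8Model is a hypothetical helper, not built into IEM): */
#if 0
static void xaddU8Model(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uTmp = (uint8_t)(*puDst + *puReg); /* temp = DEST + SRC */
    *puReg = *puDst;                                 /* SRC  = original DEST */
    *puDst = uTmp;                                   /* DEST = temp */
}
/* Worked example: AL=3, BL=5; xadd al, bl leaves AL=8, BL=3. */
#endif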
7861/** Opcode 0x0f 0xc0. */
7862FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7863{
7864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7865 IEMOP_HLP_MIN_486();
7866 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7867
7868 /*
7869 * If rm is denoting a register, no more instruction bytes.
7870 */
7871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7872 {
7873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7874
7875 IEM_MC_BEGIN(3, 0);
7876 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7877 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7878 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7879
7880 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7881 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7882 IEM_MC_REF_EFLAGS(pEFlags);
7883 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7884
7885 IEM_MC_ADVANCE_RIP();
7886 IEM_MC_END();
7887 }
7888 else
7889 {
7890 /*
7891 * We're accessing memory.
7892 */
7893 IEM_MC_BEGIN(3, 3);
7894 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7895 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7896 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7897 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7899
7900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7901 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7902 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7903 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7904 IEM_MC_FETCH_EFLAGS(EFlags);
7905 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7906 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7907 else
7908 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7909
7910 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7911 IEM_MC_COMMIT_EFLAGS(EFlags);
7912 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7913 IEM_MC_ADVANCE_RIP();
7914 IEM_MC_END();
7915 return VINF_SUCCESS;
7916 }
7917 return VINF_SUCCESS;
7918}
7919
7920
7921/** Opcode 0x0f 0xc1. */
7922FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7923{
7924 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7925 IEMOP_HLP_MIN_486();
7926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7927
7928 /*
7929 * If rm is denoting a register, no more instruction bytes.
7930 */
7931 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7932 {
7933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7934
7935 switch (pVCpu->iem.s.enmEffOpSize)
7936 {
7937 case IEMMODE_16BIT:
7938 IEM_MC_BEGIN(3, 0);
7939 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7940 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7941 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7942
7943 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7944 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7945 IEM_MC_REF_EFLAGS(pEFlags);
7946 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7947
7948 IEM_MC_ADVANCE_RIP();
7949 IEM_MC_END();
7950 return VINF_SUCCESS;
7951
7952 case IEMMODE_32BIT:
7953 IEM_MC_BEGIN(3, 0);
7954 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7955 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7956 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7957
7958 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7959 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7960 IEM_MC_REF_EFLAGS(pEFlags);
7961 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7962
7963 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7964 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7965 IEM_MC_ADVANCE_RIP();
7966 IEM_MC_END();
7967 return VINF_SUCCESS;
7968
7969 case IEMMODE_64BIT:
7970 IEM_MC_BEGIN(3, 0);
7971 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7972 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7973 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7974
7975 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7976 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7977 IEM_MC_REF_EFLAGS(pEFlags);
7978 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7979
7980 IEM_MC_ADVANCE_RIP();
7981 IEM_MC_END();
7982 return VINF_SUCCESS;
7983
7984 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7985 }
7986 }
7987 else
7988 {
7989 /*
7990 * We're accessing memory.
7991 */
7992 switch (pVCpu->iem.s.enmEffOpSize)
7993 {
7994 case IEMMODE_16BIT:
7995 IEM_MC_BEGIN(3, 3);
7996 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7997 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7998 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7999 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8001
8002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8003 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8004 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8005 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8006 IEM_MC_FETCH_EFLAGS(EFlags);
8007 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8008 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8009 else
8010 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8011
8012 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8013 IEM_MC_COMMIT_EFLAGS(EFlags);
8014 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8015 IEM_MC_ADVANCE_RIP();
8016 IEM_MC_END();
8017 return VINF_SUCCESS;
8018
8019 case IEMMODE_32BIT:
8020 IEM_MC_BEGIN(3, 3);
8021 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8022 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8023 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8024 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8026
8027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8028 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8029 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8030 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8031 IEM_MC_FETCH_EFLAGS(EFlags);
8032 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8033 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8034 else
8035 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8036
8037 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8038 IEM_MC_COMMIT_EFLAGS(EFlags);
8039 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8040 IEM_MC_ADVANCE_RIP();
8041 IEM_MC_END();
8042 return VINF_SUCCESS;
8043
8044 case IEMMODE_64BIT:
8045 IEM_MC_BEGIN(3, 3);
8046 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8047 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8048 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8049 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8051
8052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8053 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8054 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8055 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8056 IEM_MC_FETCH_EFLAGS(EFlags);
8057 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8058 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8059 else
8060 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8061
8062 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8063 IEM_MC_COMMIT_EFLAGS(EFlags);
8064 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8065 IEM_MC_ADVANCE_RIP();
8066 IEM_MC_END();
8067 return VINF_SUCCESS;
8068
8069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8070 }
8071 }
8072}
8073
8074
8075/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8076FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8077/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8078FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8079/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8080FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8081/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8082FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8083
8084
8085/** Opcode 0x0f 0xc3. */
8086FNIEMOP_DEF(iemOp_movnti_My_Gy)
8087{
8088 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8089
8090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8091
8092 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
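 /* Encoding illustration (hypothetical bytes, 64-bit code): 0F C3 01 is
  * movnti [rcx], eax, a non-temporal hinted store requiring SSE2; with REX.W
  * (48 0F C3 01) the full 64-bit register is stored. */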
8093 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8094 {
8095 switch (pVCpu->iem.s.enmEffOpSize)
8096 {
8097 case IEMMODE_32BIT:
8098 IEM_MC_BEGIN(0, 2);
8099 IEM_MC_LOCAL(uint32_t, u32Value);
8100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8101
8102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8104 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8105 return IEMOP_RAISE_INVALID_OPCODE();
8106
8107 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8108 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8109 IEM_MC_ADVANCE_RIP();
8110 IEM_MC_END();
8111 break;
8112
8113 case IEMMODE_64BIT:
8114 IEM_MC_BEGIN(0, 2);
8115 IEM_MC_LOCAL(uint64_t, u64Value);
8116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8117
8118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8120 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8121 return IEMOP_RAISE_INVALID_OPCODE();
8122
8123 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8124 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8125 IEM_MC_ADVANCE_RIP();
8126 IEM_MC_END();
8127 break;
8128
8129 case IEMMODE_16BIT:
8130 /** @todo check this form. */
8131 return IEMOP_RAISE_INVALID_OPCODE();
8132 }
8133 }
8134 else
8135 return IEMOP_RAISE_INVALID_OPCODE();
8136 return VINF_SUCCESS;
8137}
8138/* Opcode 0x66 0x0f 0xc3 - invalid */
8139/* Opcode 0xf3 0x0f 0xc3 - invalid */
8140/* Opcode 0xf2 0x0f 0xc3 - invalid */
8141
8142/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8143FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8144/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8145FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8146/* Opcode 0xf3 0x0f 0xc4 - invalid */
8147/* Opcode 0xf2 0x0f 0xc4 - invalid */
8148
8149/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8150FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8151/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8152FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8153/* Opcode 0xf3 0x0f 0xc5 - invalid */
8154/* Opcode 0xf2 0x0f 0xc5 - invalid */
8155
8156/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8157FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8158/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8159FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8160/* Opcode 0xf3 0x0f 0xc6 - invalid */
8161/* Opcode 0xf2 0x0f 0xc6 - invalid */
8162
8163
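/* Why this sketch: the function below maps memory/registers and defers to
   iemAImpl_cmpxchg8b(_locked). A minimal C model of the architectural
   semantics (cmpxchg8bModel is a hypothetical helper, not built into IEM): */
#if 0
static void cmpxchg8bModel(uint64_t *puMem, uint32_t *puEax, uint32_t *puEdx,
                           uint32_t uEbx, uint32_t uEcx, bool *pfZf)
{
    uint64_t const uCmp = ((uint64_t)*puEdx << 32) | *puEax;
    if (*puMem == uCmp)
    {
        *puMem = ((uint64_t)uEcx << 32) | uEbx;  /* equal: store ECX:EBX, set ZF */
        *pfZf  = true;
    }
    else
    {
        *puEax = (uint32_t)*puMem;               /* not equal: load EDX:EAX, clear ZF */
        *puEdx = (uint32_t)(*puMem >> 32);
        *pfZf  = false;
    }
}
#endif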
8164/** Opcode 0x0f 0xc7 !11/1. */
8165FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8166{
8167 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8168
8169 IEM_MC_BEGIN(4, 3);
8170 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8171 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8172 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8173 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8174 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8175 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8177
8178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8179 IEMOP_HLP_DONE_DECODING();
8180 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8181
8182 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8183 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8184 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8185
8186 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8187 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8188 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8189
8190 IEM_MC_FETCH_EFLAGS(EFlags);
8191 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8192 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8193 else
8194 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8195
8196 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8197 IEM_MC_COMMIT_EFLAGS(EFlags);
8198 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8199 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8200 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8201 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8202 IEM_MC_ENDIF();
8203 IEM_MC_ADVANCE_RIP();
8204
8205 IEM_MC_END();
8206 return VINF_SUCCESS;
8207}
8208
8209
8210/** Opcode REX.W 0x0f 0xc7 !11/1. */
8211FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8212{
8213 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8214 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8215 {
8216#if 0
8217 RT_NOREF(bRm);
8218 IEMOP_BITCH_ABOUT_STUB();
8219 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8220#else
8221 IEM_MC_BEGIN(4, 3);
8222 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8223 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8224 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8225 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8226 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8227 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8229
8230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8231 IEMOP_HLP_DONE_DECODING();
8232 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8233 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8234
8235 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8236 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8237 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8238
8239 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8240 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8241 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8242
8243 IEM_MC_FETCH_EFLAGS(EFlags);
8244# ifdef RT_ARCH_AMD64
8245 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8246 {
8247 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8248 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8249 else
8250 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8251 }
8252 else
8253# endif
8254 {
8255 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8256 accesses that are not at all atomic, which works fine in a uni-CPU guest
8257 configuration (ignoring DMA). If guest SMP is active we have no choice
8258 but to use a rendezvous callback here. Sigh. */
8259 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8260 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8261 else
8262 {
8263 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8264 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8265 }
8266 }
8267
8268 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8269 IEM_MC_COMMIT_EFLAGS(EFlags);
8270 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8271 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8272 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8273 IEM_MC_ENDIF();
8274 IEM_MC_ADVANCE_RIP();
8275
8276 IEM_MC_END();
8277 return VINF_SUCCESS;
8278#endif
8279 }
8280 Log(("cmpxchg16b -> #UD\n"));
8281 return IEMOP_RAISE_INVALID_OPCODE();
8282}
8283
8284FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8285{
8286 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8287 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8288 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8289}
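/* Example encodings (hypothetical bytes, 64-bit code): 0F C7 0F is
   cmpxchg8b [rdi], while adding REX.W - 48 0F C7 0F - makes the test above
   select cmpxchg16b [rdi]. */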
8290
8291/** Opcode 0x0f 0xc7 11/6. */
8292FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8293
8294/** Opcode 0x0f 0xc7 !11/6. */
8295FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8296
8297/** Opcode 0x66 0x0f 0xc7 !11/6. */
8298FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8299
8300/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8301FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8302
8303/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8304FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8305
8306/** Opcode 0x0f 0xc7 11/7. */
8307FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8308
8309
8310/**
8311 * Group 9 jump table for register variant.
8312 */
8313IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8314{ /* pfx: none, 066h, 0f3h, 0f2h */
8315 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8316 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8317 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8318 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8319 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8320 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8321 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8322 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8323};
8324AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8325
8326
8327/**
8328 * Group 9 jump table for memory variant.
8329 */
8330IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8331{ /* pfx: none, 066h, 0f3h, 0f2h */
8332 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8333 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8334 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8335 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8336 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8337 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8338 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8339 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8340};
8341AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8342
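/* Lookup illustration (hypothetical bytes): 66 0F C7 31 decodes as reg=/6
   with idxPrefix=1 (the 066h column), so the memory table above yields
   iemOp_Grp9_vmclear_Mq, i.e. vmclear [rcx]. */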
8343
8344/** Opcode 0x0f 0xc7. */
8345FNIEMOP_DEF(iemOp_Grp9)
8346{
8347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8348 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8349 /* register, register */
8350 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8351 + pVCpu->iem.s.idxPrefix], bRm);
8352 /* memory, register */
8353 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8354 + pVCpu->iem.s.idxPrefix], bRm);
8355}
8356
8357
8358/**
8359 * Common 'bswap register' helper.
8360 */
8361FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8362{
8363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8364 switch (pVCpu->iem.s.enmEffOpSize)
8365 {
8366 case IEMMODE_16BIT:
8367 IEM_MC_BEGIN(1, 0);
8368 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8369 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8370 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8371 IEM_MC_ADVANCE_RIP();
8372 IEM_MC_END();
8373 return VINF_SUCCESS;
8374
8375 case IEMMODE_32BIT:
8376 IEM_MC_BEGIN(1, 0);
8377 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8378 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8379 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8380 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8381 IEM_MC_ADVANCE_RIP();
8382 IEM_MC_END();
8383 return VINF_SUCCESS;
8384
8385 case IEMMODE_64BIT:
8386 IEM_MC_BEGIN(1, 0);
8387 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8388 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8389 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8390 IEM_MC_ADVANCE_RIP();
8391 IEM_MC_END();
8392 return VINF_SUCCESS;
8393
8394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8395 }
8396}
8397
8398
8399/** Opcode 0x0f 0xc8. */
8400FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8401{
8402 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8403 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8404 prefix, but it appears REX.B is the correct prefix. For a parallel
8405 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
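 /* Encoding illustration: 0F C8 is bswap eax, 41 0F C8 (REX.B) is bswap r8d,
  * and 48 0F C8 (REX.W) is bswap rax. */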
8406 IEMOP_HLP_MIN_486();
8407 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8408}
8409
8410
8411/** Opcode 0x0f 0xc9. */
8412FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8413{
8414 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8415 IEMOP_HLP_MIN_486();
8416 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8417}
8418
8419
8420/** Opcode 0x0f 0xca. */
8421FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8422{
8423 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8424 IEMOP_HLP_MIN_486();
8425 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8426}
8427
8428
8429/** Opcode 0x0f 0xcb. */
8430FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8431{
8432 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8433 IEMOP_HLP_MIN_486();
8434 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8435}
8436
8437
8438/** Opcode 0x0f 0xcc. */
8439FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8440{
8441 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8442 IEMOP_HLP_MIN_486();
8443 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8444}
8445
8446
8447/** Opcode 0x0f 0xcd. */
8448FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8449{
8450 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8451 IEMOP_HLP_MIN_486();
8452 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8453}
8454
8455
8456/** Opcode 0x0f 0xce. */
8457FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8458{
8459 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8460 IEMOP_HLP_MIN_486();
8461 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8462}
8463
8464
8465/** Opcode 0x0f 0xcf. */
8466FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8467{
8468 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8469 IEMOP_HLP_MIN_486();
8470 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8471}
8472
8473
8474/* Opcode 0x0f 0xd0 - invalid */
8475/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8476FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8477/* Opcode 0xf3 0x0f 0xd0 - invalid */
8478/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8479FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8480
8481/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8482FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8483/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8484FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8485/* Opcode 0xf3 0x0f 0xd1 - invalid */
8486/* Opcode 0xf2 0x0f 0xd1 - invalid */
8487
8488/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8489FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8490/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8491FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8492/* Opcode 0xf3 0x0f 0xd2 - invalid */
8493/* Opcode 0xf2 0x0f 0xd2 - invalid */
8494
8495/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8496FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8497/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8498FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8499/* Opcode 0xf3 0x0f 0xd3 - invalid */
8500/* Opcode 0xf2 0x0f 0xd3 - invalid */
8501
8502/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8503FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8504/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8505FNIEMOP_STUB(iemOp_paddq_Vx_W);
8506/* Opcode 0xf3 0x0f 0xd4 - invalid */
8507/* Opcode 0xf2 0x0f 0xd4 - invalid */
8508
8509/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8510FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8511/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8512FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8513/* Opcode 0xf3 0x0f 0xd5 - invalid */
8514/* Opcode 0xf2 0x0f 0xd5 - invalid */
8515
8516/* Opcode 0x0f 0xd6 - invalid */
8517
8518/**
8519 * @opcode 0xd6
8520 * @oppfx 0x66
8521 * @opcpuid sse2
8522 * @opgroup og_sse2_pcksclr_datamove
8523 * @opxcpttype none
8524 * @optest op1=-1 op2=2 -> op1=2
8525 * @optest op1=0 op2=-42 -> op1=-42
8526 */
8527FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8528{
8529 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8532 {
8533 /*
8534 * Register, register.
8535 */
8536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8537 IEM_MC_BEGIN(0, 2);
8538 IEM_MC_LOCAL(uint64_t, uSrc);
8539
8540 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8541 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8542
8543 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8544 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8545
8546 IEM_MC_ADVANCE_RIP();
8547 IEM_MC_END();
8548 }
8549 else
8550 {
8551 /*
8552 * Memory, register.
8553 */
8554 IEM_MC_BEGIN(0, 2);
8555 IEM_MC_LOCAL(uint64_t, uSrc);
8556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8557
8558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8560 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8561 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8562
8563 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8564 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8565
8566 IEM_MC_ADVANCE_RIP();
8567 IEM_MC_END();
8568 }
8569 return VINF_SUCCESS;
8570}
8571
8572
8573/**
8574 * @opcode 0xd6
8575 * @opcodesub 11 mr/reg
8576 * @oppfx f3
8577 * @opcpuid sse2
8578 * @opgroup og_sse2_simdint_datamove
8579 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8580 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8581 */
8582FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8583{
8584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8586 {
8587 /*
8588 * Register, register.
8589 */
8590 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8592 IEM_MC_BEGIN(0, 1);
8593 IEM_MC_LOCAL(uint64_t, uSrc);
8594
8595 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8596 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8597
8598 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8599 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8600 IEM_MC_FPU_TO_MMX_MODE();
8601
8602 IEM_MC_ADVANCE_RIP();
8603 IEM_MC_END();
8604 return VINF_SUCCESS;
8605 }
8606
8607 /**
8608 * @opdone
8609 * @opmnemonic udf30fd6mem
8610 * @opcode 0xd6
8611 * @opcodesub !11 mr/reg
8612 * @oppfx f3
8613 * @opunused intel-modrm
8614 * @opcpuid sse
8615 * @optest ->
8616 */
8617 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8618}
8619
8620
8621/**
8622 * @opcode 0xd6
8623 * @opcodesub 11 mr/reg
8624 * @oppfx f2
8625 * @opcpuid sse2
8626 * @opgroup og_sse2_simdint_datamove
8627 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8628 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8629 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8630 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8631 * @optest op1=-42 op2=0xfedcba9876543210
8632 * -> op1=0xfedcba9876543210 ftw=0xff
8633 */
8634FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8635{
8636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8637 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8638 {
8639 /*
8640 * Register, register.
8641 */
8642 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8644 IEM_MC_BEGIN(0, 1);
8645 IEM_MC_LOCAL(uint64_t, uSrc);
8646
8647 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8648 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8649
8650 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8651 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8652 IEM_MC_FPU_TO_MMX_MODE();
8653
8654 IEM_MC_ADVANCE_RIP();
8655 IEM_MC_END();
8656 return VINF_SUCCESS;
8657 }
8658
8659 /**
8660 * @opdone
8661 * @opmnemonic udf20fd6mem
8662 * @opcode 0xd6
8663 * @opcodesub !11 mr/reg
8664 * @oppfx f2
8665 * @opunused intel-modrm
8666 * @opcpuid sse
8667 * @optest ->
8668 */
8669 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8670}
8671
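/* Why this sketch: the MMX form below defers the real work to
   iemAImpl_pmovmskb_u64 (the SSE form does the same over 16 bytes). A minimal
   C model of the sign-bit gathering (pmovmskbU64Model is a hypothetical
   helper, not built into IEM): */
#if 0
static uint64_t pmovmskbU64Model(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte; /* sign bit of byte i -> bit i */
    return fMask; /* e.g. uSrc=0x8080000000000080 -> 0xc1 */
}
#endif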
8672/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8673FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8674{
8675 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8676 /** @todo testcase: Check that the instruction implicitly clears the high
8677 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8678 * and opcode modifications are made to work with the whole width (not
8679 * just 128). */
8680 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8681 /* Docs say register only. */
8682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8683 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8684 {
8685 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8686 IEM_MC_BEGIN(2, 0);
8687 IEM_MC_ARG(uint64_t *, pDst, 0);
8688 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8689 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8690 IEM_MC_PREPARE_FPU_USAGE();
8691 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8692 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8693 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8694 IEM_MC_ADVANCE_RIP();
8695 IEM_MC_END();
8696 return VINF_SUCCESS;
8697 }
8698 return IEMOP_RAISE_INVALID_OPCODE();
8699}
8700
8701/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8702FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8703{
8704 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8705 /** @todo testcase: Check that the instruction implicitly clears the high
8706 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8707 * and opcode modifications are made to work with the whole width (not
8708 * just 128). */
8709 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8710 /* Docs say register only. */
8711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8713 {
8714 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8715 IEM_MC_BEGIN(2, 0);
8716 IEM_MC_ARG(uint64_t *, pDst, 0);
8717 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8718 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8719 IEM_MC_PREPARE_SSE_USAGE();
8720 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8721 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8722 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8723 IEM_MC_ADVANCE_RIP();
8724 IEM_MC_END();
8725 return VINF_SUCCESS;
8726 }
8727 return IEMOP_RAISE_INVALID_OPCODE();
8728}
8729
8730/* Opcode 0xf3 0x0f 0xd7 - invalid */
8731/* Opcode 0xf2 0x0f 0xd7 - invalid */
8732
8733
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_STUB(iemOp_psubusb_Vx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_STUB(iemOp_pand_Vx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_STUB(iemOp_psraw_Vx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);


/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
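
/*
 * Note that IEM_MC_STORE_MEM_U128_ALIGN_SSE above enforces the SSE2 rule
 * that movntdq must target a 16-byte aligned address; a misaligned address
 * raises \#GP(0).  A sketch of that alignment test (hypothetical helper,
 * illustration only):
 */
#if 0 /* illustration only */
static bool iemExampleIsSse16ByteAligned(uint64_t GCPtrMem)
{
    return (GCPtrMem & 15) == 0; /* Low four bits clear <=> 16-byte aligned. */
}
#endif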

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/* Opcode 0xf3 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf3 0x0f 0xf1 - invalid */
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf2 - invalid */
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf3 - invalid */
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf3 0x0f 0xf4 - invalid */
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf5 - invalid */
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf6 - invalid */
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf3 0x0f 0xf7 - invalid */
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf3 0x0f 0xf8 - invalid */
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf9 - invalid */
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfa - invalid */
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf3 0x0f 0xfb - invalid */
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfc - invalid */
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfd - invalid */
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf3 0x0f 0xfe - invalid */
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
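
/*
 * Note the vendor quirk handled above: on Intel CPUs ud0 consumes a ModR/M
 * byte (plus any SIB/displacement bytes it implies), so the instruction
 * length depends on the addressing form even though the outcome is always
 * \#UD.  A sketch of the displacement-size part of that calculation for
 * 32-bit addressing, ignoring SIB (hypothetical helper, illustration only):
 */
#if 0 /* illustration only */
static unsigned iemExampleUd0DispBytes(uint8_t bRm)
{
    unsigned const iMod = bRm >> 6;
    if (iMod == 1)
        return 1;                                   /* mod=1: disp8. */
    if (iMod == 2 || (iMod == 0 && (bRm & 7) == 5))
        return 4;                                   /* mod=2, or mod=0 with rm=5: disp32. */
    return 0;                                       /* Register form or no displacement. */
}
#endif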



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
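
/*
 * The map keeps four entries per opcode, one per mandatory-prefix column
 * (none, 0x66, 0xf3, 0xf2), which is what the 256 * 4 == 1024 assertion
 * above checks.  A sketch of the lookup a dispatcher would perform, with
 * idxPrefix being the 0..3 column index (names here are illustrative):
 */
#if 0 /* illustration only */
return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix]);
#endif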

/** @} */
