VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 67008

Last change on this file was r67008, checked in by vboxsync on 2017-05-22:

IEM: movdqa Vdq,Wdq (0x66 0x0f 0x6f) tests.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 67008 2017-05-22 11:56:11Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5 (common worker for verr and verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
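
/*
 * Editor's note, not part of the original source: a worked ModR/M decode for
 * the group 6 dispatcher above, using the standard mod/reg/rm bit layout
 * (mod = bits 7:6, reg = bits 5:3, rm = bits 2:0).  Example byte 0xd8:
 *
 *     mod = (0xd8 >> 6) & 3 = 3   -> register operand form
 *     reg = (0xd8 >> 3) & 7 = 3   -> g_apfnGroup6[3] = iemOp_Grp6_ltr
 *     rm  =  0xd8       & 7 = 0   -> AX
 *
 * So the byte sequence 0f 00 d8 decodes as 'ltr ax'.
 */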


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc1. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc2. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc3. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc4. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc8. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 0xc9. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
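
/*
 * Editor's note, not part of the original source: the 16-bit smsw paths above
 * reproduce how older CPUs fill the bits above the machine status word.  A
 * worked example, assuming CR0 with only PE set (low word 0x0001):
 *
 *     286:  0x0001 | 0xfff0 = 0xfff1   (bits 4-15 read as ones)
 *     386:  0x0001 | 0xffe0 = 0xffe1   (bit 4 is ET, so only bits 5-15 forced)
 *     486+: 0x0001 is stored unmodified (the likely path)
 */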


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits (PE, MP, EM and TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
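
/*
 * Editor's note, not part of the original source: a concrete lmsw example.
 * With AX = 0x000f, 'lmsw ax' loads PE, MP, EM and TS (CR0 bits 0-3); bits
 * 4-15 of the source are ignored, which is why the comment above speaks of
 * only the low bits being used.  Also note the x86 rule that lmsw can set
 * CR0.PE but never clear it; clearing PE requires 'mov cr0, reg'.
 */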


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo Move the SVM intercept out of the decoder and into the C implementation. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
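
/*
 * Editor's note, not part of the original source: a worked group 7 decode.
 * For the byte sequence 0f 01 f8 the ModR/M byte is 0xf8:
 *
 *     mod = 3 (register form, so g_apfnGroup7Mem is skipped)
 *     reg = 7, rm = 0  -> case 7 / case 0 -> iemOp_Grp7_swapgs
 *
 * while 0f 01 10 has mod=0, reg=2, rm=0 and therefore dispatches through
 * g_apfnGroup7Mem[2] = iemOp_Grp7_lgdt with an [eax]/[rax]-style memory
 * operand.
 */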

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
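
/*
 * Editor's note, not part of the original source: a decode example for the
 * AMD prefetch group above.  For 0f 0d 08 the ModR/M byte 0x08 gives mod=0,
 * reg=1, rm=0, so it is disassembled as 'prefetchw [eax]' (or [rax]); the
 * emulation computes the effective address and then treats it as a NOP.
 */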


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
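
/*
 * Editor's note, not part of the original source: the asymmetry above is
 * deliberate and matches the SSE movss semantics.  The register-to-register
 * form (IEM_MC_STORE_XREG_U32) only replaces the low dword and preserves
 * bits 32-127 of the destination, whereas the load-from-memory form
 * (IEM_MC_STORE_XREG_U32_ZX_U128) zero-extends the 32-bit value to the full
 * 128-bit register:
 *
 *     movss xmm1, xmm2   -> only xmm1[31:0] changes
 *     movss xmm1, [mem]  -> xmm1[127:32] becomes zero
 */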


/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
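
/*
 * Editor's note, not part of the original source: opcode 0x0f 0x12 with no
 * prefix encodes two instructions, which is why the function above decodes
 * both.  The register form (mod=11) is movhlps and copies the high qword of
 * the source into the low qword of the destination, e.g. 0f 12 c1 is
 * 'movhlps xmm0, xmm1'.  Any memory form is movlps and loads a qword into
 * the low half of the destination, leaving the high qword untouched.
 */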


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *              op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
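
/*
 * Editor's note, not part of the original source: unlike movsldup above,
 * which fetches a full 16-byte source with IEM_MC_FETCH_MEM_U128_ALIGN_SSE
 * and so can fault on a misaligned access, movddup only reads a qword
 * (IEM_MC_FETCH_MEM_U64) and duplicates it, so no 16-byte alignment check
 * applies, consistent with its @opxcpttype 5 classification.
 */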


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic  udf30f13
 * @opcode      0x13
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f13
 * @opcode      0x13
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic  udf30f14
 * @opcode      0x14
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f14
 * @opcode      0x14
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/*  Opcode 0xf3 0x0f 0x15 - invalid */
/*  Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic  udf30f15
 * @opcode      0x15
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f15
 * @opcode      0x15
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x16
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x16
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opdone
1985 * @opmnemonic udf20f16
1986 * @opcode 0x16
1987 * @oppfx 0xf2
1988 * @opunused intel-modrm
1989 * @opcpuid sse
1990 * @optest ->
1991 * @opdone
1992 */
1993
1994
1995/**
1996 * @opcode 0x17
1997 * @opcodesub !11 mr/reg
1998 * @oppfx none
1999 * @opcpuid sse
2000 * @opgroup og_sse_simdfp_datamove
2001 * @opxcpttype 5
2002 * @optest op1=1 op2=2 -> op1=2
2003 * @optest op1=0 op2=-42 -> op1=-42
2004 */
2005FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2006{
2007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2008 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2009 {
2010 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2011
2012 IEM_MC_BEGIN(0, 2);
2013 IEM_MC_LOCAL(uint64_t, uSrc);
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015
2016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2020
2021 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2022 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 return VINF_SUCCESS;
2027 }
2028
2029 /**
2030 * @opdone
2031 * @opmnemonic ud0f17m3
2032 * @opcode 0x17
2033 * @opcodesub 11 mr/reg
2034 * @oppfx none
2035 * @opunused immediate
2036 * @opcpuid sse
2037 * @optest ->
2038 */
2039 return IEMOP_RAISE_INVALID_OPCODE();
2040}
2041
2042
2043/**
2044 * @opcode 0x17
2045 * @opcodesub !11 mr/reg
2046 * @oppfx 0x66
2047 * @opcpuid sse2
2048 * @opgroup og_sse2_pcksclr_datamove
2049 * @opxcpttype 5
2050 * @optest op1=1 op2=2 -> op1=2
2051 * @optest op1=0 op2=-42 -> op1=-42
2052 */
2053FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2054{
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2059
2060 IEM_MC_BEGIN(0, 2);
2061 IEM_MC_LOCAL(uint64_t, uSrc);
2062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2063
2064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2068
2069 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2070 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2071
2072 IEM_MC_ADVANCE_RIP();
2073 IEM_MC_END();
2074 return VINF_SUCCESS;
2075 }
2076
2077 /**
2078 * @opdone
2079 * @opmnemonic ud660f17m3
2080 * @opcode 0x17
2081 * @opcodesub 11 mr/reg
2082 * @oppfx 0x66
2083 * @opunused immediate
2084 * @opcpuid sse
2085 * @optest ->
2086 */
2087 return IEMOP_RAISE_INVALID_OPCODE();
2088}
2089
2090
2091/**
2092 * @opdone
2093 * @opmnemonic udf30f17
2094 * @opcode 0x17
2095 * @oppfx 0xf3
2096 * @opunused intel-modrm
2097 * @opcpuid sse
2098 * @optest ->
2099 * @opdone
2100 */
2101
2102/**
2103 * @opmnemonic udf20f17
2104 * @opcode 0x17
2105 * @oppfx 0xf2
2106 * @opunused intel-modrm
2107 * @opcpuid sse
2108 * @optest ->
2109 * @opdone
2110 */
2111
2112
2113/** Opcode 0x0f 0x18. */
2114FNIEMOP_DEF(iemOp_prefetch_Grp16)
2115{
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2118 {
2119 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2120 {
2121 case 4: /* Aliased to /0 for the time being according to AMD. */
2122 case 5: /* Aliased to /0 for the time being according to AMD. */
2123 case 6: /* Aliased to /0 for the time being according to AMD. */
2124 case 7: /* Aliased to /0 for the time being according to AMD. */
2125 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2126 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2127 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2128 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2130 }
2131
2132 IEM_MC_BEGIN(0, 1);
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2136 /* Currently a NOP. */
2137 NOREF(GCPtrEffSrc);
2138 IEM_MC_ADVANCE_RIP();
2139 IEM_MC_END();
2140 return VINF_SUCCESS;
2141 }
2142
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144}
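/* Note: the PREFETCHh hints are purely advisory and a CPU is free to ignore
   them, so decoding the memory operand and otherwise treating the whole
   group as a NOP (as above) is an architecturally valid emulation. */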
2145
2146
2147/** Opcode 0x0f 0x19..0x1f. */
2148FNIEMOP_DEF(iemOp_nop_Ev)
2149{
2150 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2153 {
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155 IEM_MC_BEGIN(0, 0);
2156 IEM_MC_ADVANCE_RIP();
2157 IEM_MC_END();
2158 }
2159 else
2160 {
2161 IEM_MC_BEGIN(0, 1);
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 /* Currently a NOP. */
2166 NOREF(GCPtrEffSrc);
2167 IEM_MC_ADVANCE_RIP();
2168 IEM_MC_END();
2169 }
2170 return VINF_SUCCESS;
2171}
2172
2173
2174/** Opcode 0x0f 0x20. */
2175FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2176{
2177 /* mod is ignored, as are operand size overrides. */
2178 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2179 IEMOP_HLP_MIN_386();
2180 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2181 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2182 else
2183 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2184
2185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2186 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2187 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2188 {
2189 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2190 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2191 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2192 iCrReg |= 8;
2193 }
2194 switch (iCrReg)
2195 {
2196 case 0: case 2: case 3: case 4: case 8:
2197 break;
2198 default:
2199 return IEMOP_RAISE_INVALID_OPCODE();
2200 }
2201 IEMOP_HLP_DONE_DECODING();
2202
2203 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2204}
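/* Informal encoding example for the LOCK/CR8 case above: on CPUs with
   fMovCr8In32Bit (AMD's alternative encoding), f0 0f 20 c0 - a LOCK prefix
   on 'mov eax, cr0' - decodes as 'mov eax, cr8', since LOCK sets bit 3 of
   the control register number; without the feature the LOCK prefix yields
   #UD instead. */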
2205
2206
2207/** Opcode 0x0f 0x21. */
2208FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2209{
2210 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2211 IEMOP_HLP_MIN_386();
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2215 return IEMOP_RAISE_INVALID_OPCODE();
2216 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2217 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2218 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2219}
2220
2221
2222/** Opcode 0x0f 0x22. */
2223FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2224{
2225 /* mod is ignored, as are operand size overrides. */
2226 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2227 IEMOP_HLP_MIN_386();
2228 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2229 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2230 else
2231 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2232
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2235 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2236 {
2237 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2238 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2239 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2240 iCrReg |= 8;
2241 }
2242 switch (iCrReg)
2243 {
2244 case 0: case 2: case 3: case 4: case 8:
2245 break;
2246 default:
2247 return IEMOP_RAISE_INVALID_OPCODE();
2248 }
2249 IEMOP_HLP_DONE_DECODING();
2250
2251 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2252}
2253
2254
2255/** Opcode 0x0f 0x23. */
2256FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2257{
2258 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2259 IEMOP_HLP_MIN_386();
2260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2263 return IEMOP_RAISE_INVALID_OPCODE();
2264 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2265 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2266 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2267}
2268
2269
2270/** Opcode 0x0f 0x24. */
2271FNIEMOP_DEF(iemOp_mov_Rd_Td)
2272{
2273 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2274 /** @todo works on 386 and 486. */
2275 /* The RM byte is not considered, see testcase. */
2276 return IEMOP_RAISE_INVALID_OPCODE();
2277}
2278
2279
2280/** Opcode 0x0f 0x26. */
2281FNIEMOP_DEF(iemOp_mov_Td_Rd)
2282{
2283 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2284 /** @todo works on 386 and 486. */
2285 /* The RM byte is not considered, see testcase. */
2286 return IEMOP_RAISE_INVALID_OPCODE();
2287}
2288
2289
2290/**
2291 * @opcode 0x28
2292 * @oppfx none
2293 * @opcpuid sse
2294 * @opgroup og_sse_simdfp_datamove
2295 * @opxcpttype 1
2296 * @optest op1=1 op2=2 -> op1=2
2297 * @optest op1=0 op2=-42 -> op1=-42
2298 */
2299FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2300{
2301 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2304 {
2305 /*
2306 * Register, register.
2307 */
2308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2309 IEM_MC_BEGIN(0, 0);
2310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2313 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2314 IEM_MC_ADVANCE_RIP();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 IEM_MC_BEGIN(0, 2);
2323 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2328 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2330
2331 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2332 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2333
2334 IEM_MC_ADVANCE_RIP();
2335 IEM_MC_END();
2336 }
2337 return VINF_SUCCESS;
2338}
2339
2340/**
2341 * @opcode 0x28
2342 * @oppfx 0x66
2343 * @opcpuid sse2
2344 * @opgroup og_sse2_pcksclr_datamove
2345 * @opxcpttype 1
2346 * @optest op1=1 op2=2 -> op1=2
2347 * @optest op1=0 op2=-42 -> op1=-42
2348 */
2349FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2350{
2351 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2354 {
2355 /*
2356 * Register, register.
2357 */
2358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2359 IEM_MC_BEGIN(0, 0);
2360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2361 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2362 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2363 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2364 IEM_MC_ADVANCE_RIP();
2365 IEM_MC_END();
2366 }
2367 else
2368 {
2369 /*
2370 * Register, memory.
2371 */
2372 IEM_MC_BEGIN(0, 2);
2373 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2375
2376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2378 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2380
2381 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2382 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2383
2384 IEM_MC_ADVANCE_RIP();
2385 IEM_MC_END();
2386 }
2387 return VINF_SUCCESS;
2388}
2389
2390/* Opcode 0xf3 0x0f 0x28 - invalid */
2391/* Opcode 0xf2 0x0f 0x28 - invalid */
2392
2393/**
2394 * @opcode 0x29
2395 * @oppfx none
2396 * @opcpuid sse
2397 * @opgroup og_sse_simdfp_datamove
2398 * @opxcpttype 1
2399 * @optest op1=1 op2=2 -> op1=2
2400 * @optest op1=0 op2=-42 -> op1=-42
2401 */
2402FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2403{
2404 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2407 {
2408 /*
2409 * Register, register.
2410 */
2411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2412 IEM_MC_BEGIN(0, 0);
2413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2415 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2416 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2417 IEM_MC_ADVANCE_RIP();
2418 IEM_MC_END();
2419 }
2420 else
2421 {
2422 /*
2423 * Memory, register.
2424 */
2425 IEM_MC_BEGIN(0, 2);
2426 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428
2429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2431 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2433
2434 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2435 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 return VINF_SUCCESS;
2441}
2442
2443/**
2444 * @opcode 0x29
2445 * @oppfx 0x66
2446 * @opcpuid sse2
2447 * @opgroup og_sse2_pcksclr_datamove
2448 * @opxcpttype 1
2449 * @optest op1=1 op2=2 -> op1=2
2450 * @optest op1=0 op2=-42 -> op1=-42
2451 */
2452FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2453{
2454 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2457 {
2458 /*
2459 * Register, register.
2460 */
2461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2462 IEM_MC_BEGIN(0, 0);
2463 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2464 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2465 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2466 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2467 IEM_MC_ADVANCE_RIP();
2468 IEM_MC_END();
2469 }
2470 else
2471 {
2472 /*
2473 * Memory, register.
2474 */
2475 IEM_MC_BEGIN(0, 2);
2476 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2478
2479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2483
2484 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2485 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP();
2488 IEM_MC_END();
2489 }
2490 return VINF_SUCCESS;
2491}
2492
2493/* Opcode 0xf3 0x0f 0x29 - invalid */
2494/* Opcode 0xf2 0x0f 0x29 - invalid */
2495
2496
2497/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2498FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2499/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2500FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2501/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2502FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2503/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2504FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2505
2506
2507/**
2508 * @opcode 0x2b
2509 * @opcodesub !11 mr/reg
2510 * @oppfx none
2511 * @opcpuid sse
2512 * @opgroup og_sse1_cachect
2513 * @opxcpttype 1
2514 * @optest op1=1 op2=2 -> op1=2
2515 * @optest op1=0 op2=-42 -> op1=-42
2516 */
2517FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2518{
2519 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2522 {
2523 /*
2524 * memory, register.
2525 */
2526 IEM_MC_BEGIN(0, 2);
2527 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529
2530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2534
2535 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2536 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2537
2538 IEM_MC_ADVANCE_RIP();
2539 IEM_MC_END();
2540 }
2541 /* The register, register encoding is invalid. */
2542 else
2543 return IEMOP_RAISE_INVALID_OPCODE();
2544 return VINF_SUCCESS;
2545}
2546
2547/**
2548 * @opcode 0x2b
2549 * @opcodesub !11 mr/reg
2550 * @oppfx 0x66
2551 * @opcpuid sse2
2552 * @opgroup og_sse2_cachect
2553 * @opxcpttype 1
2554 * @optest op1=1 op2=2 -> op1=2
2555 * @optest op1=0 op2=-42 -> op1=-42
2556 */
2557FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2558{
2559 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2561 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2562 {
2563 /*
2564 * memory, register.
2565 */
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2576 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 }
2581 /* The register, register encoding is invalid. */
2582 else
2583 return IEMOP_RAISE_INVALID_OPCODE();
2584 return VINF_SUCCESS;
2585}
2586/* Opcode 0xf3 0x0f 0x2b - invalid */
2587/* Opcode 0xf2 0x0f 0x2b - invalid */
2588
2589
2590/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2591FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2592/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2593FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2594/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2595FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2596/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2597FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2598
2599/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2600FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2601/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2602FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2603/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2604FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2605/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2606FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2607
2608/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2609FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2610/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2611FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2612/* Opcode 0xf3 0x0f 0x2e - invalid */
2613/* Opcode 0xf2 0x0f 0x2e - invalid */
2614
2615/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2616FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2617/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2618FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2619/* Opcode 0xf3 0x0f 0x2f - invalid */
2620/* Opcode 0xf2 0x0f 0x2f - invalid */
2621
2622/** Opcode 0x0f 0x30. */
2623FNIEMOP_DEF(iemOp_wrmsr)
2624{
2625 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2627 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2628}
2629
2630
2631/** Opcode 0x0f 0x31. */
2632FNIEMOP_DEF(iemOp_rdtsc)
2633{
2634 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2636 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2637}
2638
2639
2640/** Opcode 0x0f 0x32. */
2641FNIEMOP_DEF(iemOp_rdmsr)
2642{
2643 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2646}
2647
2648
2649/** Opcode 0x0f 0x33. */
2650FNIEMOP_DEF(iemOp_rdpmc)
2651{
2652 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2655}
2656
2657
2658/** Opcode 0x0f 0x34. */
2659FNIEMOP_STUB(iemOp_sysenter);
2660/** Opcode 0x0f 0x35. */
2661FNIEMOP_STUB(iemOp_sysexit);
2662/** Opcode 0x0f 0x37. */
2663FNIEMOP_STUB(iemOp_getsec);
2664
2665
2666/** Opcode 0x0f 0x38. */
2667FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2668{
2669#ifdef IEM_WITH_THREE_0F_38
2670 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2671 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2672#else
2673 IEMOP_BITCH_ABOUT_STUB();
2674 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2675#endif
2676}
2677
2678
2679/** Opcode 0x0f 0x3a. */
2680FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2681{
2682#ifdef IEM_WITH_THREE_0F_3A
2683 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2684 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2685#else
2686 IEMOP_BITCH_ABOUT_STUB();
2687 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2688#endif
2689}
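/* Both three-byte tables above are laid out with four entries per opcode
   byte, indexed by the last SIMD prefix seen: idxPrefix is 0 for none, 1 for
   0x66, 2 for 0xf3 and 3 for 0xf2 (the VEX.pp ordering), hence the
   'b * 4 + idxPrefix' lookup. */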
2690
2691
2692/**
2693 * Implements a conditional move.
2694 *
2695 * Wish there was an obvious way to do this where we could share and reduce
2696 * code bloat.
2697 *
2698 * @param a_Cnd The conditional "microcode" operation.
2699 */
2700#define CMOV_X(a_Cnd) \
2701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2702 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2703 { \
2704 switch (pVCpu->iem.s.enmEffOpSize) \
2705 { \
2706 case IEMMODE_16BIT: \
2707 IEM_MC_BEGIN(0, 1); \
2708 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2709 a_Cnd { \
2710 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2711 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2712 } IEM_MC_ENDIF(); \
2713 IEM_MC_ADVANCE_RIP(); \
2714 IEM_MC_END(); \
2715 return VINF_SUCCESS; \
2716 \
2717 case IEMMODE_32BIT: \
2718 IEM_MC_BEGIN(0, 1); \
2719 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2720 a_Cnd { \
2721 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2722 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2723 } IEM_MC_ELSE() { \
2724 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2725 } IEM_MC_ENDIF(); \
2726 IEM_MC_ADVANCE_RIP(); \
2727 IEM_MC_END(); \
2728 return VINF_SUCCESS; \
2729 \
2730 case IEMMODE_64BIT: \
2731 IEM_MC_BEGIN(0, 1); \
2732 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2733 a_Cnd { \
2734 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2735 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2736 } IEM_MC_ENDIF(); \
2737 IEM_MC_ADVANCE_RIP(); \
2738 IEM_MC_END(); \
2739 return VINF_SUCCESS; \
2740 \
2741 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2742 } \
2743 } \
2744 else \
2745 { \
2746 switch (pVCpu->iem.s.enmEffOpSize) \
2747 { \
2748 case IEMMODE_16BIT: \
2749 IEM_MC_BEGIN(0, 2); \
2750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2751 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2753 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2754 a_Cnd { \
2755 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2756 } IEM_MC_ENDIF(); \
2757 IEM_MC_ADVANCE_RIP(); \
2758 IEM_MC_END(); \
2759 return VINF_SUCCESS; \
2760 \
2761 case IEMMODE_32BIT: \
2762 IEM_MC_BEGIN(0, 2); \
2763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2764 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2766 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2767 a_Cnd { \
2768 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2769 } IEM_MC_ELSE() { \
2770 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2771 } IEM_MC_ENDIF(); \
2772 IEM_MC_ADVANCE_RIP(); \
2773 IEM_MC_END(); \
2774 return VINF_SUCCESS; \
2775 \
2776 case IEMMODE_64BIT: \
2777 IEM_MC_BEGIN(0, 2); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2781 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2782 a_Cnd { \
2783 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2784 } IEM_MC_ENDIF(); \
2785 IEM_MC_ADVANCE_RIP(); \
2786 IEM_MC_END(); \
2787 return VINF_SUCCESS; \
2788 \
2789 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2790 } \
2791 } do {} while (0)
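/* Note on CMOV_X above: in 64-bit mode a 32-bit operand always zero-extends
   into the full 64-bit GPR, even when the condition is false, so the 32-bit
   cases need the IEM_MC_ELSE() that clears the high half; the 16-bit and
   64-bit cases can simply leave the destination untouched. Note also that
   the memory operand is always fetched, matching real CMOV behavior. */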
2792
2793
2794
2795/** Opcode 0x0f 0x40. */
2796FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2797{
2798 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2799 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2800}
2801
2802
2803/** Opcode 0x0f 0x41. */
2804FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2805{
2806 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2807 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2808}
2809
2810
2811/** Opcode 0x0f 0x42. */
2812FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2813{
2814 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2815 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2816}
2817
2818
2819/** Opcode 0x0f 0x43. */
2820FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2821{
2822 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2823 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2824}
2825
2826
2827/** Opcode 0x0f 0x44. */
2828FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2829{
2830 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2831 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2832}
2833
2834
2835/** Opcode 0x0f 0x45. */
2836FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2837{
2838 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2839 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2840}
2841
2842
2843/** Opcode 0x0f 0x46. */
2844FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2845{
2846 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2847 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2848}
2849
2850
2851/** Opcode 0x0f 0x47. */
2852FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2853{
2854 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2855 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2856}
2857
2858
2859/** Opcode 0x0f 0x48. */
2860FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2861{
2862 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2863 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2864}
2865
2866
2867/** Opcode 0x0f 0x49. */
2868FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2869{
2870 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2871 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2872}
2873
2874
2875/** Opcode 0x0f 0x4a. */
2876FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2877{
2878 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2879 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2880}
2881
2882
2883/** Opcode 0x0f 0x4b. */
2884FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2885{
2886 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2887 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2888}
2889
2890
2891/** Opcode 0x0f 0x4c. */
2892FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2893{
2894 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2895 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2896}
2897
2898
2899/** Opcode 0x0f 0x4d. */
2900FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2901{
2902 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2903 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2904}
2905
2906
2907/** Opcode 0x0f 0x4e. */
2908FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2909{
2910 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2911 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2912}
2913
2914
2915/** Opcode 0x0f 0x4f. */
2916FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2917{
2918 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2919 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2920}
2921
2922#undef CMOV_X
2923
2924/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2925FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2926/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2927FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2928/* Opcode 0xf3 0x0f 0x50 - invalid */
2929/* Opcode 0xf2 0x0f 0x50 - invalid */
2930
2931/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2932FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2933/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2934FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2935/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2936FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2937/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2938FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2939
2940/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2941FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2942/* Opcode 0x66 0x0f 0x52 - invalid */
2943/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2944FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2945/* Opcode 0xf2 0x0f 0x52 - invalid */
2946
2947/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2948FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2949/* Opcode 0x66 0x0f 0x53 - invalid */
2950/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2951FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2952/* Opcode 0xf2 0x0f 0x53 - invalid */
2953
2954/** Opcode 0x0f 0x54 - andps Vps, Wps */
2955FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2958/* Opcode 0xf3 0x0f 0x54 - invalid */
2959/* Opcode 0xf2 0x0f 0x54 - invalid */
2960
2961/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2962FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2963/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2964FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2965/* Opcode 0xf3 0x0f 0x55 - invalid */
2966/* Opcode 0xf2 0x0f 0x55 - invalid */
2967
2968/** Opcode 0x0f 0x56 - orps Vps, Wps */
2969FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2970/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2971FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2972/* Opcode 0xf3 0x0f 0x56 - invalid */
2973/* Opcode 0xf2 0x0f 0x56 - invalid */
2974
2975/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2976FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2977/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2978FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2979/* Opcode 0xf3 0x0f 0x57 - invalid */
2980/* Opcode 0xf2 0x0f 0x57 - invalid */
2981
2982/** Opcode 0x0f 0x58 - addps Vps, Wps */
2983FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2984/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2985FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2986/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2987FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2988/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2989FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2990
2991/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2992FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2993/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2994FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2995/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2996FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2997/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2998FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2999
3000/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3001FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3002/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3003FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3004/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3005FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3006/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3007FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3008
3009/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3010FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3011/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3012FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3013/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3014FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3015/* Opcode 0xf2 0x0f 0x5b - invalid */
3016
3017/** Opcode 0x0f 0x5c - subps Vps, Wps */
3018FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3019/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3020FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3021/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3022FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3023/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3024FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3025
3026/** Opcode 0x0f 0x5d - minps Vps, Wps */
3027FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3028/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3029FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3030/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3031FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3032/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3033FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3034
3035/** Opcode 0x0f 0x5e - divps Vps, Wps */
3036FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3037/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3038FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3039/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3040FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3041/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3042FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3043
3044/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3045FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3046/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3047FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3048/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3049FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3050/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3051FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3052
3053/**
3054 * Common worker for SSE2 instructions on the forms:
3055 *      pxxxx xmm1, xmm2/mem128
3056 *
3057 * The 2nd operand is the first half of a register, which in the memory case
3058 * means a 128-bit aligned 64-bit fetch of which only the low quadword is
3059 * used.
3060 *
3061 * Exceptions type 4.
3062 */
3063FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3064{
3065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3067 {
3068 /*
3069 * Register, register.
3070 */
3071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3072 IEM_MC_BEGIN(2, 0);
3073 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3074 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3075 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3076 IEM_MC_PREPARE_SSE_USAGE();
3077 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3078 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3079 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3080 IEM_MC_ADVANCE_RIP();
3081 IEM_MC_END();
3082 }
3083 else
3084 {
3085 /*
3086 * Register, memory.
3087 */
3088 IEM_MC_BEGIN(2, 2);
3089 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3090 IEM_MC_LOCAL(uint64_t, uSrc);
3091 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3093
3094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3096 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3097 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3098
3099 IEM_MC_PREPARE_SSE_USAGE();
3100 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3101 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3102
3103 IEM_MC_ADVANCE_RIP();
3104 IEM_MC_END();
3105 }
3106 return VINF_SUCCESS;
3107}
3108
3109
3110/**
3111 * Common worker for MMX instructions on the forms:
3112 *      pxxxx mm1, mm2/mem32
3113 *
3114 * The 2nd operand is the first half of a register, which in the memory case
3115 * means a 32-bit memory access and in the register case means that only the
3116 * low 32 bits of mm2 are used.
3117 *
3118 * Exceptions type 4.
3119 */
3120FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3121{
3122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3123 if (!pImpl->pfnU64)
3124 return IEMOP_RAISE_INVALID_OPCODE();
3125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3126 {
3127 /*
3128 * Register, register.
3129 */
3130 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3131 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3133 IEM_MC_BEGIN(2, 0);
3134 IEM_MC_ARG(uint64_t *, pDst, 0);
3135 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3136 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3137 IEM_MC_PREPARE_FPU_USAGE();
3138 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3139 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3140 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3141 IEM_MC_ADVANCE_RIP();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /*
3147 * Register, memory.
3148 */
3149 IEM_MC_BEGIN(2, 2);
3150 IEM_MC_ARG(uint64_t *, pDst, 0);
3151 IEM_MC_LOCAL(uint32_t, uSrc);
3152 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3154
3155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3158 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3159
3160 IEM_MC_PREPARE_FPU_USAGE();
3161 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3162 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3163
3164 IEM_MC_ADVANCE_RIP();
3165 IEM_MC_END();
3166 }
3167 return VINF_SUCCESS;
3168}
3169
3170
3171/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3172FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3173{
3174 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3175 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3176}
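/* Worked example for the low-half interleave: punpcklbw mm1, mm2 with
   mm1=0x0706050403020100 and mm2=0x0f0e0d0c0b0a0908 yields
   mm1=0x0b030a0209010800, i.e. bytes 00,08,01,09,02,0a,03,0b from low to
   high. */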
3177
3178/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3179FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3180{
3181 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3182 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3183}
3184
3185/* Opcode 0xf3 0x0f 0x60 - invalid */
3186
3187
3188/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3189FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3190{
3191 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
3192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3193}
3194
3195/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3196FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3197{
3198 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3199 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3200}
3201
3202/* Opcode 0xf3 0x0f 0x61 - invalid */
3203
3204
3205/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3206FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3207{
3208 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3210}
3211
3212/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3213FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3214{
3215 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3217}
3218
3219/* Opcode 0xf3 0x0f 0x62 - invalid */
3220
3221
3222
3223/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3224FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3225/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3226FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3227/* Opcode 0xf3 0x0f 0x63 - invalid */
3228
3229/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3230FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3231/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3232FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3233/* Opcode 0xf3 0x0f 0x64 - invalid */
3234
3235/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3236FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3237/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3238FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3239/* Opcode 0xf3 0x0f 0x65 - invalid */
3240
3241/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3242FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3243/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3244FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3245/* Opcode 0xf3 0x0f 0x66 - invalid */
3246
3247/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3248FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3249/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3250FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3251/* Opcode 0xf3 0x0f 0x67 - invalid */
3252
3253
3254/**
3255 * Common worker for MMX instructions on the form:
3256 * pxxxx mm1, mm2/mem64
3257 *
3258 * The 2nd operand is the second half of a register, which in the memory case
3259 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3260 * where it may read the full 128 bits or only the upper 64 bits.
3261 *
3262 * Exceptions type 4.
3263 */
3264FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3265{
3266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3267 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3269 {
3270 /*
3271 * Register, register.
3272 */
3273 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3274 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_BEGIN(2, 0);
3277 IEM_MC_ARG(uint64_t *, pDst, 0);
3278 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3279 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3280 IEM_MC_PREPARE_FPU_USAGE();
3281 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3282 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3283 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3284 IEM_MC_ADVANCE_RIP();
3285 IEM_MC_END();
3286 }
3287 else
3288 {
3289 /*
3290 * Register, memory.
3291 */
3292 IEM_MC_BEGIN(2, 2);
3293 IEM_MC_ARG(uint64_t *, pDst, 0);
3294 IEM_MC_LOCAL(uint64_t, uSrc);
3295 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3300 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3301 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3302
3303 IEM_MC_PREPARE_FPU_USAGE();
3304 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3305 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3306
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 return VINF_SUCCESS;
3311}
3312
3313
3314/**
3315 * Common worker for SSE2 instructions on the form:
3316 * pxxxx xmm1, xmm2/mem128
3317 *
3318 * The 2nd operand is the second half of a register, which in the memory case
3319 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3320 * where it may read the full 128 bits or only the upper 64 bits.
3321 *
3322 * Exceptions type 4.
3323 */
3324FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3325{
3326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3327 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3328 {
3329 /*
3330 * Register, register.
3331 */
3332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3333 IEM_MC_BEGIN(2, 0);
3334 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3335 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3336 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3337 IEM_MC_PREPARE_SSE_USAGE();
3338 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3339 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3340 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3341 IEM_MC_ADVANCE_RIP();
3342 IEM_MC_END();
3343 }
3344 else
3345 {
3346 /*
3347 * Register, memory.
3348 */
3349 IEM_MC_BEGIN(2, 2);
3350 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3351 IEM_MC_LOCAL(RTUINT128U, uSrc);
3352 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3354
3355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3358 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3359
3360 IEM_MC_PREPARE_SSE_USAGE();
3361 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3362 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3363
3364 IEM_MC_ADVANCE_RIP();
3365 IEM_MC_END();
3366 }
3367 return VINF_SUCCESS;
3368}
3369
3370
3371/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3372FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3373{
3374 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3375 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3376}
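/* Worked example for the high-half interleave: punpckhbw mm1, mm2 with
   mm1=0x0706050403020100 and mm2=0x0f0e0d0c0b0a0908 yields
   mm1=0x0f070e060d050c04, i.e. bytes 04,0c,05,0d,06,0e,07,0f from low to
   high. */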
3377
3378/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3379FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3380{
3381 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3382 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3383}
3384/* Opcode 0xf3 0x0f 0x68 - invalid */
3385
3386
3387/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3388FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3389{
3390 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3391 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3392}
3393
3394/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3395FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3396{
3397 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3398 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3399
3400}
3401/* Opcode 0xf3 0x0f 0x69 - invalid */
3402
3403
3404/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3405FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3406{
3407 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3409}
3410
3411/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3412FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3413{
3414 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
3415 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3416}
3417/* Opcode 0xf3 0x0f 0x6a - invalid */
3418
3419
3420/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3421FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3422/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3423FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3424/* Opcode 0xf3 0x0f 0x6b - invalid */
3425
3426
3427/* Opcode 0x0f 0x6c - invalid */
3428
3429/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3430FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3431{
3432 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3433 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3434}
3435
3436/* Opcode 0xf3 0x0f 0x6c - invalid */
3437/* Opcode 0xf2 0x0f 0x6c - invalid */
3438
3439
3440/* Opcode 0x0f 0x6d - invalid */
3441
3442/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3443FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3444{
3445 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, Wx");
3446 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3447}
3448
3449/* Opcode 0xf3 0x0f 0x6d - invalid */
3450
3451
3452FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3453{
3454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3455 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3456 {
3457 /**
3458 * @opcode 0x6e
3459 * @opcodesub rex.w=1
3460 * @oppfx none
3461 * @opcpuid mmx
3462 * @opgroup og_mmx_datamove
3463 * @opxcpttype 5
3464 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3465 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3466 */
3467 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /* MMX, greg64 */
3471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3472 IEM_MC_BEGIN(0, 1);
3473 IEM_MC_LOCAL(uint64_t, u64Tmp);
3474
3475 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3476 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3477
3478 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3479 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3480 IEM_MC_FPU_TO_MMX_MODE();
3481
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /* MMX, [mem64] */
3488 IEM_MC_BEGIN(0, 2);
3489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3490 IEM_MC_LOCAL(uint64_t, u64Tmp);
3491
3492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3494 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3495 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3496
3497 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3498 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3499 IEM_MC_FPU_TO_MMX_MODE();
3500
3501 IEM_MC_ADVANCE_RIP();
3502 IEM_MC_END();
3503 }
3504 }
3505 else
3506 {
3507 /**
3508 * @opdone
3509 * @opcode 0x6e
3510 * @opcodesub rex.w=0
3511 * @oppfx none
3512 * @opcpuid mmx
3513 * @opgroup og_mmx_datamove
3514 * @opxcpttype 5
3515 * @opfunction iemOp_movd_q_Pd_Ey
3516 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3517 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3518 */
3519 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3521 {
3522 /* MMX, greg */
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524 IEM_MC_BEGIN(0, 1);
3525 IEM_MC_LOCAL(uint64_t, u64Tmp);
3526
3527 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3528 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3529
3530 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3531 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3532 IEM_MC_FPU_TO_MMX_MODE();
3533
3534 IEM_MC_ADVANCE_RIP();
3535 IEM_MC_END();
3536 }
3537 else
3538 {
3539 /* MMX, [mem] */
3540 IEM_MC_BEGIN(0, 2);
3541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3542 IEM_MC_LOCAL(uint32_t, u32Tmp);
3543
3544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3546 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3547 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3548
3549 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3550 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3551 IEM_MC_FPU_TO_MMX_MODE();
3552
3553 IEM_MC_ADVANCE_RIP();
3554 IEM_MC_END();
3555 }
3556 }
3557 return VINF_SUCCESS;
3558}
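/* Note: the IEM_MC_FPU_TO_MMX_MODE() calls above reflect that any MMX write
   puts the FPU in MMX mode: the x87 tag word is set to all-valid (the
   ftw=0xff asserted by the @optest annotations) and the top-of-stack is
   reset to 0. */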
3559
3560/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3561FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3562{
3563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3564 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3565 {
3566 /**
3567 * @opcode 0x6e
3568 * @opcodesub rex.w=1
3569 * @oppfx 0x66
3570 * @opcpuid sse2
3571 * @opgroup og_sse2_simdint_datamove
3572 * @opxcpttype 5
3573 * @optest 64-bit / op1=1 op2=2 -> op1=2
3574 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3575 */
3576 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3577 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3578 {
3579 /* XMM, greg64 */
3580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3581 IEM_MC_BEGIN(0, 1);
3582 IEM_MC_LOCAL(uint64_t, u64Tmp);
3583
3584 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3585 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3586
3587 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3588 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3589
3590 IEM_MC_ADVANCE_RIP();
3591 IEM_MC_END();
3592 }
3593 else
3594 {
3595 /* XMM, [mem64] */
3596 IEM_MC_BEGIN(0, 2);
3597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3598 IEM_MC_LOCAL(uint64_t, u64Tmp);
3599
3600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3602 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3603 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3604
3605 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3606 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3607
3608 IEM_MC_ADVANCE_RIP();
3609 IEM_MC_END();
3610 }
3611 }
3612 else
3613 {
3614 /**
3615 * @opdone
3616 * @opcode 0x6e
3617 * @opcodesub rex.w=0
3618 * @oppfx 0x66
3619 * @opcpuid sse2
3620 * @opgroup og_sse2_simdint_datamove
3621 * @opxcpttype 5
3622 * @opfunction iemOp_movd_q_Vy_Ey
3623 * @optest op1=1 op2=2 -> op1=2
3624 * @optest op1=0 op2=-42 -> op1=-42
3625 */
3626 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3628 {
3629 /* XMM, greg32 */
3630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3631 IEM_MC_BEGIN(0, 1);
3632 IEM_MC_LOCAL(uint32_t, u32Tmp);
3633
3634 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3635 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3636
3637 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3638 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3639
3640 IEM_MC_ADVANCE_RIP();
3641 IEM_MC_END();
3642 }
3643 else
3644 {
3645 /* XMM, [mem32] */
3646 IEM_MC_BEGIN(0, 2);
3647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3648 IEM_MC_LOCAL(uint32_t, u32Tmp);
3649
3650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3652 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3653 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3654
3655 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3656 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3657
3658 IEM_MC_ADVANCE_RIP();
3659 IEM_MC_END();
3660 }
3661 }
3662 return VINF_SUCCESS;
3663}
3664
3665/* Opcode 0xf3 0x0f 0x6e - invalid */
3666
3667
3668/**
3669 * @opcode 0x6f
3670 * @oppfx none
3671 * @opcpuid mmx
3672 * @opgroup og_mmx_datamove
3673 * @opxcpttype 5
3674 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3675 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3676 */
3677FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3678{
3679 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3682 {
3683 /*
3684 * Register, register.
3685 */
3686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3687 IEM_MC_BEGIN(0, 1);
3688 IEM_MC_LOCAL(uint64_t, u64Tmp);
3689
3690 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3691 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3692
3693 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3694 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3695 IEM_MC_FPU_TO_MMX_MODE();
3696
3697 IEM_MC_ADVANCE_RIP();
3698 IEM_MC_END();
3699 }
3700 else
3701 {
3702 /*
3703 * Register, memory.
3704 */
3705 IEM_MC_BEGIN(0, 2);
3706 IEM_MC_LOCAL(uint64_t, u64Tmp);
3707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3708
3709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3711 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3712 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3713
3714 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3715 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3716 IEM_MC_FPU_TO_MMX_MODE();
3717
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 }
3721 return VINF_SUCCESS;
3722}
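/* Note: IEM_MC_FPU_TO_MMX_MODE above models the architectural side effect of
   executing an MMX instruction: the x87 top-of-stack is reset and the tag word
   becomes all-valid (FTW=0xff), which is what the ftw=0xff assertions in the
   @optest lines of the function comment verify. */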
3723
3724/**
3725 * @opcode 0x6f
3726 * @oppfx 0x66
3727 * @opcpuid sse2
3728 * @opgroup og_sse2_simdint_datamove
3729 * @opxcpttype 1
3730 * @optest op1=1 op2=2 -> op1=2
3731 * @optest op1=0 op2=-42 -> op1=-42
3732 */
3733FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3734{
3735 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3738 {
3739 /*
3740 * Register, register.
3741 */
3742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3743 IEM_MC_BEGIN(0, 0);
3744
3745 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3746 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3747
3748 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3749 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3750 IEM_MC_ADVANCE_RIP();
3751 IEM_MC_END();
3752 }
3753 else
3754 {
3755 /*
3756 * Register, memory.
3757 */
3758 IEM_MC_BEGIN(0, 2);
3759 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3761
3762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3764 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3765 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3766
3767 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3768 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3769
3770 IEM_MC_ADVANCE_RIP();
3771 IEM_MC_END();
3772 }
3773 return VINF_SUCCESS;
3774}
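/* Note: the _ALIGN_SSE fetch is what distinguishes movdqa from the movdqu
   variant below: a 16-byte memory operand that is not 16-byte aligned raises
   #GP(0).  For example:

       movdqa xmm0, [rsp+8]   ; #GP(0) unless rsp+8 is 16-byte aligned
       movdqu xmm0, [rsp+8]   ; fine at any alignment

   movdqu (0xf3 prefix) accordingly uses the plain IEM_MC_FETCH_MEM_U128. */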
3775
3776/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3777FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3778{
3779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3780 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3781 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3782 {
3783 /*
3784 * Register, register.
3785 */
3786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3787 IEM_MC_BEGIN(0, 0);
3788 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3789 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3790 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3791 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3792 IEM_MC_ADVANCE_RIP();
3793 IEM_MC_END();
3794 }
3795 else
3796 {
3797 /*
3798 * Register, memory.
3799 */
3800 IEM_MC_BEGIN(0, 2);
3801 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3803
3804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3806 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3807 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3808 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3809 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3810
3811 IEM_MC_ADVANCE_RIP();
3812 IEM_MC_END();
3813 }
3814 return VINF_SUCCESS;
3815}
3816
3817
3818/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3819FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3820{
3821 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3822 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3823 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3824 {
3825 /*
3826 * Register, register.
3827 */
3828 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3830
3831 IEM_MC_BEGIN(3, 0);
3832 IEM_MC_ARG(uint64_t *, pDst, 0);
3833 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3834 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3835 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3836 IEM_MC_PREPARE_FPU_USAGE();
3837 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3838 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3839 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3840 IEM_MC_ADVANCE_RIP();
3841 IEM_MC_END();
3842 }
3843 else
3844 {
3845 /*
3846 * Register, memory.
3847 */
3848 IEM_MC_BEGIN(3, 2);
3849 IEM_MC_ARG(uint64_t *, pDst, 0);
3850 IEM_MC_LOCAL(uint64_t, uSrc);
3851 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3853
3854        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3855 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3856 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3858 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3859
3860 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3861 IEM_MC_PREPARE_FPU_USAGE();
3862 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3863 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3864
3865 IEM_MC_ADVANCE_RIP();
3866 IEM_MC_END();
3867 }
3868 return VINF_SUCCESS;
3869}
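/* Worked example: each 2-bit field of the immediate (bEvil) selects the source
   word for the corresponding destination word, low field first.  With
   imm8=0x1B (binary 00 01 10 11) the four words are reversed:

       mm0 = 0xDDDD'CCCC'BBBB'AAAA
       pshufw mm1, mm0, 0x1B      ; mm1 = 0xAAAA'BBBB'CCCC'DDDD

   pshufd below applies the same scheme to the four dwords of an XMM register. */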
3870
3871/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3872FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3873{
3874 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3875 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3876 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3877 {
3878 /*
3879 * Register, register.
3880 */
3881 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3883
3884 IEM_MC_BEGIN(3, 0);
3885 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3886 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3887 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3888 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3889 IEM_MC_PREPARE_SSE_USAGE();
3890 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3891 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3892 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3893 IEM_MC_ADVANCE_RIP();
3894 IEM_MC_END();
3895 }
3896 else
3897 {
3898 /*
3899 * Register, memory.
3900 */
3901 IEM_MC_BEGIN(3, 2);
3902 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3903 IEM_MC_LOCAL(RTUINT128U, uSrc);
3904 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3906
3907        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3908 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3909 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3911 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3912
3913 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3914 IEM_MC_PREPARE_SSE_USAGE();
3915 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3916 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3917
3918 IEM_MC_ADVANCE_RIP();
3919 IEM_MC_END();
3920 }
3921 return VINF_SUCCESS;
3922}
3923
3924/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3925FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3926{
3927 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3929 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3930 {
3931 /*
3932 * Register, register.
3933 */
3934 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3936
3937 IEM_MC_BEGIN(3, 0);
3938 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3939 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3940 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3941 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3942 IEM_MC_PREPARE_SSE_USAGE();
3943 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3944 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3945 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3946 IEM_MC_ADVANCE_RIP();
3947 IEM_MC_END();
3948 }
3949 else
3950 {
3951 /*
3952 * Register, memory.
3953 */
3954 IEM_MC_BEGIN(3, 2);
3955 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3956 IEM_MC_LOCAL(RTUINT128U, uSrc);
3957 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3958 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3959
3960        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3961 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3962 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3964 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3965
3966 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3967 IEM_MC_PREPARE_SSE_USAGE();
3968 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3969 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3970
3971 IEM_MC_ADVANCE_RIP();
3972 IEM_MC_END();
3973 }
3974 return VINF_SUCCESS;
3975}
3976
3977/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3978FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3979{
3980 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3982 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3983 {
3984 /*
3985 * Register, register.
3986 */
3987 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3989
3990 IEM_MC_BEGIN(3, 0);
3991 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3992 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3993 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3995 IEM_MC_PREPARE_SSE_USAGE();
3996 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3997 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3998 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3999 IEM_MC_ADVANCE_RIP();
4000 IEM_MC_END();
4001 }
4002 else
4003 {
4004 /*
4005 * Register, memory.
4006 */
4007 IEM_MC_BEGIN(3, 2);
4008 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4009 IEM_MC_LOCAL(RTUINT128U, uSrc);
4010 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4012
4013        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4014 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4015 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4017 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4018
4019 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4020 IEM_MC_PREPARE_SSE_USAGE();
4021 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4022 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4023
4024 IEM_MC_ADVANCE_RIP();
4025 IEM_MC_END();
4026 }
4027 return VINF_SUCCESS;
4028}
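/* Note on the two prefixed variants above: pshufhw shuffles only the four
   words in bits 127:64 of the source, copying bits 63:0 through unchanged,
   while pshuflw shuffles the words in bits 63:0 and passes bits 127:64
   through.  The immediate is interpreted exactly as for pshufw/pshufd. */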
4029
4030
4031/** Opcode 0x0f 0x71 11/2. */
4032FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4033
4034/** Opcode 0x66 0x0f 0x71 11/2. */
4035FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4036
4037/** Opcode 0x0f 0x71 11/4. */
4038FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4039
4040/** Opcode 0x66 0x0f 0x71 11/4. */
4041FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4042
4043/** Opcode 0x0f 0x71 11/6. */
4044FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4045
4046/** Opcode 0x66 0x0f 0x71 11/6. */
4047FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4048
4049
4050/**
4051 * Group 12 jump table for register variant.
4052 */
4053IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4054{
4055 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4056 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4057 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4058 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4059 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4060 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4061 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4062 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4063};
4064AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
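/* Illustrative note: the dispatcher below indexes this table as
   reg*4 + idxPrefix, where the four columns per /reg row are assumed to
   follow the usual IEM prefix order: none, 0x66, 0xf3, 0xf2.  E.g.
   0x66 0x0f 0x71 /2 (psrlw xmm,imm8) selects entry 2*4 + 1, i.e.
   iemOp_Grp12_psrlw_Ux_Ib. */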
4065
4066
4067/** Opcode 0x0f 0x71. */
4068FNIEMOP_DEF(iemOp_Grp12)
4069{
4070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4072 /* register, register */
4073 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4074 + pVCpu->iem.s.idxPrefix], bRm);
4075 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4076}
4077
4078
4079/** Opcode 0x0f 0x72 11/2. */
4080FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4081
4082/** Opcode 0x66 0x0f 0x72 11/2. */
4083FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4084
4085/** Opcode 0x0f 0x72 11/4. */
4086FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4087
4088/** Opcode 0x66 0x0f 0x72 11/4. */
4089FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4090
4091/** Opcode 0x0f 0x72 11/6. */
4092FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4093
4094/** Opcode 0x66 0x0f 0x72 11/6. */
4095FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4096
4097
4098/**
4099 * Group 13 jump table for register variant.
4100 */
4101IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4102{
4103 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4104 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4105 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4106 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4107 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4108 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4109 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4110 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4111};
4112AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4113
4114/** Opcode 0x0f 0x72. */
4115FNIEMOP_DEF(iemOp_Grp13)
4116{
4117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4118 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4119 /* register, register */
4120 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4121 + pVCpu->iem.s.idxPrefix], bRm);
4122 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4123}
4124
4125
4126/** Opcode 0x0f 0x73 11/2. */
4127FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4128
4129/** Opcode 0x66 0x0f 0x73 11/2. */
4130FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4131
4132/** Opcode 0x66 0x0f 0x73 11/3. */
4133FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4134
4135/** Opcode 0x0f 0x73 11/6. */
4136FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4137
4138/** Opcode 0x66 0x0f 0x73 11/6. */
4139FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4140
4141/** Opcode 0x66 0x0f 0x73 11/7. */
4142FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4143
4144/**
4145 * Group 14 jump table for register variant.
4146 */
4147IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4148{
4149 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4150 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4151 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4152 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4153 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4154 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4155 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4156 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4157};
4158AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4159
4160
4161/** Opcode 0x0f 0x73. */
4162FNIEMOP_DEF(iemOp_Grp14)
4163{
4164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4166 /* register, register */
4167 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4168 + pVCpu->iem.s.idxPrefix], bRm);
4169 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4170}
4171
4172
4173/**
4174 * Common worker for MMX instructions of the form:
4175 * pxxx mm1, mm2/mem64
4176 */
4177FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4178{
4179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4180 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4181 {
4182 /*
4183 * Register, register.
4184 */
4185 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4186 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4188 IEM_MC_BEGIN(2, 0);
4189 IEM_MC_ARG(uint64_t *, pDst, 0);
4190 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4191 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4192 IEM_MC_PREPARE_FPU_USAGE();
4193 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4194 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4195 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4196 IEM_MC_ADVANCE_RIP();
4197 IEM_MC_END();
4198 }
4199 else
4200 {
4201 /*
4202 * Register, memory.
4203 */
4204 IEM_MC_BEGIN(2, 2);
4205 IEM_MC_ARG(uint64_t *, pDst, 0);
4206 IEM_MC_LOCAL(uint64_t, uSrc);
4207 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4209
4210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4212 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4213 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4214
4215 IEM_MC_PREPARE_FPU_USAGE();
4216 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4217 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4218
4219 IEM_MC_ADVANCE_RIP();
4220 IEM_MC_END();
4221 }
4222 return VINF_SUCCESS;
4223}
4224
4225
4226/**
4227 * Common worker for SSE2 instructions of the form:
4228 * pxxx xmm1, xmm2/mem128
4229 *
4230 * Proper alignment of the 128-bit operand is enforced.
4231 * Exceptions type 4. SSE2 cpuid checks.
4232 */
4233FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4234{
4235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4236 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4237 {
4238 /*
4239 * Register, register.
4240 */
4241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4242 IEM_MC_BEGIN(2, 0);
4243 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4244 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4245 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4246 IEM_MC_PREPARE_SSE_USAGE();
4247 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4248 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4249 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4250 IEM_MC_ADVANCE_RIP();
4251 IEM_MC_END();
4252 }
4253 else
4254 {
4255 /*
4256 * Register, memory.
4257 */
4258 IEM_MC_BEGIN(2, 2);
4259 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4260 IEM_MC_LOCAL(RTUINT128U, uSrc);
4261 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4263
4264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4266 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4267 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4268
4269 IEM_MC_PREPARE_SSE_USAGE();
4270 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4271 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4272
4273 IEM_MC_ADVANCE_RIP();
4274 IEM_MC_END();
4275 }
4276 return VINF_SUCCESS;
4277}
4278
4279
4280/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4281FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4282{
4283 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4284 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4285}
4286
4287/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4288FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4289{
4290    IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4291 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4292}
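/* Worked example for the compare-for-equality family: every element that
   matches becomes all ones, every other element all zeroes.  For pcmpeqb with
   mm0=0x11223344'55667788 and mm1=0x11220000'55660000:

       pcmpeqb mm0, mm1       ; mm0 = 0xFFFF0000'FFFF0000

   pcmpeqw and pcmpeqd below do the same at word and dword granularity. */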
4293
4294/* Opcode 0xf3 0x0f 0x74 - invalid */
4295/* Opcode 0xf2 0x0f 0x74 - invalid */
4296
4297
4298/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4299FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4300{
4301 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4302 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4303}
4304
4305/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4306FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4307{
4308 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4309 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4310}
4311
4312/* Opcode 0xf3 0x0f 0x75 - invalid */
4313/* Opcode 0xf2 0x0f 0x75 - invalid */
4314
4315
4316/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4317FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4318{
4319 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4320 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4321}
4322
4323/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4324FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4325{
4326    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4327 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4328}
4329
4330/* Opcode 0xf3 0x0f 0x76 - invalid */
4331/* Opcode 0xf2 0x0f 0x76 - invalid */
4332
4333
4334/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4335FNIEMOP_STUB(iemOp_emms);
4336/* Opcode 0x66 0x0f 0x77 - invalid */
4337/* Opcode 0xf3 0x0f 0x77 - invalid */
4338/* Opcode 0xf2 0x0f 0x77 - invalid */
4339
4340/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4341FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4342/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4343FNIEMOP_STUB(iemOp_AmdGrp17);
4344/* Opcode 0xf3 0x0f 0x78 - invalid */
4345/* Opcode 0xf2 0x0f 0x78 - invalid */
4346
4347/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4348FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4349/* Opcode 0x66 0x0f 0x79 - invalid */
4350/* Opcode 0xf3 0x0f 0x79 - invalid */
4351/* Opcode 0xf2 0x0f 0x79 - invalid */
4352
4353/* Opcode 0x0f 0x7a - invalid */
4354/* Opcode 0x66 0x0f 0x7a - invalid */
4355/* Opcode 0xf3 0x0f 0x7a - invalid */
4356/* Opcode 0xf2 0x0f 0x7a - invalid */
4357
4358/* Opcode 0x0f 0x7b - invalid */
4359/* Opcode 0x66 0x0f 0x7b - invalid */
4360/* Opcode 0xf3 0x0f 0x7b - invalid */
4361/* Opcode 0xf2 0x0f 0x7b - invalid */
4362
4363/* Opcode 0x0f 0x7c - invalid */
4364/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4365FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4366/* Opcode 0xf3 0x0f 0x7c - invalid */
4367/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4368FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4369
4370/* Opcode 0x0f 0x7d - invalid */
4371/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4372FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4373/* Opcode 0xf3 0x0f 0x7d - invalid */
4374/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4375FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4376
4377
4378/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4379FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4380{
4381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4382 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4383 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4384 else
4385 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4386 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4387 {
4388 /* greg, MMX */
4389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4390 IEM_MC_BEGIN(0, 1);
4391 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4392 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4393 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4394 {
4395 IEM_MC_LOCAL(uint64_t, u64Tmp);
4396 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4397 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4398 }
4399 else
4400 {
4401 IEM_MC_LOCAL(uint32_t, u32Tmp);
4402 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4403 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4404 }
4405 IEM_MC_ADVANCE_RIP();
4406 IEM_MC_END();
4407 }
4408 else
4409 {
4410 /* [mem], MMX */
4411 IEM_MC_BEGIN(0, 2);
4412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4413 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4414        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4416 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4417 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4418 {
4419 IEM_MC_LOCAL(uint64_t, u64Tmp);
4420 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4421 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4422 }
4423 else
4424 {
4425 IEM_MC_LOCAL(uint32_t, u32Tmp);
4426 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4427 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4428 }
4429 IEM_MC_ADVANCE_RIP();
4430 IEM_MC_END();
4431 }
4432 return VINF_SUCCESS;
4433}
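/* Note: this is the store direction of the 0x0f 0x6e load earlier; REX.W
   selects the width.  Illustrative encodings:

       0f 7e c0           movd eax, mm0    ; low 32 bits of mm0
       48 0f 7e c0        movq rax, mm0    ; full 64 bits (REX.W)
*/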
4434
4435/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4436FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4437{
4438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4439 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4440 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4441 else
4442 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4443 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4444 {
4445 /* greg, XMM */
4446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4447 IEM_MC_BEGIN(0, 1);
4448 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4449 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4450 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4451 {
4452 IEM_MC_LOCAL(uint64_t, u64Tmp);
4453 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4454 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4455 }
4456 else
4457 {
4458 IEM_MC_LOCAL(uint32_t, u32Tmp);
4459 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4460 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4461 }
4462 IEM_MC_ADVANCE_RIP();
4463 IEM_MC_END();
4464 }
4465 else
4466 {
4467 /* [mem], XMM */
4468 IEM_MC_BEGIN(0, 2);
4469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4470 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4471        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4473 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4474 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4475 {
4476 IEM_MC_LOCAL(uint64_t, u64Tmp);
4477 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4478 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4479 }
4480 else
4481 {
4482 IEM_MC_LOCAL(uint32_t, u32Tmp);
4483 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4484 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4485 }
4486 IEM_MC_ADVANCE_RIP();
4487 IEM_MC_END();
4488 }
4489 return VINF_SUCCESS;
4490}
4491
4492
4493/**
4494 * @opcode 0x7e
4495 * @opcodesub !11 mr/reg
4496 * @oppfx 0xf3
4497 * @opcpuid sse2
4498 * @opgroup og_sse2_pcksclr_datamove
4499 * @opxcpttype 5
4500 * @optest op1=1 op2=2 -> op1=2
4501 * @optest op1=0 op2=-42 -> op1=-42
4502 */
4503FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4504{
4505 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4507 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4508 {
4509 /*
4510 * Register, register.
4511 */
4512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4513 IEM_MC_BEGIN(0, 2);
4514 IEM_MC_LOCAL(uint64_t, uSrc);
4515
4516 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4517 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4518
4519 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4520 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4521
4522 IEM_MC_ADVANCE_RIP();
4523 IEM_MC_END();
4524 }
4525 else
4526 {
4527 /*
4528         * Register, memory.
4529 */
4530 IEM_MC_BEGIN(0, 2);
4531 IEM_MC_LOCAL(uint64_t, uSrc);
4532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4533
4534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4536 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4538
4539 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4540 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4541
4542 IEM_MC_ADVANCE_RIP();
4543 IEM_MC_END();
4544 }
4545 return VINF_SUCCESS;
4546}
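/* Note: as the VqZx operand form and the _ZX_U128 stores indicate, this movq
   load always clears bits 127:64 of the destination XMM register, whether the
   source quadword comes from a register or from memory. */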
4547
4548/* Opcode 0xf2 0x0f 0x7e - invalid */
4549
4550
4551/** Opcode 0x0f 0x7f - movq Qq, Pq */
4552FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4553{
4554 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4555 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4556 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4557 {
4558 /*
4559 * Register, register.
4560 */
4561 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4562 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4564 IEM_MC_BEGIN(0, 1);
4565 IEM_MC_LOCAL(uint64_t, u64Tmp);
4566 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4567 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4568 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4569 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4570 IEM_MC_ADVANCE_RIP();
4571 IEM_MC_END();
4572 }
4573 else
4574 {
4575 /*
4576 * Register, memory.
4577 */
4578 IEM_MC_BEGIN(0, 2);
4579 IEM_MC_LOCAL(uint64_t, u64Tmp);
4580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4581
4582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4584 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4585 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4586
4587 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4588 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4589
4590 IEM_MC_ADVANCE_RIP();
4591 IEM_MC_END();
4592 }
4593 return VINF_SUCCESS;
4594}
4595
4596/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4597FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4598{
4599 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4601 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4602 {
4603 /*
4604 * Register, register.
4605 */
4606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4607 IEM_MC_BEGIN(0, 0);
4608 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4609 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4610 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4611 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4612 IEM_MC_ADVANCE_RIP();
4613 IEM_MC_END();
4614 }
4615 else
4616 {
4617 /*
4618 * Register, memory.
4619 */
4620 IEM_MC_BEGIN(0, 2);
4621 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4623
4624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4626 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4627 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4628
4629 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4630 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4631
4632 IEM_MC_ADVANCE_RIP();
4633 IEM_MC_END();
4634 }
4635 return VINF_SUCCESS;
4636}
4637
4638/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4639FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4640{
4641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4642 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4643 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4644 {
4645 /*
4646 * Register, register.
4647 */
4648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4649 IEM_MC_BEGIN(0, 0);
4650 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4651 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4652 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4653 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4654 IEM_MC_ADVANCE_RIP();
4655 IEM_MC_END();
4656 }
4657 else
4658 {
4659 /*
4660 * Register, memory.
4661 */
4662 IEM_MC_BEGIN(0, 2);
4663 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4665
4666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4668 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4669 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4670
4671 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4672 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4673
4674 IEM_MC_ADVANCE_RIP();
4675 IEM_MC_END();
4676 }
4677 return VINF_SUCCESS;
4678}
4679
4680/* Opcode 0xf2 0x0f 0x7f - invalid */
4681
4682
4683
4684/** Opcode 0x0f 0x80. */
4685FNIEMOP_DEF(iemOp_jo_Jv)
4686{
4687 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4688 IEMOP_HLP_MIN_386();
4689 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4690 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4691 {
4692 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4694
4695 IEM_MC_BEGIN(0, 0);
4696 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4697 IEM_MC_REL_JMP_S16(i16Imm);
4698 } IEM_MC_ELSE() {
4699 IEM_MC_ADVANCE_RIP();
4700 } IEM_MC_ENDIF();
4701 IEM_MC_END();
4702 }
4703 else
4704 {
4705 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4707
4708 IEM_MC_BEGIN(0, 0);
4709 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4710 IEM_MC_REL_JMP_S32(i32Imm);
4711 } IEM_MC_ELSE() {
4712 IEM_MC_ADVANCE_RIP();
4713 } IEM_MC_ENDIF();
4714 IEM_MC_END();
4715 }
4716 return VINF_SUCCESS;
4717}
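/* Note on the 0x0f 0x80..0x8f Jcc handlers: they all repeat the pattern
   above.  IEMOP_HLP_DEFAULT_64BIT_OP_SIZE forces a 64-bit operand size in
   long mode, so the rel16 branch (which truncates IP) is assumed to be
   reachable only from 16-bit code or via an 0x66 prefix outside long mode;
   the common path reads a rel32, which IEM_MC_REL_JMP_S32 sign-extends to
   the current RIP width. */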
4718
4719
4720/** Opcode 0x0f 0x81. */
4721FNIEMOP_DEF(iemOp_jno_Jv)
4722{
4723 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4724 IEMOP_HLP_MIN_386();
4725 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4726 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4727 {
4728 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4730
4731 IEM_MC_BEGIN(0, 0);
4732 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4733 IEM_MC_ADVANCE_RIP();
4734 } IEM_MC_ELSE() {
4735 IEM_MC_REL_JMP_S16(i16Imm);
4736 } IEM_MC_ENDIF();
4737 IEM_MC_END();
4738 }
4739 else
4740 {
4741 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4743
4744 IEM_MC_BEGIN(0, 0);
4745 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4746 IEM_MC_ADVANCE_RIP();
4747 } IEM_MC_ELSE() {
4748 IEM_MC_REL_JMP_S32(i32Imm);
4749 } IEM_MC_ENDIF();
4750 IEM_MC_END();
4751 }
4752 return VINF_SUCCESS;
4753}
4754
4755
4756/** Opcode 0x0f 0x82. */
4757FNIEMOP_DEF(iemOp_jc_Jv)
4758{
4759 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4760 IEMOP_HLP_MIN_386();
4761 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4762 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4763 {
4764 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4766
4767 IEM_MC_BEGIN(0, 0);
4768 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4769 IEM_MC_REL_JMP_S16(i16Imm);
4770 } IEM_MC_ELSE() {
4771 IEM_MC_ADVANCE_RIP();
4772 } IEM_MC_ENDIF();
4773 IEM_MC_END();
4774 }
4775 else
4776 {
4777 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4779
4780 IEM_MC_BEGIN(0, 0);
4781 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4782 IEM_MC_REL_JMP_S32(i32Imm);
4783 } IEM_MC_ELSE() {
4784 IEM_MC_ADVANCE_RIP();
4785 } IEM_MC_ENDIF();
4786 IEM_MC_END();
4787 }
4788 return VINF_SUCCESS;
4789}
4790
4791
4792/** Opcode 0x0f 0x83. */
4793FNIEMOP_DEF(iemOp_jnc_Jv)
4794{
4795 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4796 IEMOP_HLP_MIN_386();
4797 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4798 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4799 {
4800 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4802
4803 IEM_MC_BEGIN(0, 0);
4804 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4805 IEM_MC_ADVANCE_RIP();
4806 } IEM_MC_ELSE() {
4807 IEM_MC_REL_JMP_S16(i16Imm);
4808 } IEM_MC_ENDIF();
4809 IEM_MC_END();
4810 }
4811 else
4812 {
4813 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4815
4816 IEM_MC_BEGIN(0, 0);
4817 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4818 IEM_MC_ADVANCE_RIP();
4819 } IEM_MC_ELSE() {
4820 IEM_MC_REL_JMP_S32(i32Imm);
4821 } IEM_MC_ENDIF();
4822 IEM_MC_END();
4823 }
4824 return VINF_SUCCESS;
4825}
4826
4827
4828/** Opcode 0x0f 0x84. */
4829FNIEMOP_DEF(iemOp_je_Jv)
4830{
4831 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4832 IEMOP_HLP_MIN_386();
4833 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4834 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4835 {
4836 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4838
4839 IEM_MC_BEGIN(0, 0);
4840 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4841 IEM_MC_REL_JMP_S16(i16Imm);
4842 } IEM_MC_ELSE() {
4843 IEM_MC_ADVANCE_RIP();
4844 } IEM_MC_ENDIF();
4845 IEM_MC_END();
4846 }
4847 else
4848 {
4849 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851
4852 IEM_MC_BEGIN(0, 0);
4853 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4854 IEM_MC_REL_JMP_S32(i32Imm);
4855 } IEM_MC_ELSE() {
4856 IEM_MC_ADVANCE_RIP();
4857 } IEM_MC_ENDIF();
4858 IEM_MC_END();
4859 }
4860 return VINF_SUCCESS;
4861}
4862
4863
4864/** Opcode 0x0f 0x85. */
4865FNIEMOP_DEF(iemOp_jne_Jv)
4866{
4867 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4868 IEMOP_HLP_MIN_386();
4869 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4870 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4871 {
4872 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4874
4875 IEM_MC_BEGIN(0, 0);
4876 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4877 IEM_MC_ADVANCE_RIP();
4878 } IEM_MC_ELSE() {
4879 IEM_MC_REL_JMP_S16(i16Imm);
4880 } IEM_MC_ENDIF();
4881 IEM_MC_END();
4882 }
4883 else
4884 {
4885 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4887
4888 IEM_MC_BEGIN(0, 0);
4889 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4890 IEM_MC_ADVANCE_RIP();
4891 } IEM_MC_ELSE() {
4892 IEM_MC_REL_JMP_S32(i32Imm);
4893 } IEM_MC_ENDIF();
4894 IEM_MC_END();
4895 }
4896 return VINF_SUCCESS;
4897}
4898
4899
4900/** Opcode 0x0f 0x86. */
4901FNIEMOP_DEF(iemOp_jbe_Jv)
4902{
4903 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4904 IEMOP_HLP_MIN_386();
4905 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4906 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4907 {
4908 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4910
4911 IEM_MC_BEGIN(0, 0);
4912 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4913 IEM_MC_REL_JMP_S16(i16Imm);
4914 } IEM_MC_ELSE() {
4915 IEM_MC_ADVANCE_RIP();
4916 } IEM_MC_ENDIF();
4917 IEM_MC_END();
4918 }
4919 else
4920 {
4921 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4923
4924 IEM_MC_BEGIN(0, 0);
4925 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4926 IEM_MC_REL_JMP_S32(i32Imm);
4927 } IEM_MC_ELSE() {
4928 IEM_MC_ADVANCE_RIP();
4929 } IEM_MC_ENDIF();
4930 IEM_MC_END();
4931 }
4932 return VINF_SUCCESS;
4933}
4934
4935
4936/** Opcode 0x0f 0x87. */
4937FNIEMOP_DEF(iemOp_jnbe_Jv)
4938{
4939 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4940 IEMOP_HLP_MIN_386();
4941 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4942 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4943 {
4944 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4946
4947 IEM_MC_BEGIN(0, 0);
4948 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4949 IEM_MC_ADVANCE_RIP();
4950 } IEM_MC_ELSE() {
4951 IEM_MC_REL_JMP_S16(i16Imm);
4952 } IEM_MC_ENDIF();
4953 IEM_MC_END();
4954 }
4955 else
4956 {
4957 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4959
4960 IEM_MC_BEGIN(0, 0);
4961 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4962 IEM_MC_ADVANCE_RIP();
4963 } IEM_MC_ELSE() {
4964 IEM_MC_REL_JMP_S32(i32Imm);
4965 } IEM_MC_ENDIF();
4966 IEM_MC_END();
4967 }
4968 return VINF_SUCCESS;
4969}
4970
4971
4972/** Opcode 0x0f 0x88. */
4973FNIEMOP_DEF(iemOp_js_Jv)
4974{
4975 IEMOP_MNEMONIC(js_Jv, "js Jv");
4976 IEMOP_HLP_MIN_386();
4977 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4978 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4979 {
4980 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4982
4983 IEM_MC_BEGIN(0, 0);
4984 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4985 IEM_MC_REL_JMP_S16(i16Imm);
4986 } IEM_MC_ELSE() {
4987 IEM_MC_ADVANCE_RIP();
4988 } IEM_MC_ENDIF();
4989 IEM_MC_END();
4990 }
4991 else
4992 {
4993 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995
4996 IEM_MC_BEGIN(0, 0);
4997 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4998 IEM_MC_REL_JMP_S32(i32Imm);
4999 } IEM_MC_ELSE() {
5000 IEM_MC_ADVANCE_RIP();
5001 } IEM_MC_ENDIF();
5002 IEM_MC_END();
5003 }
5004 return VINF_SUCCESS;
5005}
5006
5007
5008/** Opcode 0x0f 0x89. */
5009FNIEMOP_DEF(iemOp_jns_Jv)
5010{
5011 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5012 IEMOP_HLP_MIN_386();
5013 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5014 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5015 {
5016 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5018
5019 IEM_MC_BEGIN(0, 0);
5020 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5021 IEM_MC_ADVANCE_RIP();
5022 } IEM_MC_ELSE() {
5023 IEM_MC_REL_JMP_S16(i16Imm);
5024 } IEM_MC_ENDIF();
5025 IEM_MC_END();
5026 }
5027 else
5028 {
5029 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5031
5032 IEM_MC_BEGIN(0, 0);
5033 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5034 IEM_MC_ADVANCE_RIP();
5035 } IEM_MC_ELSE() {
5036 IEM_MC_REL_JMP_S32(i32Imm);
5037 } IEM_MC_ENDIF();
5038 IEM_MC_END();
5039 }
5040 return VINF_SUCCESS;
5041}
5042
5043
5044/** Opcode 0x0f 0x8a. */
5045FNIEMOP_DEF(iemOp_jp_Jv)
5046{
5047 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5048 IEMOP_HLP_MIN_386();
5049 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5050 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5051 {
5052 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5054
5055 IEM_MC_BEGIN(0, 0);
5056 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5057 IEM_MC_REL_JMP_S16(i16Imm);
5058 } IEM_MC_ELSE() {
5059 IEM_MC_ADVANCE_RIP();
5060 } IEM_MC_ENDIF();
5061 IEM_MC_END();
5062 }
5063 else
5064 {
5065 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5067
5068 IEM_MC_BEGIN(0, 0);
5069 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5070 IEM_MC_REL_JMP_S32(i32Imm);
5071 } IEM_MC_ELSE() {
5072 IEM_MC_ADVANCE_RIP();
5073 } IEM_MC_ENDIF();
5074 IEM_MC_END();
5075 }
5076 return VINF_SUCCESS;
5077}
5078
5079
5080/** Opcode 0x0f 0x8b. */
5081FNIEMOP_DEF(iemOp_jnp_Jv)
5082{
5083 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5084 IEMOP_HLP_MIN_386();
5085 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5086 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5087 {
5088 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5090
5091 IEM_MC_BEGIN(0, 0);
5092 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5093 IEM_MC_ADVANCE_RIP();
5094 } IEM_MC_ELSE() {
5095 IEM_MC_REL_JMP_S16(i16Imm);
5096 } IEM_MC_ENDIF();
5097 IEM_MC_END();
5098 }
5099 else
5100 {
5101 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5103
5104 IEM_MC_BEGIN(0, 0);
5105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5106 IEM_MC_ADVANCE_RIP();
5107 } IEM_MC_ELSE() {
5108 IEM_MC_REL_JMP_S32(i32Imm);
5109 } IEM_MC_ENDIF();
5110 IEM_MC_END();
5111 }
5112 return VINF_SUCCESS;
5113}
5114
5115
5116/** Opcode 0x0f 0x8c. */
5117FNIEMOP_DEF(iemOp_jl_Jv)
5118{
5119 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5120 IEMOP_HLP_MIN_386();
5121 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5122 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5123 {
5124 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5126
5127 IEM_MC_BEGIN(0, 0);
5128 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5129 IEM_MC_REL_JMP_S16(i16Imm);
5130 } IEM_MC_ELSE() {
5131 IEM_MC_ADVANCE_RIP();
5132 } IEM_MC_ENDIF();
5133 IEM_MC_END();
5134 }
5135 else
5136 {
5137 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5139
5140 IEM_MC_BEGIN(0, 0);
5141 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5142 IEM_MC_REL_JMP_S32(i32Imm);
5143 } IEM_MC_ELSE() {
5144 IEM_MC_ADVANCE_RIP();
5145 } IEM_MC_ENDIF();
5146 IEM_MC_END();
5147 }
5148 return VINF_SUCCESS;
5149}
5150
5151
5152/** Opcode 0x0f 0x8d. */
5153FNIEMOP_DEF(iemOp_jnl_Jv)
5154{
5155 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5156 IEMOP_HLP_MIN_386();
5157 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5158 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5159 {
5160 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5162
5163 IEM_MC_BEGIN(0, 0);
5164 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5165 IEM_MC_ADVANCE_RIP();
5166 } IEM_MC_ELSE() {
5167 IEM_MC_REL_JMP_S16(i16Imm);
5168 } IEM_MC_ENDIF();
5169 IEM_MC_END();
5170 }
5171 else
5172 {
5173 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5175
5176 IEM_MC_BEGIN(0, 0);
5177 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5178 IEM_MC_ADVANCE_RIP();
5179 } IEM_MC_ELSE() {
5180 IEM_MC_REL_JMP_S32(i32Imm);
5181 } IEM_MC_ENDIF();
5182 IEM_MC_END();
5183 }
5184 return VINF_SUCCESS;
5185}
5186
5187
5188/** Opcode 0x0f 0x8e. */
5189FNIEMOP_DEF(iemOp_jle_Jv)
5190{
5191 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5192 IEMOP_HLP_MIN_386();
5193 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5194 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5195 {
5196 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5198
5199 IEM_MC_BEGIN(0, 0);
5200 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5201 IEM_MC_REL_JMP_S16(i16Imm);
5202 } IEM_MC_ELSE() {
5203 IEM_MC_ADVANCE_RIP();
5204 } IEM_MC_ENDIF();
5205 IEM_MC_END();
5206 }
5207 else
5208 {
5209 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5211
5212 IEM_MC_BEGIN(0, 0);
5213 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5214 IEM_MC_REL_JMP_S32(i32Imm);
5215 } IEM_MC_ELSE() {
5216 IEM_MC_ADVANCE_RIP();
5217 } IEM_MC_ENDIF();
5218 IEM_MC_END();
5219 }
5220 return VINF_SUCCESS;
5221}
5222
5223
5224/** Opcode 0x0f 0x8f. */
5225FNIEMOP_DEF(iemOp_jnle_Jv)
5226{
5227 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5228 IEMOP_HLP_MIN_386();
5229 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5230 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5231 {
5232 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5234
5235 IEM_MC_BEGIN(0, 0);
5236 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5237 IEM_MC_ADVANCE_RIP();
5238 } IEM_MC_ELSE() {
5239 IEM_MC_REL_JMP_S16(i16Imm);
5240 } IEM_MC_ENDIF();
5241 IEM_MC_END();
5242 }
5243 else
5244 {
5245 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5247
5248 IEM_MC_BEGIN(0, 0);
5249 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5250 IEM_MC_ADVANCE_RIP();
5251 } IEM_MC_ELSE() {
5252 IEM_MC_REL_JMP_S32(i32Imm);
5253 } IEM_MC_ENDIF();
5254 IEM_MC_END();
5255 }
5256 return VINF_SUCCESS;
5257}
5258
5259
5260/** Opcode 0x0f 0x90. */
5261FNIEMOP_DEF(iemOp_seto_Eb)
5262{
5263 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5264 IEMOP_HLP_MIN_386();
5265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5266
5267 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5268 * any way. AMD says it's "unused", whatever that means. We're
5269 * ignoring for now. */
5270 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5271 {
5272 /* register target */
5273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5274 IEM_MC_BEGIN(0, 0);
5275 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5276 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5277 } IEM_MC_ELSE() {
5278 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5279 } IEM_MC_ENDIF();
5280 IEM_MC_ADVANCE_RIP();
5281 IEM_MC_END();
5282 }
5283 else
5284 {
5285 /* memory target */
5286 IEM_MC_BEGIN(0, 1);
5287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5290 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5292 } IEM_MC_ELSE() {
5293 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5294 } IEM_MC_ENDIF();
5295 IEM_MC_ADVANCE_RIP();
5296 IEM_MC_END();
5297 }
5298 return VINF_SUCCESS;
5299}
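/* Note: the remaining setcc handlers below all follow this shape; the
   condition only selects which constant (1 or 0) is written to the byte
   destination.  Typical guest usage:

       cmp  eax, ebx
       setc al            ; al = 1 if eax < ebx (unsigned), else 0
*/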
5300
5301
5302/** Opcode 0x0f 0x91. */
5303FNIEMOP_DEF(iemOp_setno_Eb)
5304{
5305 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5306 IEMOP_HLP_MIN_386();
5307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5308
5309 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5310 * any way. AMD says it's "unused", whatever that means. We're
5311 * ignoring for now. */
5312 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5313 {
5314 /* register target */
5315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5316 IEM_MC_BEGIN(0, 0);
5317 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5318 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5319 } IEM_MC_ELSE() {
5320 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5321 } IEM_MC_ENDIF();
5322 IEM_MC_ADVANCE_RIP();
5323 IEM_MC_END();
5324 }
5325 else
5326 {
5327 /* memory target */
5328 IEM_MC_BEGIN(0, 1);
5329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5332 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5334 } IEM_MC_ELSE() {
5335 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5336 } IEM_MC_ENDIF();
5337 IEM_MC_ADVANCE_RIP();
5338 IEM_MC_END();
5339 }
5340 return VINF_SUCCESS;
5341}
5342
5343
5344/** Opcode 0x0f 0x92. */
5345FNIEMOP_DEF(iemOp_setc_Eb)
5346{
5347 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5348 IEMOP_HLP_MIN_386();
5349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5350
5351 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5352 * any way. AMD says it's "unused", whatever that means. We're
5353 * ignoring for now. */
5354 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5355 {
5356 /* register target */
5357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5358 IEM_MC_BEGIN(0, 0);
5359 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5360 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5361 } IEM_MC_ELSE() {
5362 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5363 } IEM_MC_ENDIF();
5364 IEM_MC_ADVANCE_RIP();
5365 IEM_MC_END();
5366 }
5367 else
5368 {
5369 /* memory target */
5370 IEM_MC_BEGIN(0, 1);
5371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5374 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5376 } IEM_MC_ELSE() {
5377 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5378 } IEM_MC_ENDIF();
5379 IEM_MC_ADVANCE_RIP();
5380 IEM_MC_END();
5381 }
5382 return VINF_SUCCESS;
5383}
5384
5385
5386/** Opcode 0x0f 0x93. */
5387FNIEMOP_DEF(iemOp_setnc_Eb)
5388{
5389 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5390 IEMOP_HLP_MIN_386();
5391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5392
5393 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5394 * any way. AMD says it's "unused", whatever that means. We're
5395 * ignoring for now. */
5396 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5397 {
5398 /* register target */
5399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5400 IEM_MC_BEGIN(0, 0);
5401 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5402 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5403 } IEM_MC_ELSE() {
5404 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5405 } IEM_MC_ENDIF();
5406 IEM_MC_ADVANCE_RIP();
5407 IEM_MC_END();
5408 }
5409 else
5410 {
5411 /* memory target */
5412 IEM_MC_BEGIN(0, 1);
5413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5416 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5418 } IEM_MC_ELSE() {
5419 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5420 } IEM_MC_ENDIF();
5421 IEM_MC_ADVANCE_RIP();
5422 IEM_MC_END();
5423 }
5424 return VINF_SUCCESS;
5425}
5426
5427
5428/** Opcode 0x0f 0x94. */
5429FNIEMOP_DEF(iemOp_sete_Eb)
5430{
5431 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5432 IEMOP_HLP_MIN_386();
5433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5434
5435 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5436 * any way. AMD says it's "unused", whatever that means. We're
5437 * ignoring for now. */
5438 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5439 {
5440 /* register target */
5441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5442 IEM_MC_BEGIN(0, 0);
5443 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5444 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5445 } IEM_MC_ELSE() {
5446 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5447 } IEM_MC_ENDIF();
5448 IEM_MC_ADVANCE_RIP();
5449 IEM_MC_END();
5450 }
5451 else
5452 {
5453 /* memory target */
5454 IEM_MC_BEGIN(0, 1);
5455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5458 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5460 } IEM_MC_ELSE() {
5461 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5462 } IEM_MC_ENDIF();
5463 IEM_MC_ADVANCE_RIP();
5464 IEM_MC_END();
5465 }
5466 return VINF_SUCCESS;
5467}
5468
5469
5470/** Opcode 0x0f 0x95. */
5471FNIEMOP_DEF(iemOp_setne_Eb)
5472{
5473 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5474 IEMOP_HLP_MIN_386();
5475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5476
5477 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5478 * any way. AMD says it's "unused", whatever that means. We're
5479 * ignoring for now. */
5480 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5481 {
5482 /* register target */
5483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5484 IEM_MC_BEGIN(0, 0);
5485 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5486 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5487 } IEM_MC_ELSE() {
5488 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5489 } IEM_MC_ENDIF();
5490 IEM_MC_ADVANCE_RIP();
5491 IEM_MC_END();
5492 }
5493 else
5494 {
5495 /* memory target */
5496 IEM_MC_BEGIN(0, 1);
5497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5500 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5502 } IEM_MC_ELSE() {
5503 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5504 } IEM_MC_ENDIF();
5505 IEM_MC_ADVANCE_RIP();
5506 IEM_MC_END();
5507 }
5508 return VINF_SUCCESS;
5509}
5510
5511
5512/** Opcode 0x0f 0x96. */
5513FNIEMOP_DEF(iemOp_setbe_Eb)
5514{
5515 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5516 IEMOP_HLP_MIN_386();
5517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5518
5519 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5520 * any way. AMD says it's "unused", whatever that means. We're
5521 * ignoring for now. */
5522 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5523 {
5524 /* register target */
5525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5526 IEM_MC_BEGIN(0, 0);
5527 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5528 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5529 } IEM_MC_ELSE() {
5530 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5531 } IEM_MC_ENDIF();
5532 IEM_MC_ADVANCE_RIP();
5533 IEM_MC_END();
5534 }
5535 else
5536 {
5537 /* memory target */
5538 IEM_MC_BEGIN(0, 1);
5539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5542 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5544 } IEM_MC_ELSE() {
5545 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5546 } IEM_MC_ENDIF();
5547 IEM_MC_ADVANCE_RIP();
5548 IEM_MC_END();
5549 }
5550 return VINF_SUCCESS;
5551}
5552
5553
5554/** Opcode 0x0f 0x97. */
5555FNIEMOP_DEF(iemOp_setnbe_Eb)
5556{
5557 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5558 IEMOP_HLP_MIN_386();
5559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5560
5561 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5562 * any way. AMD says it's "unused", whatever that means. We're
5563 * ignoring it for now. */
5564 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5565 {
5566 /* register target */
5567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5568 IEM_MC_BEGIN(0, 0);
5569 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5570 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5571 } IEM_MC_ELSE() {
5572 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5573 } IEM_MC_ENDIF();
5574 IEM_MC_ADVANCE_RIP();
5575 IEM_MC_END();
5576 }
5577 else
5578 {
5579 /* memory target */
5580 IEM_MC_BEGIN(0, 1);
5581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5584 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5585 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5586 } IEM_MC_ELSE() {
5587 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5588 } IEM_MC_ENDIF();
5589 IEM_MC_ADVANCE_RIP();
5590 IEM_MC_END();
5591 }
5592 return VINF_SUCCESS;
5593}
5594
5595
5596/** Opcode 0x0f 0x98. */
5597FNIEMOP_DEF(iemOp_sets_Eb)
5598{
5599 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5600 IEMOP_HLP_MIN_386();
5601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5602
5603 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5604 * any way. AMD says it's "unused", whatever that means. We're
5605 * ignoring it for now. */
5606 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5607 {
5608 /* register target */
5609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5610 IEM_MC_BEGIN(0, 0);
5611 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5612 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5613 } IEM_MC_ELSE() {
5614 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5615 } IEM_MC_ENDIF();
5616 IEM_MC_ADVANCE_RIP();
5617 IEM_MC_END();
5618 }
5619 else
5620 {
5621 /* memory target */
5622 IEM_MC_BEGIN(0, 1);
5623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5626 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5628 } IEM_MC_ELSE() {
5629 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5630 } IEM_MC_ENDIF();
5631 IEM_MC_ADVANCE_RIP();
5632 IEM_MC_END();
5633 }
5634 return VINF_SUCCESS;
5635}
5636
5637
5638/** Opcode 0x0f 0x99. */
5639FNIEMOP_DEF(iemOp_setns_Eb)
5640{
5641 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5642 IEMOP_HLP_MIN_386();
5643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5644
5645 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5646 * any way. AMD says it's "unused", whatever that means. We're
5647 * ignoring it for now. */
5648 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5649 {
5650 /* register target */
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 IEM_MC_BEGIN(0, 0);
5653 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5654 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5655 } IEM_MC_ELSE() {
5656 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5657 } IEM_MC_ENDIF();
5658 IEM_MC_ADVANCE_RIP();
5659 IEM_MC_END();
5660 }
5661 else
5662 {
5663 /* memory target */
5664 IEM_MC_BEGIN(0, 1);
5665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5668 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5669 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5670 } IEM_MC_ELSE() {
5671 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5672 } IEM_MC_ENDIF();
5673 IEM_MC_ADVANCE_RIP();
5674 IEM_MC_END();
5675 }
5676 return VINF_SUCCESS;
5677}
5678
5679
5680/** Opcode 0x0f 0x9a. */
5681FNIEMOP_DEF(iemOp_setp_Eb)
5682{
5683 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5684 IEMOP_HLP_MIN_386();
5685 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5686
5687 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5688 * any way. AMD says it's "unused", whatever that means. We're
5689 * ignoring it for now. */
5690 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5691 {
5692 /* register target */
5693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5694 IEM_MC_BEGIN(0, 0);
5695 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5696 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5697 } IEM_MC_ELSE() {
5698 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5699 } IEM_MC_ENDIF();
5700 IEM_MC_ADVANCE_RIP();
5701 IEM_MC_END();
5702 }
5703 else
5704 {
5705 /* memory target */
5706 IEM_MC_BEGIN(0, 1);
5707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5710 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5711 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5712 } IEM_MC_ELSE() {
5713 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5714 } IEM_MC_ENDIF();
5715 IEM_MC_ADVANCE_RIP();
5716 IEM_MC_END();
5717 }
5718 return VINF_SUCCESS;
5719}
5720
5721
5722/** Opcode 0x0f 0x9b. */
5723FNIEMOP_DEF(iemOp_setnp_Eb)
5724{
5725 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5726 IEMOP_HLP_MIN_386();
5727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5728
5729 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5730 * any way. AMD says it's "unused", whatever that means. We're
5731 * ignoring it for now. */
5732 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5733 {
5734 /* register target */
5735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5736 IEM_MC_BEGIN(0, 0);
5737 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5738 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5739 } IEM_MC_ELSE() {
5740 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5741 } IEM_MC_ENDIF();
5742 IEM_MC_ADVANCE_RIP();
5743 IEM_MC_END();
5744 }
5745 else
5746 {
5747 /* memory target */
5748 IEM_MC_BEGIN(0, 1);
5749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5752 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5753 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5754 } IEM_MC_ELSE() {
5755 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5756 } IEM_MC_ENDIF();
5757 IEM_MC_ADVANCE_RIP();
5758 IEM_MC_END();
5759 }
5760 return VINF_SUCCESS;
5761}
5762
5763
5764/** Opcode 0x0f 0x9c. */
5765FNIEMOP_DEF(iemOp_setl_Eb)
5766{
5767 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5768 IEMOP_HLP_MIN_386();
5769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5770
5771 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5772 * any way. AMD says it's "unused", whatever that means. We're
5773 * ignoring it for now. */
5774 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5775 {
5776 /* register target */
5777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5778 IEM_MC_BEGIN(0, 0);
5779 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5780 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5781 } IEM_MC_ELSE() {
5782 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5783 } IEM_MC_ENDIF();
5784 IEM_MC_ADVANCE_RIP();
5785 IEM_MC_END();
5786 }
5787 else
5788 {
5789 /* memory target */
5790 IEM_MC_BEGIN(0, 1);
5791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5794 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5795 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5796 } IEM_MC_ELSE() {
5797 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5798 } IEM_MC_ENDIF();
5799 IEM_MC_ADVANCE_RIP();
5800 IEM_MC_END();
5801 }
5802 return VINF_SUCCESS;
5803}
5804
5805
5806/** Opcode 0x0f 0x9d. */
5807FNIEMOP_DEF(iemOp_setnl_Eb)
5808{
5809 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5810 IEMOP_HLP_MIN_386();
5811 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5812
5813 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5814 * any way. AMD says it's "unused", whatever that means. We're
5815 * ignoring it for now. */
5816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5817 {
5818 /* register target */
5819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5820 IEM_MC_BEGIN(0, 0);
5821 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5822 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5823 } IEM_MC_ELSE() {
5824 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5825 } IEM_MC_ENDIF();
5826 IEM_MC_ADVANCE_RIP();
5827 IEM_MC_END();
5828 }
5829 else
5830 {
5831 /* memory target */
5832 IEM_MC_BEGIN(0, 1);
5833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5836 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5837 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5838 } IEM_MC_ELSE() {
5839 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5840 } IEM_MC_ENDIF();
5841 IEM_MC_ADVANCE_RIP();
5842 IEM_MC_END();
5843 }
5844 return VINF_SUCCESS;
5845}
5846
5847
5848/** Opcode 0x0f 0x9e. */
5849FNIEMOP_DEF(iemOp_setle_Eb)
5850{
5851 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5852 IEMOP_HLP_MIN_386();
5853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5854
5855 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5856 * any way. AMD says it's "unused", whatever that means. We're
5857 * ignoring it for now. */
5858 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5859 {
5860 /* register target */
5861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5862 IEM_MC_BEGIN(0, 0);
5863 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5864 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5865 } IEM_MC_ELSE() {
5866 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5867 } IEM_MC_ENDIF();
5868 IEM_MC_ADVANCE_RIP();
5869 IEM_MC_END();
5870 }
5871 else
5872 {
5873 /* memory target */
5874 IEM_MC_BEGIN(0, 1);
5875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5878 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5879 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5880 } IEM_MC_ELSE() {
5881 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5882 } IEM_MC_ENDIF();
5883 IEM_MC_ADVANCE_RIP();
5884 IEM_MC_END();
5885 }
5886 return VINF_SUCCESS;
5887}
5888
5889
5890/** Opcode 0x0f 0x9f. */
5891FNIEMOP_DEF(iemOp_setnle_Eb)
5892{
5893 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5894 IEMOP_HLP_MIN_386();
5895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5896
5897 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5898 * any way. AMD says it's "unused", whatever that means. We're
5899 * ignoring it for now. */
5900 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5901 {
5902 /* register target */
5903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5904 IEM_MC_BEGIN(0, 0);
5905 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5906 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5907 } IEM_MC_ELSE() {
5908 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5909 } IEM_MC_ENDIF();
5910 IEM_MC_ADVANCE_RIP();
5911 IEM_MC_END();
5912 }
5913 else
5914 {
5915 /* memory target */
5916 IEM_MC_BEGIN(0, 1);
5917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5920 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5921 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5922 } IEM_MC_ELSE() {
5923 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5924 } IEM_MC_ENDIF();
5925 IEM_MC_ADVANCE_RIP();
5926 IEM_MC_END();
5927 }
5928 return VINF_SUCCESS;
5929}
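

/*
 * For reference, a compiled-out sketch of the EFLAGS predicates the twelve
 * SETcc workers in this block test, using the file's own X86_EFL_* masks.
 * The consolidated form and the function name are illustrative only; IEM
 * dispatches on the opcode byte as above.
 */
#if 0
static int iemExampleSetccPredicate(uint8_t bOpcode, uint32_t fEFlags)
{
    int const fZf = RT_BOOL(fEFlags & X86_EFL_ZF);
    int const fCf = RT_BOOL(fEFlags & X86_EFL_CF);
    int const fSf = RT_BOOL(fEFlags & X86_EFL_SF);
    int const fOf = RT_BOOL(fEFlags & X86_EFL_OF);
    switch (bOpcode)
    {
        case 0x94: return fZf;                            /* sete:  ZF=1 */
        case 0x96: return fCf || fZf;                     /* setbe: CF=1 or ZF=1 */
        case 0x98: return fSf;                            /* sets:  SF=1 */
        case 0x9a: return RT_BOOL(fEFlags & X86_EFL_PF);  /* setp:  PF=1 */
        case 0x9c: return fSf != fOf;                     /* setl:  SF != OF */
        case 0x9e: return fZf || fSf != fOf;              /* setle: ZF=1 or SF != OF */
        default:   return 0;                              /* odd opcodes are the negations */
    }
}
#endif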
5930
5931
5932/**
5933 * Common 'push segment-register' helper.
5934 */
5935FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5936{
5937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5938 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS can be pushed in 64-bit mode. */
5939 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5940
5941 switch (pVCpu->iem.s.enmEffOpSize)
5942 {
5943 case IEMMODE_16BIT:
5944 IEM_MC_BEGIN(0, 1);
5945 IEM_MC_LOCAL(uint16_t, u16Value);
5946 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5947 IEM_MC_PUSH_U16(u16Value);
5948 IEM_MC_ADVANCE_RIP();
5949 IEM_MC_END();
5950 break;
5951
5952 case IEMMODE_32BIT:
5953 IEM_MC_BEGIN(0, 1);
5954 IEM_MC_LOCAL(uint32_t, u32Value);
5955 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5956 IEM_MC_PUSH_U32_SREG(u32Value);
5957 IEM_MC_ADVANCE_RIP();
5958 IEM_MC_END();
5959 break;
5960
5961 case IEMMODE_64BIT:
5962 IEM_MC_BEGIN(0, 1);
5963 IEM_MC_LOCAL(uint64_t, u64Value);
5964 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5965 IEM_MC_PUSH_U64(u64Value);
5966 IEM_MC_ADVANCE_RIP();
5967 IEM_MC_END();
5968 break;
5969 }
5970
5971 return VINF_SUCCESS;
5972}
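

/*
 * Note that the 32-bit case uses IEM_MC_PUSH_U32_SREG rather than a plain
 * dword push: with a 32-bit operand size real CPUs may store only the
 * selector word and leave the upper half of the stack slot untouched (the
 * SDM allows either form). A compiled-out sketch of that behaviour; the
 * function name is illustrative.
 */
#if 0
static void iemExamplePushSRegU32(uint8_t *pbStackSlot, uint16_t uSel)
{
    /* A full dword slot is reserved, but only bytes 0..1 are written;
       bytes 2..3 keep whatever the stack held before. */
    pbStackSlot[0] = (uint8_t)uSel;
    pbStackSlot[1] = (uint8_t)(uSel >> 8);
}
#endif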
5973
5974
5975/** Opcode 0x0f 0xa0. */
5976FNIEMOP_DEF(iemOp_push_fs)
5977{
5978 IEMOP_MNEMONIC(push_fs, "push fs");
5979 IEMOP_HLP_MIN_386();
5980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5981 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5982}
5983
5984
5985/** Opcode 0x0f 0xa1. */
5986FNIEMOP_DEF(iemOp_pop_fs)
5987{
5988 IEMOP_MNEMONIC(pop_fs, "pop fs");
5989 IEMOP_HLP_MIN_386();
5990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5991 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5992}
5993
5994
5995/** Opcode 0x0f 0xa2. */
5996FNIEMOP_DEF(iemOp_cpuid)
5997{
5998 IEMOP_MNEMONIC(cpuid, "cpuid");
5999 IEMOP_HLP_MIN_486(); /* Not all 486s: CPUID only appeared in later steppings. */
6000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6001 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6002}
6003
6004
6005/**
6006 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6007 * iemOp_bts_Ev_Gv.
6008 */
6009FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6010{
6011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6012 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6013
6014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6015 {
6016 /* register destination. */
6017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6018 switch (pVCpu->iem.s.enmEffOpSize)
6019 {
6020 case IEMMODE_16BIT:
6021 IEM_MC_BEGIN(3, 0);
6022 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6023 IEM_MC_ARG(uint16_t, u16Src, 1);
6024 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6025
6026 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6027 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6028 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6029 IEM_MC_REF_EFLAGS(pEFlags);
6030 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6031
6032 IEM_MC_ADVANCE_RIP();
6033 IEM_MC_END();
6034 return VINF_SUCCESS;
6035
6036 case IEMMODE_32BIT:
6037 IEM_MC_BEGIN(3, 0);
6038 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6039 IEM_MC_ARG(uint32_t, u32Src, 1);
6040 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6041
6042 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6043 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6044 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6045 IEM_MC_REF_EFLAGS(pEFlags);
6046 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6047
6048 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6049 IEM_MC_ADVANCE_RIP();
6050 IEM_MC_END();
6051 return VINF_SUCCESS;
6052
6053 case IEMMODE_64BIT:
6054 IEM_MC_BEGIN(3, 0);
6055 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6056 IEM_MC_ARG(uint64_t, u64Src, 1);
6057 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6058
6059 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6060 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6061 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6062 IEM_MC_REF_EFLAGS(pEFlags);
6063 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6064
6065 IEM_MC_ADVANCE_RIP();
6066 IEM_MC_END();
6067 return VINF_SUCCESS;
6068
6069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6070 }
6071 }
6072 else
6073 {
6074 /* memory destination. */
6075
6076 uint32_t fAccess;
6077 if (pImpl->pfnLockedU16)
6078 fAccess = IEM_ACCESS_DATA_RW;
6079 else /* BT */
6080 fAccess = IEM_ACCESS_DATA_R;
6081
6082 /** @todo test negative bit offsets! */
6083 switch (pVCpu->iem.s.enmEffOpSize)
6084 {
6085 case IEMMODE_16BIT:
6086 IEM_MC_BEGIN(3, 2);
6087 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6088 IEM_MC_ARG(uint16_t, u16Src, 1);
6089 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6091 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6092
6093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6094 if (pImpl->pfnLockedU16)
6095 IEMOP_HLP_DONE_DECODING();
6096 else
6097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6098 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6099 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6100 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6101 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6102 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6103 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6104 IEM_MC_FETCH_EFLAGS(EFlags);
6105
6106 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6107 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6108 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6109 else
6110 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6111 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6112
6113 IEM_MC_COMMIT_EFLAGS(EFlags);
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 return VINF_SUCCESS;
6117
6118 case IEMMODE_32BIT:
6119 IEM_MC_BEGIN(3, 2);
6120 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6121 IEM_MC_ARG(uint32_t, u32Src, 1);
6122 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6124 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6125
6126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6127 if (pImpl->pfnLockedU16)
6128 IEMOP_HLP_DONE_DECODING();
6129 else
6130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6131 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6132 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6133 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6134 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6135 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6136 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6137 IEM_MC_FETCH_EFLAGS(EFlags);
6138
6139 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6140 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6141 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6142 else
6143 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6144 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6145
6146 IEM_MC_COMMIT_EFLAGS(EFlags);
6147 IEM_MC_ADVANCE_RIP();
6148 IEM_MC_END();
6149 return VINF_SUCCESS;
6150
6151 case IEMMODE_64BIT:
6152 IEM_MC_BEGIN(3, 2);
6153 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6154 IEM_MC_ARG(uint64_t, u64Src, 1);
6155 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6157 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6158
6159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6160 if (pImpl->pfnLockedU16)
6161 IEMOP_HLP_DONE_DECODING();
6162 else
6163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6164 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6165 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6166 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6167 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6168 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6169 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6170 IEM_MC_FETCH_EFLAGS(EFlags);
6171
6172 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6173 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6174 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6175 else
6176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6177 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6178
6179 IEM_MC_COMMIT_EFLAGS(EFlags);
6180 IEM_MC_ADVANCE_RIP();
6181 IEM_MC_END();
6182 return VINF_SUCCESS;
6183
6184 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6185 }
6186 }
6187}
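

/*
 * The SAR/SHL pairs in the memory paths above split the signed bit offset
 * from the source register into an operand-sized byte displacement plus a
 * bit index within that operand; the arithmetic shift keeps the sign, which
 * is why the @todo above asks for negative offset tests. A compiled-out
 * sketch of the 16-bit case; the function name is illustrative.
 */
#if 0
static void iemExampleBtCalcAddr16(RTGCPTR *pGCPtrEff, int16_t i16BitNo, uint16_t *pu16Mask)
{
    int16_t i16AddrAdj = i16BitNo;
    i16AddrAdj >>= 4;                       /* signed word index (the SAR by 4) */
    i16AddrAdj <<= 1;                       /* ... times two bytes (the SHL by 1) */
    *pGCPtrEff += i16AddrAdj;               /* may move the address backwards */
    *pu16Mask   = (uint16_t)(1 << (i16BitNo & 0x0f)); /* bit within that word */
}
#endif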
6188
6189
6190/** Opcode 0x0f 0xa3. */
6191FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6192{
6193 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6194 IEMOP_HLP_MIN_386();
6195 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6196}
6197
6198
6199/**
6200 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6201 */
6202FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6203{
6204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6205 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6206
6207 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6208 {
6209 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6211
6212 switch (pVCpu->iem.s.enmEffOpSize)
6213 {
6214 case IEMMODE_16BIT:
6215 IEM_MC_BEGIN(4, 0);
6216 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6217 IEM_MC_ARG(uint16_t, u16Src, 1);
6218 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6219 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6220
6221 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6222 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6223 IEM_MC_REF_EFLAGS(pEFlags);
6224 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6225
6226 IEM_MC_ADVANCE_RIP();
6227 IEM_MC_END();
6228 return VINF_SUCCESS;
6229
6230 case IEMMODE_32BIT:
6231 IEM_MC_BEGIN(4, 0);
6232 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6233 IEM_MC_ARG(uint32_t, u32Src, 1);
6234 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6235 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6236
6237 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6238 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6239 IEM_MC_REF_EFLAGS(pEFlags);
6240 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6241
6242 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6243 IEM_MC_ADVANCE_RIP();
6244 IEM_MC_END();
6245 return VINF_SUCCESS;
6246
6247 case IEMMODE_64BIT:
6248 IEM_MC_BEGIN(4, 0);
6249 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6250 IEM_MC_ARG(uint64_t, u64Src, 1);
6251 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6252 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6253
6254 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6255 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6256 IEM_MC_REF_EFLAGS(pEFlags);
6257 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6258
6259 IEM_MC_ADVANCE_RIP();
6260 IEM_MC_END();
6261 return VINF_SUCCESS;
6262
6263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6264 }
6265 }
6266 else
6267 {
6268 switch (pVCpu->iem.s.enmEffOpSize)
6269 {
6270 case IEMMODE_16BIT:
6271 IEM_MC_BEGIN(4, 2);
6272 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6273 IEM_MC_ARG(uint16_t, u16Src, 1);
6274 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6275 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6277
6278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6279 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6280 IEM_MC_ASSIGN(cShiftArg, cShift);
6281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6282 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6283 IEM_MC_FETCH_EFLAGS(EFlags);
6284 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6286
6287 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6288 IEM_MC_COMMIT_EFLAGS(EFlags);
6289 IEM_MC_ADVANCE_RIP();
6290 IEM_MC_END();
6291 return VINF_SUCCESS;
6292
6293 case IEMMODE_32BIT:
6294 IEM_MC_BEGIN(4, 2);
6295 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6296 IEM_MC_ARG(uint32_t, u32Src, 1);
6297 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6300
6301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6302 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6303 IEM_MC_ASSIGN(cShiftArg, cShift);
6304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6305 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6306 IEM_MC_FETCH_EFLAGS(EFlags);
6307 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6308 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6309
6310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6311 IEM_MC_COMMIT_EFLAGS(EFlags);
6312 IEM_MC_ADVANCE_RIP();
6313 IEM_MC_END();
6314 return VINF_SUCCESS;
6315
6316 case IEMMODE_64BIT:
6317 IEM_MC_BEGIN(4, 2);
6318 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6319 IEM_MC_ARG(uint64_t, u64Src, 1);
6320 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6321 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6323
6324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6325 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6326 IEM_MC_ASSIGN(cShiftArg, cShift);
6327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6328 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6329 IEM_MC_FETCH_EFLAGS(EFlags);
6330 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6331 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6332
6333 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6334 IEM_MC_COMMIT_EFLAGS(EFlags);
6335 IEM_MC_ADVANCE_RIP();
6336 IEM_MC_END();
6337 return VINF_SUCCESS;
6338
6339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6340 }
6341 }
6342}
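

/*
 * A compiled-out sketch of the double precision shift the workers above
 * dispatch to, for the 32-bit shld case and without the flag updates (the
 * verification mask above already declares AF and OF undefined); the
 * function name is illustrative.
 */
#if 0
static uint32_t iemExampleShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                   /* the CPU masks the count to 5 bits */
    if (!cShift)
        return uDst;                /* count 0: destination unchanged */
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif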
6343
6344
6345/**
6346 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6347 */
6348FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6349{
6350 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6351 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6352
6353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6354 {
6355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6356
6357 switch (pVCpu->iem.s.enmEffOpSize)
6358 {
6359 case IEMMODE_16BIT:
6360 IEM_MC_BEGIN(4, 0);
6361 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6362 IEM_MC_ARG(uint16_t, u16Src, 1);
6363 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6364 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6365
6366 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6367 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6368 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6369 IEM_MC_REF_EFLAGS(pEFlags);
6370 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6371
6372 IEM_MC_ADVANCE_RIP();
6373 IEM_MC_END();
6374 return VINF_SUCCESS;
6375
6376 case IEMMODE_32BIT:
6377 IEM_MC_BEGIN(4, 0);
6378 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6379 IEM_MC_ARG(uint32_t, u32Src, 1);
6380 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6381 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6382
6383 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6384 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6385 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6386 IEM_MC_REF_EFLAGS(pEFlags);
6387 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6388
6389 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6390 IEM_MC_ADVANCE_RIP();
6391 IEM_MC_END();
6392 return VINF_SUCCESS;
6393
6394 case IEMMODE_64BIT:
6395 IEM_MC_BEGIN(4, 0);
6396 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6397 IEM_MC_ARG(uint64_t, u64Src, 1);
6398 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6399 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6400
6401 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6402 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6403 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6404 IEM_MC_REF_EFLAGS(pEFlags);
6405 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6406
6407 IEM_MC_ADVANCE_RIP();
6408 IEM_MC_END();
6409 return VINF_SUCCESS;
6410
6411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6412 }
6413 }
6414 else
6415 {
6416 switch (pVCpu->iem.s.enmEffOpSize)
6417 {
6418 case IEMMODE_16BIT:
6419 IEM_MC_BEGIN(4, 2);
6420 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6421 IEM_MC_ARG(uint16_t, u16Src, 1);
6422 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6423 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6425
6426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6428 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6429 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6430 IEM_MC_FETCH_EFLAGS(EFlags);
6431 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6432 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6433
6434 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6435 IEM_MC_COMMIT_EFLAGS(EFlags);
6436 IEM_MC_ADVANCE_RIP();
6437 IEM_MC_END();
6438 return VINF_SUCCESS;
6439
6440 case IEMMODE_32BIT:
6441 IEM_MC_BEGIN(4, 2);
6442 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6443 IEM_MC_ARG(uint32_t, u32Src, 1);
6444 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6445 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6447
6448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6450 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6451 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6452 IEM_MC_FETCH_EFLAGS(EFlags);
6453 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6454 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6455
6456 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6457 IEM_MC_COMMIT_EFLAGS(EFlags);
6458 IEM_MC_ADVANCE_RIP();
6459 IEM_MC_END();
6460 return VINF_SUCCESS;
6461
6462 case IEMMODE_64BIT:
6463 IEM_MC_BEGIN(4, 2);
6464 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6465 IEM_MC_ARG(uint64_t, u64Src, 1);
6466 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6467 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6469
6470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6472 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6473 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6474 IEM_MC_FETCH_EFLAGS(EFlags);
6475 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6476 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6477
6478 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6479 IEM_MC_COMMIT_EFLAGS(EFlags);
6480 IEM_MC_ADVANCE_RIP();
6481 IEM_MC_END();
6482 return VINF_SUCCESS;
6483
6484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6485 }
6486 }
6487}
6488
6489
6490
6491/** Opcode 0x0f 0xa4. */
6492FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6493{
6494 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6495 IEMOP_HLP_MIN_386();
6496 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6497}
6498
6499
6500/** Opcode 0x0f 0xa5. */
6501FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6502{
6503 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6504 IEMOP_HLP_MIN_386();
6505 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6506}
6507
6508
6509/** Opcode 0x0f 0xa8. */
6510FNIEMOP_DEF(iemOp_push_gs)
6511{
6512 IEMOP_MNEMONIC(push_gs, "push gs");
6513 IEMOP_HLP_MIN_386();
6514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6515 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6516}
6517
6518
6519/** Opcode 0x0f 0xa9. */
6520FNIEMOP_DEF(iemOp_pop_gs)
6521{
6522 IEMOP_MNEMONIC(pop_gs, "pop gs");
6523 IEMOP_HLP_MIN_386();
6524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6525 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6526}
6527
6528
6529/** Opcode 0x0f 0xaa. */
6530FNIEMOP_DEF(iemOp_rsm)
6531{
6532 IEMOP_MNEMONIC(rsm, "rsm");
6533 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6534 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6535 * intercept). */
6536 IEMOP_BITCH_ABOUT_STUB();
6537 return IEMOP_RAISE_INVALID_OPCODE();
6538}
6539
6542
6543/** Opcode 0x0f 0xab. */
6544FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6545{
6546 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6547 IEMOP_HLP_MIN_386();
6548 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6549}
6550
6551
6552/** Opcode 0x0f 0xac. */
6553FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6554{
6555 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6556 IEMOP_HLP_MIN_386();
6557 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6558}
6559
6560
6561/** Opcode 0x0f 0xad. */
6562FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6563{
6564 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6565 IEMOP_HLP_MIN_386();
6566 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6567}
6568
6569
6570/** Opcode 0x0f 0xae mem/0. */
6571FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6572{
6573 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6574 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6575 return IEMOP_RAISE_INVALID_OPCODE();
6576
6577 IEM_MC_BEGIN(3, 1);
6578 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6579 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6580 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6583 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6584 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6585 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6586 IEM_MC_END();
6587 return VINF_SUCCESS;
6588}
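

/*
 * For orientation, a compiled-out sketch of the 512-byte image fxsave and
 * fxrstor operate on, per the SDM layout (IEM itself uses X86FXSTATE from
 * iprt/x86.h; the names here are illustrative).
 */
#if 0
typedef struct EXAMPLEFXSAVEAREA
{
    uint16_t u16Fcw;           /* +0x000: x87 control word */
    uint16_t u16Fsw;           /* +0x002: x87 status word */
    uint8_t  u8Ftw;            /* +0x004: abridged tag word */
    uint8_t  u8Rsvd;
    uint16_t u16Fop;           /* +0x006: last x87 opcode */
    uint64_t u64Fip;           /* +0x008: instruction pointer (64-bit form) */
    uint64_t u64Fdp;           /* +0x010: data pointer (64-bit form) */
    uint32_t u32Mxcsr;         /* +0x018 */
    uint32_t u32MxcsrMask;     /* +0x01c */
    uint8_t  aRegs[8][16];     /* +0x020: ST0..ST7 / MM0..MM7 */
    uint8_t  aXmm[16][16];     /* +0x0a0: XMM0..15 (only 8 outside 64-bit mode) */
    uint8_t  abRsvd[96];       /* +0x1a0: reserved / software available */
} EXAMPLEFXSAVEAREA;          /* 512 bytes; must be 16-byte aligned in memory */
#endif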
6589
6590
6591/** Opcode 0x0f 0xae mem/1. */
6592FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6593{
6594 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6595 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6596 return IEMOP_RAISE_INVALID_OPCODE();
6597
6598 IEM_MC_BEGIN(3, 1);
6599 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6600 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6601 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6604 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6605 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6606 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6607 IEM_MC_END();
6608 return VINF_SUCCESS;
6609}
6610
6611
6612/**
6613 * @opmaps grp15
6614 * @opcode !11/2
6615 * @oppfx none
6616 * @opcpuid sse
6617 * @opgroup og_sse_mxcsrsm
6618 * @opxcpttype 5
6619 * @optest op1=0 -> mxcsr=0
6620 * @optest op1=0x2083 -> mxcsr=0x2083
6621 * @optest op1=0xfffffffe -> value.xcpt=0xd
6622 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6623 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6624 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6625 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6626 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6627 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6628 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6629 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6630 */
6631FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6632{
6633 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6634 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6635 return IEMOP_RAISE_INVALID_OPCODE();
6636
6637 IEM_MC_BEGIN(2, 0);
6638 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6639 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6642 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6643 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6644 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6645 IEM_MC_END();
6646 return VINF_SUCCESS;
6647}
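

/*
 * The op1=0xfffffffe test above expects #GP(0): ldmxcsr must reject any
 * MXCSR bit outside the supported mask. A compiled-out sketch of that
 * check; fMxCsrMask is illustrative (0xffff on CPUs with DAZ support,
 * 0xffbf without).
 */
#if 0
static int iemExampleMxCsrIsValid(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
{
    /* Any reserved bit set => the instruction raises #GP(0). */
    return !(uNewMxCsr & ~fMxCsrMask);
}
#endif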
6648
6649
6650/**
6651 * @opmaps grp15
6652 * @opcode !11/3
6653 * @oppfx none
6654 * @opcpuid sse
6655 * @opgroup og_sse_mxcsrsm
6656 * @opxcpttype 5
6657 * @optest mxcsr=0 -> op1=0
6658 * @optest mxcsr=0x2083 -> op1=0x2083
6659 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6660 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6661 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6662 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6663 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6664 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6665 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6666 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6667 */
6668FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6669{
6670 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6671 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6672 return IEMOP_RAISE_INVALID_OPCODE();
6673
6674 IEM_MC_BEGIN(2, 0);
6675 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6676 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6679 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6680 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6681 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6682 IEM_MC_END();
6683 return VINF_SUCCESS;
6684}
6685
6686
6687/**
6688 * @opmaps grp15
6689 * @opcode !11/4
6690 * @oppfx none
6691 * @opcpuid xsave
6692 * @opgroup og_system
6693 * @opxcpttype none
6694 */
6695FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6696{
6697 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6698 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6699 return IEMOP_RAISE_INVALID_OPCODE();
6700
6701 IEM_MC_BEGIN(3, 0);
6702 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6703 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6704 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6707 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6708 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6709 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6710 IEM_MC_END();
6711 return VINF_SUCCESS;
6712}
6713
6714
6715/**
6716 * @opmaps grp15
6717 * @opcode !11/5
6718 * @oppfx none
6719 * @opcpuid xsave
6720 * @opgroup og_system
6721 * @opxcpttype none
6722 */
6723FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6724{
6725 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6726 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6727 return IEMOP_RAISE_INVALID_OPCODE();
6728
6729 IEM_MC_BEGIN(3, 0);
6730 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6731 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6732 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6735 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6736 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6737 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6738 IEM_MC_END();
6739 return VINF_SUCCESS;
6740}
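

/*
 * A compiled-out sketch of the memory image xsave/xrstor work on, per the
 * SDM: a 512-byte FXSAVE-compatible legacy region, a 64-byte header, then
 * the enabled extended components; the names are illustrative.
 */
#if 0
typedef struct EXAMPLEXSAVEAREA
{
    uint8_t  abLegacy[512];    /* FXSAVE image: x87, MXCSR, XMM registers */
    uint64_t u64XStateBv;      /* XSTATE_BV: which components hold valid data */
    uint64_t u64XCompBv;       /* XCOMP_BV: compacted format (XSAVES only) */
    uint8_t  abRsvdHdr[48];    /* reserved; XRSTOR raises #GP if not zero */
    /* Extended components (AVX, ...) follow at CPUID.0DH reported offsets. */
} EXAMPLEXSAVEAREA;
#endif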
6741
6742/** Opcode 0x0f 0xae mem/6. */
6743FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6744
6745/**
6746 * @opmaps grp15
6747 * @opcode !11/7
6748 * @oppfx none
6749 * @opcpuid clfsh
6750 * @opgroup og_cachectl
6751 * @optest op1=1 ->
6752 */
6753FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6754{
6755 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6756 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6757 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6758
6759 IEM_MC_BEGIN(2, 0);
6760 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6761 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6764 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6765 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6766 IEM_MC_END();
6767 return VINF_SUCCESS;
6768}
6769
6770/**
6771 * @opmaps grp15
6772 * @opcode !11/7
6773 * @oppfx 0x66
6774 * @opcpuid clflushopt
6775 * @opgroup og_cachectl
6776 * @optest op1=1 ->
6777 */
6778FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6779{
6780 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6781 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6782 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6783
6784 IEM_MC_BEGIN(2, 0);
6785 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6786 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6789 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6790 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6791 IEM_MC_END();
6792 return VINF_SUCCESS;
6793}
6794
6795
6796/** Opcode 0x0f 0xae 11b/5. */
6797FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6798{
6799 RT_NOREF_PV(bRm);
6800 IEMOP_MNEMONIC(lfence, "lfence");
6801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6802 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6803 return IEMOP_RAISE_INVALID_OPCODE();
6804
6805 IEM_MC_BEGIN(0, 0);
6806 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6807 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6808 else
6809 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6810 IEM_MC_ADVANCE_RIP();
6811 IEM_MC_END();
6812 return VINF_SUCCESS;
6813}
6814
6815
6816/** Opcode 0x0f 0xae 11b/6. */
6817FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6818{
6819 RT_NOREF_PV(bRm);
6820 IEMOP_MNEMONIC(mfence, "mfence");
6821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6822 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6823 return IEMOP_RAISE_INVALID_OPCODE();
6824
6825 IEM_MC_BEGIN(0, 0);
6826 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6827 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6828 else
6829 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6830 IEM_MC_ADVANCE_RIP();
6831 IEM_MC_END();
6832 return VINF_SUCCESS;
6833}
6834
6835
6836/** Opcode 0x0f 0xae 11b/7. */
6837FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6838{
6839 RT_NOREF_PV(bRm);
6840 IEMOP_MNEMONIC(sfence, "sfence");
6841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6842 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6843 return IEMOP_RAISE_INVALID_OPCODE();
6844
6845 IEM_MC_BEGIN(0, 0);
6846 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6847 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6848 else
6849 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6850 IEM_MC_ADVANCE_RIP();
6851 IEM_MC_END();
6852 return VINF_SUCCESS;
6853}
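

/*
 * The three fence workers above fall back to iemAImpl_alt_mem_fence when
 * the host lacks SSE2, presumably a locked read-modify-write, which acts as
 * a full memory barrier on any x86. A compiled-out sketch of the idea using
 * a compiler builtin; the function name is illustrative.
 */
#if 0
static void iemExampleAltMemFence(void)
{
    __sync_synchronize(); /* emits a lock'ed RMW on pre-SSE2 targets */
}
#endif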
6854
6855
6856/** Opcode 0xf3 0x0f 0xae 11b/0. */
6857FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6858
6859/** Opcode 0xf3 0x0f 0xae 11b/1. */
6860FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6861
6862/** Opcode 0xf3 0x0f 0xae 11b/2. */
6863FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6864
6865/** Opcode 0xf3 0x0f 0xae 11b/3. */
6866FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6867
6868
6869/**
6870 * Group 15 jump table for register variant.
6871 */
6872IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6873{ /* pfx: none, 066h, 0f3h, 0f2h */
6874 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6875 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6876 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6877 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6878 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6879 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6880 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6881 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6882};
6883AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6884
6885
6886/**
6887 * Group 15 jump table for memory variant.
6888 */
6889IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6890{ /* pfx: none, 066h, 0f3h, 0f2h */
6891 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6892 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6893 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6894 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6895 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6896 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6897 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6898 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6899};
6900AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6901
6902
6903/** Opcode 0x0f 0xae. */
6904FNIEMOP_DEF(iemOp_Grp15)
6905{
6906 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor strictly needed, but useful for debugging 286 code. */
6907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6908 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6909 /* register, register */
6910 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6911 + pVCpu->iem.s.idxPrefix], bRm);
6912 /* memory, register */
6913 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6914 + pVCpu->iem.s.idxPrefix], bRm);
6915}
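

/*
 * A compiled-out sketch of how the two tables above are indexed: four
 * columns per ModR/M.reg row, the column picked by the decoded mandatory
 * prefix (none, 066h, 0f3h, 0f2h); the function name is illustrative.
 */
#if 0
static unsigned iemExampleGrp15Index(uint8_t bRm, uint8_t idxPrefix)
{
    unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* 0..7 */
    return iReg * 4 + idxPrefix;                                              /* 0..31 */
}
#endif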
6916
6917
6918/** Opcode 0x0f 0xaf. */
6919FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6920{
6921 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6922 IEMOP_HLP_MIN_386();
6923 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6924 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6925}
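

/*
 * A compiled-out sketch of the two-operand imul semantics for the 32-bit
 * case: CF and OF are set when the signed result does not fit the
 * destination, while SF/ZF/AF/PF are left undefined (hence the
 * verification mask above); the function name is illustrative.
 */
#if 0
static uint32_t iemExampleImulTwo32(uint32_t uDst, uint32_t uSrc, uint32_t *pfEFlags)
{
    int64_t const iResult = (int64_t)(int32_t)uDst * (int32_t)uSrc;
    if (iResult == (int32_t)iResult)
        *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_OF);
    else
        *pfEFlags |= X86_EFL_CF | X86_EFL_OF;
    return (uint32_t)iResult;
}
#endif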
6926
6927
6928/** Opcode 0x0f 0xb0. */
6929FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6930{
6931 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6932 IEMOP_HLP_MIN_486();
6933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6934
6935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6936 {
6937 IEMOP_HLP_DONE_DECODING();
6938 IEM_MC_BEGIN(4, 0);
6939 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6940 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6941 IEM_MC_ARG(uint8_t, u8Src, 2);
6942 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6943
6944 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6945 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6946 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6947 IEM_MC_REF_EFLAGS(pEFlags);
6948 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6949 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6950 else
6951 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6952
6953 IEM_MC_ADVANCE_RIP();
6954 IEM_MC_END();
6955 }
6956 else
6957 {
6958 IEM_MC_BEGIN(4, 3);
6959 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6960 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6961 IEM_MC_ARG(uint8_t, u8Src, 2);
6962 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6964 IEM_MC_LOCAL(uint8_t, u8Al);
6965
6966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6967 IEMOP_HLP_DONE_DECODING();
6968 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6969 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6970 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6971 IEM_MC_FETCH_EFLAGS(EFlags);
6972 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6973 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6974 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6975 else
6976 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6977
6978 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6979 IEM_MC_COMMIT_EFLAGS(EFlags);
6980 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6981 IEM_MC_ADVANCE_RIP();
6982 IEM_MC_END();
6983 }
6984 return VINF_SUCCESS;
6985}
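

/*
 * A compiled-out sketch of the cmpxchg semantics the byte worker above
 * dispatches to; the remaining arithmetic flags are set as for a CMP of
 * the two values and are omitted here. The function name is illustrative.
 */
#if 0
static void iemExampleCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
{
    if (*pu8Dst == *pu8Al)
    {
        *pu8Dst    = u8Src;       /* equal: store the source operand, ZF=1 */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *pu8Al     = *pu8Dst;     /* not equal: load AL with the destination, ZF=0 */
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
    }
}
#endif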
6986
6987/** Opcode 0x0f 0xb1. */
6988FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6989{
6990 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6991 IEMOP_HLP_MIN_486();
6992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6993
6994 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6995 {
6996 IEMOP_HLP_DONE_DECODING();
6997 switch (pVCpu->iem.s.enmEffOpSize)
6998 {
6999 case IEMMODE_16BIT:
7000 IEM_MC_BEGIN(4, 0);
7001 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7002 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7003 IEM_MC_ARG(uint16_t, u16Src, 2);
7004 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7005
7006 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7007 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7008 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7009 IEM_MC_REF_EFLAGS(pEFlags);
7010 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7011 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7012 else
7013 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7014
7015 IEM_MC_ADVANCE_RIP();
7016 IEM_MC_END();
7017 return VINF_SUCCESS;
7018
7019 case IEMMODE_32BIT:
7020 IEM_MC_BEGIN(4, 0);
7021 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7022 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7023 IEM_MC_ARG(uint32_t, u32Src, 2);
7024 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7025
7026 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7027 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7028 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7029 IEM_MC_REF_EFLAGS(pEFlags);
7030 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7031 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7032 else
7033 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7034
7035 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7036 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7037 IEM_MC_ADVANCE_RIP();
7038 IEM_MC_END();
7039 return VINF_SUCCESS;
7040
7041 case IEMMODE_64BIT:
7042 IEM_MC_BEGIN(4, 0);
7043 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7044 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7045#ifdef RT_ARCH_X86
7046 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7047#else
7048 IEM_MC_ARG(uint64_t, u64Src, 2);
7049#endif
7050 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7051
7052 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7053 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7054 IEM_MC_REF_EFLAGS(pEFlags);
7055#ifdef RT_ARCH_X86
7056 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7057 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7058 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7059 else
7060 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7061#else
7062 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7063 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7064 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7065 else
7066 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7067#endif
7068
7069 IEM_MC_ADVANCE_RIP();
7070 IEM_MC_END();
7071 return VINF_SUCCESS;
7072
7073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7074 }
7075 }
7076 else
7077 {
7078 switch (pVCpu->iem.s.enmEffOpSize)
7079 {
7080 case IEMMODE_16BIT:
7081 IEM_MC_BEGIN(4, 3);
7082 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7083 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7084 IEM_MC_ARG(uint16_t, u16Src, 2);
7085 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7087 IEM_MC_LOCAL(uint16_t, u16Ax);
7088
7089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7090 IEMOP_HLP_DONE_DECODING();
7091 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7092 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7093 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7094 IEM_MC_FETCH_EFLAGS(EFlags);
7095 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7096 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7097 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7098 else
7099 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7100
7101 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7102 IEM_MC_COMMIT_EFLAGS(EFlags);
7103 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7104 IEM_MC_ADVANCE_RIP();
7105 IEM_MC_END();
7106 return VINF_SUCCESS;
7107
7108 case IEMMODE_32BIT:
7109 IEM_MC_BEGIN(4, 3);
7110 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7111 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7112 IEM_MC_ARG(uint32_t, u32Src, 2);
7113 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7115 IEM_MC_LOCAL(uint32_t, u32Eax);
7116
7117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7118 IEMOP_HLP_DONE_DECODING();
7119 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7120 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7121 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7122 IEM_MC_FETCH_EFLAGS(EFlags);
7123 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7124 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7125 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7126 else
7127 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7128
7129 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7130 IEM_MC_COMMIT_EFLAGS(EFlags);
7131 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7132 IEM_MC_ADVANCE_RIP();
7133 IEM_MC_END();
7134 return VINF_SUCCESS;
7135
7136 case IEMMODE_64BIT:
7137 IEM_MC_BEGIN(4, 3);
7138 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7139 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7140#ifdef RT_ARCH_X86
7141 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7142#else
7143 IEM_MC_ARG(uint64_t, u64Src, 2);
7144#endif
7145 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7147 IEM_MC_LOCAL(uint64_t, u64Rax);
7148
7149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7150 IEMOP_HLP_DONE_DECODING();
7151 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7152 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7153 IEM_MC_FETCH_EFLAGS(EFlags);
7154 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7155#ifdef RT_ARCH_X86
7156 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7157 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7158 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7159 else
7160 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7161#else
7162 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7163 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7164 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7165 else
7166 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7167#endif
7168
7169 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7170 IEM_MC_COMMIT_EFLAGS(EFlags);
7171 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7172 IEM_MC_ADVANCE_RIP();
7173 IEM_MC_END();
7174 return VINF_SUCCESS;
7175
7176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7177 }
7178 }
7179}
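
/* Illustration only, not part of the decoder: a plain C sketch of the
   compare-and-exchange the iemAImpl_cmpxchg_u* workers called above implement
   (the real workers also update the other arithmetic flags like a CMP would,
   and the _locked variants do it atomically). The function name is made up. */
#if 0
static bool iemExampleCmpXchgU64(uint64_t *puDst, uint64_t *puRax, uint64_t uSrc)
{
    if (*puDst == *puRax)
    {
        *puDst = uSrc;      /* Equal: ZF=1 and the destination gets the source operand. */
        return true;
    }
    *puRax = *puDst;        /* Not equal: ZF=0 and rAX gets the current destination value. */
    return false;
}
#endif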
7180
7181
7182FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7183{
7184 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7185 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7186
7187 switch (pVCpu->iem.s.enmEffOpSize)
7188 {
7189 case IEMMODE_16BIT:
7190 IEM_MC_BEGIN(5, 1);
7191 IEM_MC_ARG(uint16_t, uSel, 0);
7192 IEM_MC_ARG(uint16_t, offSeg, 1);
7193 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7194 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7195 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7196 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7199 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7200 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7201 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7202 IEM_MC_END();
7203 return VINF_SUCCESS;
7204
7205 case IEMMODE_32BIT:
7206 IEM_MC_BEGIN(5, 1);
7207 IEM_MC_ARG(uint16_t, uSel, 0);
7208 IEM_MC_ARG(uint32_t, offSeg, 1);
7209 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7210 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7211 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7212 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7215 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7216 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7217 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7218 IEM_MC_END();
7219 return VINF_SUCCESS;
7220
7221 case IEMMODE_64BIT:
7222 IEM_MC_BEGIN(5, 1);
7223 IEM_MC_ARG(uint16_t, uSel, 0);
7224 IEM_MC_ARG(uint64_t, offSeg, 1);
7225 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7226 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7227 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7228 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7231 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7232 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7233 else
7234 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7235 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7236 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7237 IEM_MC_END();
7238 return VINF_SUCCESS;
7239
7240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7241 }
7242}
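
/* Illustration only: the in-memory far pointer layout the helper above
   decodes - the offset comes first, immediately followed by the 16-bit
   selector, which is why uSel is fetched at displacement 2, 4 or 8. This
   sketch shows the 32-bit operand size case; the struct name is made up. */
#if 0
#pragma pack(1)
typedef struct EXAMPLEFARPTR32
{
    uint32_t off;           /* fetched into offSeg at GCPtrEff */
    uint16_t uSel;          /* fetched into uSel at GCPtrEff + 4 */
} EXAMPLEFARPTR32;
#pragma pack()
#endif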
7243
7244
7245/** Opcode 0x0f 0xb2. */
7246FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7247{
7248 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7249 IEMOP_HLP_MIN_386();
7250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7251 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7252 return IEMOP_RAISE_INVALID_OPCODE();
7253 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7254}
7255
7256
7257/** Opcode 0x0f 0xb3. */
7258FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7259{
7260 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7261 IEMOP_HLP_MIN_386();
7262 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7263}
7264
7265
7266/** Opcode 0x0f 0xb4. */
7267FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7268{
7269 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7270 IEMOP_HLP_MIN_386();
7271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7272 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7273 return IEMOP_RAISE_INVALID_OPCODE();
7274 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7275}
7276
7277
7278/** Opcode 0x0f 0xb5. */
7279FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7280{
7281 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7282 IEMOP_HLP_MIN_386();
7283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7284 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7285 return IEMOP_RAISE_INVALID_OPCODE();
7286 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7287}
7288
7289
7290/** Opcode 0x0f 0xb6. */
7291FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7292{
7293 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7294 IEMOP_HLP_MIN_386();
7295
7296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7297
7298 /*
7299 * If rm is denoting a register, no more instruction bytes.
7300 */
7301 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7302 {
7303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7304 switch (pVCpu->iem.s.enmEffOpSize)
7305 {
7306 case IEMMODE_16BIT:
7307 IEM_MC_BEGIN(0, 1);
7308 IEM_MC_LOCAL(uint16_t, u16Value);
7309 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7310 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7311 IEM_MC_ADVANCE_RIP();
7312 IEM_MC_END();
7313 return VINF_SUCCESS;
7314
7315 case IEMMODE_32BIT:
7316 IEM_MC_BEGIN(0, 1);
7317 IEM_MC_LOCAL(uint32_t, u32Value);
7318 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7319 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7320 IEM_MC_ADVANCE_RIP();
7321 IEM_MC_END();
7322 return VINF_SUCCESS;
7323
7324 case IEMMODE_64BIT:
7325 IEM_MC_BEGIN(0, 1);
7326 IEM_MC_LOCAL(uint64_t, u64Value);
7327 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7328 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7329 IEM_MC_ADVANCE_RIP();
7330 IEM_MC_END();
7331 return VINF_SUCCESS;
7332
7333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7334 }
7335 }
7336 else
7337 {
7338 /*
7339 * We're loading a register from memory.
7340 */
7341 switch (pVCpu->iem.s.enmEffOpSize)
7342 {
7343 case IEMMODE_16BIT:
7344 IEM_MC_BEGIN(0, 2);
7345 IEM_MC_LOCAL(uint16_t, u16Value);
7346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7349 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7350 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7351 IEM_MC_ADVANCE_RIP();
7352 IEM_MC_END();
7353 return VINF_SUCCESS;
7354
7355 case IEMMODE_32BIT:
7356 IEM_MC_BEGIN(0, 2);
7357 IEM_MC_LOCAL(uint32_t, u32Value);
7358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7361 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7362 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7363 IEM_MC_ADVANCE_RIP();
7364 IEM_MC_END();
7365 return VINF_SUCCESS;
7366
7367 case IEMMODE_64BIT:
7368 IEM_MC_BEGIN(0, 2);
7369 IEM_MC_LOCAL(uint64_t, u64Value);
7370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7373 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7374 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7375 IEM_MC_ADVANCE_RIP();
7376 IEM_MC_END();
7377 return VINF_SUCCESS;
7378
7379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7380 }
7381 }
7382}
7383
7384
7385/** Opcode 0x0f 0xb7. */
7386FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7387{
7388 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7389 IEMOP_HLP_MIN_386();
7390
7391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7392
7393 /** @todo Not entirely sure how the operand size prefix is handled here,
7394 * assuming that it will be ignored. Would be nice to have a few
7395 * tests for this. */
7396 /*
7397 * If rm is denoting a register, no more instruction bytes.
7398 */
7399 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7400 {
7401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7402 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7403 {
7404 IEM_MC_BEGIN(0, 1);
7405 IEM_MC_LOCAL(uint32_t, u32Value);
7406 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7407 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7408 IEM_MC_ADVANCE_RIP();
7409 IEM_MC_END();
7410 }
7411 else
7412 {
7413 IEM_MC_BEGIN(0, 1);
7414 IEM_MC_LOCAL(uint64_t, u64Value);
7415 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7416 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7417 IEM_MC_ADVANCE_RIP();
7418 IEM_MC_END();
7419 }
7420 }
7421 else
7422 {
7423 /*
7424 * We're loading a register from memory.
7425 */
7426 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7427 {
7428 IEM_MC_BEGIN(0, 2);
7429 IEM_MC_LOCAL(uint32_t, u32Value);
7430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7433 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7434 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7435 IEM_MC_ADVANCE_RIP();
7436 IEM_MC_END();
7437 }
7438 else
7439 {
7440 IEM_MC_BEGIN(0, 2);
7441 IEM_MC_LOCAL(uint64_t, u64Value);
7442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7445 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7446 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7447 IEM_MC_ADVANCE_RIP();
7448 IEM_MC_END();
7449 }
7450 }
7451 return VINF_SUCCESS;
7452}
7453
7454
7455/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7456FNIEMOP_UD_STUB(iemOp_jmpe);
7457/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7458FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7459
7460
7461/**
7462 * @opcode 0xb9
7463 * @opinvalid intel-modrm
7464 * @optest ->
7465 */
7466FNIEMOP_DEF(iemOp_Grp10)
7467{
7468 /*
7469 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
7470 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7471 */
7472 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7473 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7474 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7475}
7476
7477
7478/** Opcode 0x0f 0xba. */
7479FNIEMOP_DEF(iemOp_Grp8)
7480{
7481 IEMOP_HLP_MIN_386();
7482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7483 PCIEMOPBINSIZES pImpl;
7484 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7485 {
7486 case 0: case 1: case 2: case 3:
7487 /* Both AMD and Intel want full modr/m decoding and imm8. */
7488 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7489 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7490 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7491 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7492 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7494 }
7495 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7496
7497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7498 {
7499 /* register destination. */
7500 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7502
7503 switch (pVCpu->iem.s.enmEffOpSize)
7504 {
7505 case IEMMODE_16BIT:
7506 IEM_MC_BEGIN(3, 0);
7507 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7508 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7509 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7510
7511 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7512 IEM_MC_REF_EFLAGS(pEFlags);
7513 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7514
7515 IEM_MC_ADVANCE_RIP();
7516 IEM_MC_END();
7517 return VINF_SUCCESS;
7518
7519 case IEMMODE_32BIT:
7520 IEM_MC_BEGIN(3, 0);
7521 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7522 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7523 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7524
7525 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7526 IEM_MC_REF_EFLAGS(pEFlags);
7527 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7528
7529 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7530 IEM_MC_ADVANCE_RIP();
7531 IEM_MC_END();
7532 return VINF_SUCCESS;
7533
7534 case IEMMODE_64BIT:
7535 IEM_MC_BEGIN(3, 0);
7536 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7537 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7538 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7539
7540 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7541 IEM_MC_REF_EFLAGS(pEFlags);
7542 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7543
7544 IEM_MC_ADVANCE_RIP();
7545 IEM_MC_END();
7546 return VINF_SUCCESS;
7547
7548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7549 }
7550 }
7551 else
7552 {
7553 /* memory destination. */
7554
7555 uint32_t fAccess;
7556 if (pImpl->pfnLockedU16)
7557 fAccess = IEM_ACCESS_DATA_RW;
7558 else /* BT */
7559 fAccess = IEM_ACCESS_DATA_R;
7560
7561 /** @todo test negative bit offsets! */
7562 switch (pVCpu->iem.s.enmEffOpSize)
7563 {
7564 case IEMMODE_16BIT:
7565 IEM_MC_BEGIN(3, 1);
7566 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7567 IEM_MC_ARG(uint16_t, u16Src, 1);
7568 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7570
7571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7572 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7573 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7574 if (pImpl->pfnLockedU16)
7575 IEMOP_HLP_DONE_DECODING();
7576 else
7577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7578 IEM_MC_FETCH_EFLAGS(EFlags);
7579 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7580 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7581 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7582 else
7583 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7584 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7585
7586 IEM_MC_COMMIT_EFLAGS(EFlags);
7587 IEM_MC_ADVANCE_RIP();
7588 IEM_MC_END();
7589 return VINF_SUCCESS;
7590
7591 case IEMMODE_32BIT:
7592 IEM_MC_BEGIN(3, 1);
7593 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7594 IEM_MC_ARG(uint32_t, u32Src, 1);
7595 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7597
7598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7599 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7600 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7601 if (pImpl->pfnLockedU16)
7602 IEMOP_HLP_DONE_DECODING();
7603 else
7604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7605 IEM_MC_FETCH_EFLAGS(EFlags);
7606 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7607 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7608 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7609 else
7610 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7611 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7612
7613 IEM_MC_COMMIT_EFLAGS(EFlags);
7614 IEM_MC_ADVANCE_RIP();
7615 IEM_MC_END();
7616 return VINF_SUCCESS;
7617
7618 case IEMMODE_64BIT:
7619 IEM_MC_BEGIN(3, 1);
7620 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7621 IEM_MC_ARG(uint64_t, u64Src, 1);
7622 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7624
7625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7626 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7627 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7628 if (pImpl->pfnLockedU16)
7629 IEMOP_HLP_DONE_DECODING();
7630 else
7631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7632 IEM_MC_FETCH_EFLAGS(EFlags);
7633 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7634 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7635 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7636 else
7637 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7638 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7639
7640 IEM_MC_COMMIT_EFLAGS(EFlags);
7641 IEM_MC_ADVANCE_RIP();
7642 IEM_MC_END();
7643 return VINF_SUCCESS;
7644
7645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7646 }
7647 }
7648}
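
/* Illustration only: how the imm8 bit offset is masked above, so that the
   immediate forms of BT/BTS/BTR/BTC never address a bit outside the operand.
   The function name is made up. */
#if 0
static unsigned iemExampleBtImm8BitNo(uint8_t u8Bit, IEMMODE enmEffOpSize)
{
    switch (enmEffOpSize)
    {
        case IEMMODE_16BIT: return u8Bit & 0x0f;    /* bit 0..15 of the word */
        case IEMMODE_32BIT: return u8Bit & 0x1f;    /* bit 0..31 of the dword */
        default:            return u8Bit & 0x3f;    /* bit 0..63 of the qword */
    }
}
#endif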
7649
7650
7651/** Opcode 0x0f 0xbb. */
7652FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7653{
7654 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7655 IEMOP_HLP_MIN_386();
7656 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7657}
7658
7659
7660/** Opcode 0x0f 0xbc. */
7661FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7662{
7663 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7664 IEMOP_HLP_MIN_386();
7665 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7666 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7667}
7668
7669
7670/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7671FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7672
7673
7674/** Opcode 0x0f 0xbd. */
7675FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7676{
7677 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7678 IEMOP_HLP_MIN_386();
7679 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7680 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7681}
7682
7683
7684/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7685FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7686
7687
7688/** Opcode 0x0f 0xbe. */
7689FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7690{
7691 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7692 IEMOP_HLP_MIN_386();
7693
7694 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7695
7696 /*
7697 * If rm is denoting a register, no more instruction bytes.
7698 */
7699 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7700 {
7701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7702 switch (pVCpu->iem.s.enmEffOpSize)
7703 {
7704 case IEMMODE_16BIT:
7705 IEM_MC_BEGIN(0, 1);
7706 IEM_MC_LOCAL(uint16_t, u16Value);
7707 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7708 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7709 IEM_MC_ADVANCE_RIP();
7710 IEM_MC_END();
7711 return VINF_SUCCESS;
7712
7713 case IEMMODE_32BIT:
7714 IEM_MC_BEGIN(0, 1);
7715 IEM_MC_LOCAL(uint32_t, u32Value);
7716 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7717 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7718 IEM_MC_ADVANCE_RIP();
7719 IEM_MC_END();
7720 return VINF_SUCCESS;
7721
7722 case IEMMODE_64BIT:
7723 IEM_MC_BEGIN(0, 1);
7724 IEM_MC_LOCAL(uint64_t, u64Value);
7725 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7726 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7727 IEM_MC_ADVANCE_RIP();
7728 IEM_MC_END();
7729 return VINF_SUCCESS;
7730
7731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7732 }
7733 }
7734 else
7735 {
7736 /*
7737 * We're loading a register from memory.
7738 */
7739 switch (pVCpu->iem.s.enmEffOpSize)
7740 {
7741 case IEMMODE_16BIT:
7742 IEM_MC_BEGIN(0, 2);
7743 IEM_MC_LOCAL(uint16_t, u16Value);
7744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7747 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7748 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7749 IEM_MC_ADVANCE_RIP();
7750 IEM_MC_END();
7751 return VINF_SUCCESS;
7752
7753 case IEMMODE_32BIT:
7754 IEM_MC_BEGIN(0, 2);
7755 IEM_MC_LOCAL(uint32_t, u32Value);
7756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7759 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7760 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7761 IEM_MC_ADVANCE_RIP();
7762 IEM_MC_END();
7763 return VINF_SUCCESS;
7764
7765 case IEMMODE_64BIT:
7766 IEM_MC_BEGIN(0, 2);
7767 IEM_MC_LOCAL(uint64_t, u64Value);
7768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7771 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7772 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7773 IEM_MC_ADVANCE_RIP();
7774 IEM_MC_END();
7775 return VINF_SUCCESS;
7776
7777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7778 }
7779 }
7780}
7781
7782
7783/** Opcode 0x0f 0xbf. */
7784FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7785{
7786 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7787 IEMOP_HLP_MIN_386();
7788
7789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7790
7791 /** @todo Not entirely sure how the operand size prefix is handled here,
7792 * assuming that it will be ignored. Would be nice to have a few
7793 * tests for this. */
7794 /*
7795 * If rm is denoting a register, no more instruction bytes.
7796 */
7797 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7798 {
7799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7800 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7801 {
7802 IEM_MC_BEGIN(0, 1);
7803 IEM_MC_LOCAL(uint32_t, u32Value);
7804 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7805 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7806 IEM_MC_ADVANCE_RIP();
7807 IEM_MC_END();
7808 }
7809 else
7810 {
7811 IEM_MC_BEGIN(0, 1);
7812 IEM_MC_LOCAL(uint64_t, u64Value);
7813 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7814 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7815 IEM_MC_ADVANCE_RIP();
7816 IEM_MC_END();
7817 }
7818 }
7819 else
7820 {
7821 /*
7822 * We're loading a register from memory.
7823 */
7824 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7825 {
7826 IEM_MC_BEGIN(0, 2);
7827 IEM_MC_LOCAL(uint32_t, u32Value);
7828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7831 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7832 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7833 IEM_MC_ADVANCE_RIP();
7834 IEM_MC_END();
7835 }
7836 else
7837 {
7838 IEM_MC_BEGIN(0, 2);
7839 IEM_MC_LOCAL(uint64_t, u64Value);
7840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7843 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7844 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7845 IEM_MC_ADVANCE_RIP();
7846 IEM_MC_END();
7847 }
7848 }
7849 return VINF_SUCCESS;
7850}
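
/* Illustration only: the sign extension the U16_SX_U64 fetchers above boil
   down to, as a plain C cast chain. The function name is made up. */
#if 0
static uint64_t iemExampleMovsxU16ToU64(uint16_t u16Value)
{
    return (uint64_t)(int64_t)(int16_t)u16Value;    /* bit 15 is replicated into bits 16..63 */
}
#endif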
7851
7852
7853/** Opcode 0x0f 0xc0. */
7854FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7855{
7856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7857 IEMOP_HLP_MIN_486();
7858 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7859
7860 /*
7861 * If rm is denoting a register, no more instruction bytes.
7862 */
7863 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7864 {
7865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7866
7867 IEM_MC_BEGIN(3, 0);
7868 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7869 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7870 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7871
7872 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7873 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7874 IEM_MC_REF_EFLAGS(pEFlags);
7875 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7876
7877 IEM_MC_ADVANCE_RIP();
7878 IEM_MC_END();
7879 }
7880 else
7881 {
7882 /*
7883 * We're accessing memory.
7884 */
7885 IEM_MC_BEGIN(3, 3);
7886 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7887 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7888 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7889 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7891
7892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7893 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7894 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7895 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7896 IEM_MC_FETCH_EFLAGS(EFlags);
7897 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7898 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7899 else
7900 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7901
7902 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7903 IEM_MC_COMMIT_EFLAGS(EFlags);
7904 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7905 IEM_MC_ADVANCE_RIP();
7906 IEM_MC_END();
7907 return VINF_SUCCESS;
7908 }
7909 return VINF_SUCCESS;
7910}
7911
7912
7913/** Opcode 0x0f 0xc1. */
7914FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7915{
7916 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7917 IEMOP_HLP_MIN_486();
7918 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7919
7920 /*
7921 * If rm is denoting a register, no more instruction bytes.
7922 */
7923 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7924 {
7925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7926
7927 switch (pVCpu->iem.s.enmEffOpSize)
7928 {
7929 case IEMMODE_16BIT:
7930 IEM_MC_BEGIN(3, 0);
7931 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7932 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7933 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7934
7935 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7936 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7937 IEM_MC_REF_EFLAGS(pEFlags);
7938 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7939
7940 IEM_MC_ADVANCE_RIP();
7941 IEM_MC_END();
7942 return VINF_SUCCESS;
7943
7944 case IEMMODE_32BIT:
7945 IEM_MC_BEGIN(3, 0);
7946 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7947 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7949
7950 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7951 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7952 IEM_MC_REF_EFLAGS(pEFlags);
7953 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7954
7955 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7956 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7957 IEM_MC_ADVANCE_RIP();
7958 IEM_MC_END();
7959 return VINF_SUCCESS;
7960
7961 case IEMMODE_64BIT:
7962 IEM_MC_BEGIN(3, 0);
7963 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7964 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7965 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7966
7967 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7968 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7969 IEM_MC_REF_EFLAGS(pEFlags);
7970 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7971
7972 IEM_MC_ADVANCE_RIP();
7973 IEM_MC_END();
7974 return VINF_SUCCESS;
7975
7976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7977 }
7978 }
7979 else
7980 {
7981 /*
7982 * We're accessing memory.
7983 */
7984 switch (pVCpu->iem.s.enmEffOpSize)
7985 {
7986 case IEMMODE_16BIT:
7987 IEM_MC_BEGIN(3, 3);
7988 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7989 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7990 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7991 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7993
7994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7995 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7996 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7997 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7998 IEM_MC_FETCH_EFLAGS(EFlags);
7999 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8000 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8001 else
8002 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8003
8004 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8005 IEM_MC_COMMIT_EFLAGS(EFlags);
8006 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8007 IEM_MC_ADVANCE_RIP();
8008 IEM_MC_END();
8009 return VINF_SUCCESS;
8010
8011 case IEMMODE_32BIT:
8012 IEM_MC_BEGIN(3, 3);
8013 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8014 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8015 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8016 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8018
8019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8020 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8021 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8022 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8023 IEM_MC_FETCH_EFLAGS(EFlags);
8024 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8025 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8026 else
8027 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8028
8029 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8030 IEM_MC_COMMIT_EFLAGS(EFlags);
8031 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8032 IEM_MC_ADVANCE_RIP();
8033 IEM_MC_END();
8034 return VINF_SUCCESS;
8035
8036 case IEMMODE_64BIT:
8037 IEM_MC_BEGIN(3, 3);
8038 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8039 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8040 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8041 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8043
8044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8045 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8046 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8047 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8048 IEM_MC_FETCH_EFLAGS(EFlags);
8049 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8050 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8051 else
8052 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8053
8054 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8055 IEM_MC_COMMIT_EFLAGS(EFlags);
8056 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8057 IEM_MC_ADVANCE_RIP();
8058 IEM_MC_END();
8059 return VINF_SUCCESS;
8060
8061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8062 }
8063 }
8064}
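
/* Illustration only: the exchange-and-add the iemAImpl_xadd_u* workers
   implement, sketched in plain C for one width (the real workers also update
   the arithmetic flags like an ADD would). The function name is made up. */
#if 0
static void iemExampleXAddU32(uint32_t *puDst, uint32_t *puReg)
{
    uint32_t const uOld = *puDst;   /* remember the old destination */
    *puDst += *puReg;               /* destination receives the sum */
    *puReg  = uOld;                 /* source register receives the old destination */
}
#endif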
8065
8066
8067/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8068FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8069/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8070FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8071/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8072FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8073/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8074FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8075
8076
8077/** Opcode 0x0f 0xc3. */
8078FNIEMOP_DEF(iemOp_movnti_My_Gy)
8079{
8080 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8081
8082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8083
8084 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8085 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8086 {
8087 switch (pVCpu->iem.s.enmEffOpSize)
8088 {
8089 case IEMMODE_32BIT:
8090 IEM_MC_BEGIN(0, 2);
8091 IEM_MC_LOCAL(uint32_t, u32Value);
8092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8093
8094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8096 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8097 return IEMOP_RAISE_INVALID_OPCODE();
8098
8099 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8100 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8101 IEM_MC_ADVANCE_RIP();
8102 IEM_MC_END();
8103 break;
8104
8105 case IEMMODE_64BIT:
8106 IEM_MC_BEGIN(0, 2);
8107 IEM_MC_LOCAL(uint64_t, u64Value);
8108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8109
8110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8112 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8113 return IEMOP_RAISE_INVALID_OPCODE();
8114
8115 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8116 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8117 IEM_MC_ADVANCE_RIP();
8118 IEM_MC_END();
8119 break;
8120
8121 case IEMMODE_16BIT:
8122 /** @todo check this form. */
8123 return IEMOP_RAISE_INVALID_OPCODE();
8124 }
8125 }
8126 else
8127 return IEMOP_RAISE_INVALID_OPCODE();
8128 return VINF_SUCCESS;
8129}
8130/* Opcode 0x66 0x0f 0xc3 - invalid */
8131/* Opcode 0xf3 0x0f 0xc3 - invalid */
8132/* Opcode 0xf2 0x0f 0xc3 - invalid */
8133
8134/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8135FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8136/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8137FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8138/* Opcode 0xf3 0x0f 0xc4 - invalid */
8139/* Opcode 0xf2 0x0f 0xc4 - invalid */
8140
8141/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8142FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8143/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8144FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8145/* Opcode 0xf3 0x0f 0xc5 - invalid */
8146/* Opcode 0xf2 0x0f 0xc5 - invalid */
8147
8148/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8149FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8150/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8151FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8152/* Opcode 0xf3 0x0f 0xc6 - invalid */
8153/* Opcode 0xf2 0x0f 0xc6 - invalid */
8154
8155
8156/** Opcode 0x0f 0xc7 !11/1. */
8157FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8158{
8159 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8160
8161 IEM_MC_BEGIN(4, 3);
8162 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8163 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8164 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8165 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8166 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8167 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8169
8170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8171 IEMOP_HLP_DONE_DECODING();
8172 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8173
8174 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8175 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8176 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8177
8178 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8179 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8180 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8181
8182 IEM_MC_FETCH_EFLAGS(EFlags);
8183 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8184 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8185 else
8186 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8187
8188 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8189 IEM_MC_COMMIT_EFLAGS(EFlags);
8190 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8191 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8192 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8193 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8194 IEM_MC_ENDIF();
8195 IEM_MC_ADVANCE_RIP();
8196
8197 IEM_MC_END();
8198 return VINF_SUCCESS;
8199}
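
/* Illustration only: the CMPXCHG8B semantics behind the register pairing set
   up above, sketched in plain C (the real worker also sets ZF accordingly and
   has a locked variant). The function name is made up. */
#if 0
static bool iemExampleCmpXchg8b(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PCRTUINT64U pu64EbxEcx)
{
    if (*pu64Dst == pu64EaxEdx->u)
    {
        *pu64Dst = pu64EbxEcx->u;   /* Equal: ZF=1 and m64 = ECX:EBX. */
        return true;
    }
    pu64EaxEdx->u = *pu64Dst;       /* Not equal: ZF=0 and EDX:EAX = m64. */
    return false;
}
#endif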
8200
8201
8202/** Opcode REX.W 0x0f 0xc7 !11/1. */
8203FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8204{
8205 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8206 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8207 {
8208#if 0
8209 RT_NOREF(bRm);
8210 IEMOP_BITCH_ABOUT_STUB();
8211 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8212#else
8213 IEM_MC_BEGIN(4, 3);
8214 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8215 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8216 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8217 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8218 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8219 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8221
8222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8223 IEMOP_HLP_DONE_DECODING();
8224 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8225 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8226
8227 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8228 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8229 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8230
8231 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8232 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8233 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8234
8235 IEM_MC_FETCH_EFLAGS(EFlags);
8236# ifdef RT_ARCH_AMD64
8237 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8238 {
8239 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8240 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8241 else
8242 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8243 }
8244 else
8245# endif
8246 {
8247 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
8248 accesses that are not all atomic, which works fine in a uni-CPU guest
8249 configuration (ignoring DMA). If guest SMP is active we have no choice
8250 but to use a rendezvous callback here. Sigh. */
8251 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8252 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8253 else
8254 {
8255 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8256 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8257 }
8258 }
8259
8260 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8261 IEM_MC_COMMIT_EFLAGS(EFlags);
8262 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8263 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8264 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8265 IEM_MC_ENDIF();
8266 IEM_MC_ADVANCE_RIP();
8267
8268 IEM_MC_END();
8269 return VINF_SUCCESS;
8270#endif
8271 }
8272 Log(("cmpxchg16b -> #UD\n"));
8273 return IEMOP_RAISE_INVALID_OPCODE();
8274}
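
/* Illustration only: unlike CMPXCHG8B, CMPXCHG16B requires its memory operand
   to be 16 byte aligned, which is what the RAISE_GP0 check above enforces -
   roughly the test below. */
#if 0
if (GCPtrEffDst & 15)
    return iemRaiseGeneralProtectionFault0(pVCpu);
#endif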
8275
8276FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8277{
8278 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8279 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8280 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8281}
8282
8283/** Opcode 0x0f 0xc7 11/6. */
8284FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8285
8286/** Opcode 0x0f 0xc7 !11/6. */
8287FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8288
8289/** Opcode 0x66 0x0f 0xc7 !11/6. */
8290FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8291
8292/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8293FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8294
8295/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8296FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8297
8298/** Opcode 0x0f 0xc7 11/7. */
8299FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8300
8301
8302/**
8303 * Group 9 jump table for register variant.
8304 */
8305IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8306{ /* pfx: none, 066h, 0f3h, 0f2h */
8307 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8308 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8309 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8310 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8311 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8312 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8313 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8314 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8315};
8316AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8317
8318
8319/**
8320 * Group 9 jump table for memory variant.
8321 */
8322IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8323{ /* pfx: none, 066h, 0f3h, 0f2h */
8324 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8325 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8326 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8327 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8328 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8329 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8330 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8331 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8332};
8333AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8334
8335
8336/** Opcode 0x0f 0xc7. */
8337FNIEMOP_DEF(iemOp_Grp9)
8338{
8339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8340 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8341 /* register, register */
8342 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8343 + pVCpu->iem.s.idxPrefix], bRm);
8344 /* memory, register */
8345 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8346 + pVCpu->iem.s.idxPrefix], bRm);
8347}
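
/* Illustration only: how the two group 9 tables above are indexed - four
   entries per /reg value, one for each mandatory prefix (none, 066h, 0f3h,
   0f2h). E.g. cmpxchg8b/16b is /1 with any prefix, so entries 4..7 of the
   memory table all point at iemOp_Grp9_cmpxchg8bOr16b. */
#if 0
unsigned const idxGrp9 = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                       + pVCpu->iem.s.idxPrefix;
#endif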
8348
8349
8350/**
8351 * Common 'bswap register' helper.
8352 */
8353FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8354{
8355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8356 switch (pVCpu->iem.s.enmEffOpSize)
8357 {
8358 case IEMMODE_16BIT:
8359 IEM_MC_BEGIN(1, 0);
8360 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8361 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8362 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8363 IEM_MC_ADVANCE_RIP();
8364 IEM_MC_END();
8365 return VINF_SUCCESS;
8366
8367 case IEMMODE_32BIT:
8368 IEM_MC_BEGIN(1, 0);
8369 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8370 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8371 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8372 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8373 IEM_MC_ADVANCE_RIP();
8374 IEM_MC_END();
8375 return VINF_SUCCESS;
8376
8377 case IEMMODE_64BIT:
8378 IEM_MC_BEGIN(1, 0);
8379 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8380 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8381 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8382 IEM_MC_ADVANCE_RIP();
8383 IEM_MC_END();
8384 return VINF_SUCCESS;
8385
8386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8387 }
8388}
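
/* Illustration only: the byte swapping the workers above implement, in plain
   C for the 64-bit case. Note that BSWAP with a 16-bit operand size is
   undefined on real hardware, which is why the 16-bit path above goes through
   a 32-bit register reference. The function name is made up. */
#if 0
static uint64_t iemExampleBswapU64(uint64_t u64)
{
    return  (u64 << 56)
         | ((u64 << 40) & UINT64_C(0x00ff000000000000))
         | ((u64 << 24) & UINT64_C(0x0000ff0000000000))
         | ((u64 <<  8) & UINT64_C(0x000000ff00000000))
         | ((u64 >>  8) & UINT64_C(0x00000000ff000000))
         | ((u64 >> 24) & UINT64_C(0x0000000000ff0000))
         | ((u64 >> 40) & UINT64_C(0x000000000000ff00))
         |  (u64 >> 56);
}
#endif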
8389
8390
8391/** Opcode 0x0f 0xc8. */
8392FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8393{
8394 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8395 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8396 prefix. It appears REX.B is actually the correct prefix. For a parallel
8397 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8398 IEMOP_HLP_MIN_486();
8399 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8400}
8401
8402
8403/** Opcode 0x0f 0xc9. */
8404FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8405{
8406 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8407 IEMOP_HLP_MIN_486();
8408 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8409}
8410
8411
8412/** Opcode 0x0f 0xca. */
8413FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8414{
8415 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8416 IEMOP_HLP_MIN_486();
8417 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8418}
8419
8420
8421/** Opcode 0x0f 0xcb. */
8422FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8423{
8424 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8425 IEMOP_HLP_MIN_486();
8426 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8427}
8428
8429
8430/** Opcode 0x0f 0xcc. */
8431FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8432{
8433 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8434 IEMOP_HLP_MIN_486();
8435 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8436}
8437
8438
8439/** Opcode 0x0f 0xcd. */
8440FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8441{
8442 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8443 IEMOP_HLP_MIN_486();
8444 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8445}
8446
8447
8448/** Opcode 0x0f 0xce. */
8449FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8450{
8451 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8452 IEMOP_HLP_MIN_486();
8453 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8454}
8455
8456
8457/** Opcode 0x0f 0xcf. */
8458FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8459{
8460 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8461 IEMOP_HLP_MIN_486();
8462 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8463}
8464
8465
8466/* Opcode 0x0f 0xd0 - invalid */
8467/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8468FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8469/* Opcode 0xf3 0x0f 0xd0 - invalid */
8470/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8471FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8472
8473/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8474FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8475/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8476FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8477/* Opcode 0xf3 0x0f 0xd1 - invalid */
8478/* Opcode 0xf2 0x0f 0xd1 - invalid */
8479
8480/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8481FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8482/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8483FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8484/* Opcode 0xf3 0x0f 0xd2 - invalid */
8485/* Opcode 0xf2 0x0f 0xd2 - invalid */
8486
8487/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8488FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8489/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8490FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8491/* Opcode 0xf3 0x0f 0xd3 - invalid */
8492/* Opcode 0xf2 0x0f 0xd3 - invalid */
8493
8494/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8495FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8496/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8497FNIEMOP_STUB(iemOp_paddq_Vx_W);
8498/* Opcode 0xf3 0x0f 0xd4 - invalid */
8499/* Opcode 0xf2 0x0f 0xd4 - invalid */
8500
8501/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8502FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8503/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8504FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8505/* Opcode 0xf3 0x0f 0xd5 - invalid */
8506/* Opcode 0xf2 0x0f 0xd5 - invalid */
8507
8508/* Opcode 0x0f 0xd6 - invalid */
8509
8510/**
8511 * @opcode 0xd6
8512 * @oppfx 0x66
8513 * @opcpuid sse2
8514 * @opgroup og_sse2_pcksclr_datamove
8515 * @opxcpttype none
8516 * @optest op1=-1 op2=2 -> op1=2
8517 * @optest op1=0 op2=-42 -> op1=-42
8518 */
8519FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8520{
8521 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8522 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8523 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8524 {
8525 /*
8526 * Register, register.
8527 */
8528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8529 IEM_MC_BEGIN(0, 2);
8530 IEM_MC_LOCAL(uint64_t, uSrc);
8531
8532 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8534
8535 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8536 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8537
8538 IEM_MC_ADVANCE_RIP();
8539 IEM_MC_END();
8540 }
8541 else
8542 {
8543 /*
8544 * Memory, register.
8545 */
8546 IEM_MC_BEGIN(0, 2);
8547 IEM_MC_LOCAL(uint64_t, uSrc);
8548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8549
8550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8552 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8553 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8554
8555 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8556 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8557
8558 IEM_MC_ADVANCE_RIP();
8559 IEM_MC_END();
8560 }
8561 return VINF_SUCCESS;
8562}
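
/* Illustration only: what the register form above does to the destination
   XMM register - the low quadword is copied and the high quadword is zeroed,
   the 'ZX' in IEM_MC_STORE_XREG_U64_ZX_U128. The function name is made up. */
#if 0
static void iemExampleMovqWqVq(PRTUINT128U puDst, uint64_t uSrc)
{
    puDst->s.Lo = uSrc;     /* low 64 bits get the source quadword */
    puDst->s.Hi = 0;        /* high 64 bits are zeroed */
}
#endif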
8563
8564
8565/**
8566 * @opcode 0xd6
8567 * @opcodesub 11 mr/reg
8568 * @oppfx f3
8569 * @opcpuid sse2
8570 * @opgroup og_sse2_simdint_datamove
8571 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8572 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8573 */
8574FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8575{
8576 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8577 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8578 {
8579 /*
8580 * Register, register.
8581 */
8582 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8584 IEM_MC_BEGIN(0, 1);
8585 IEM_MC_LOCAL(uint64_t, uSrc);
8586
8587 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8588 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8589
8590 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8591 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8592 IEM_MC_FPU_TO_MMX_MODE();
8593
8594 IEM_MC_ADVANCE_RIP();
8595 IEM_MC_END();
8596 return VINF_SUCCESS;
8597 }
8598
8599 /**
8600 * @opdone
8601 * @opmnemonic udf30fd6mem
8602 * @opcode 0xd6
8603 * @opcodesub !11 mr/reg
8604 * @oppfx f3
8605 * @opunused intel-modrm
8606 * @opcpuid sse
8607 * @optest ->
8608 */
8609 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8610}
8611
8612
8613/**
8614 * @opcode 0xd6
8615 * @opcodesub 11 mr/reg
8616 * @oppfx f2
8617 * @opcpuid sse2
8618 * @opgroup og_sse2_simdint_datamove
8619 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8620 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8621 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8622 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8623 * @optest op1=-42 op2=0xfedcba9876543210
8624 * -> op1=0xfedcba9876543210 ftw=0xff
8625 */
8626FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8627{
8628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8630 {
8631 /*
8632 * Register, register.
8633 */
8634 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8636 IEM_MC_BEGIN(0, 1);
8637 IEM_MC_LOCAL(uint64_t, uSrc);
8638
8639 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8640 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8641
8642 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8643 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8644 IEM_MC_FPU_TO_MMX_MODE();
8645
8646 IEM_MC_ADVANCE_RIP();
8647 IEM_MC_END();
8648 return VINF_SUCCESS;
8649 }
8650
8651 /**
8652 * @opdone
8653 * @opmnemonic udf20fd6mem
8654 * @opcode 0xd6
8655 * @opcodesub !11 mr/reg
8656 * @oppfx f2
8657 * @opunused intel-modrm
8658 * @opcpuid sse
8659 * @optest ->
8660 */
8661 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8662}
8663
8664/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8665FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8666{
8667 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8668 /** @todo testcase: Check that the instruction implicitly clears the high
8669 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8670 * and opcode modifications are made to work with the whole width (not
8671 * just 128). */
8672 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8673 /* Docs say register only. */
8674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8675 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8676 {
8677 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8678 IEM_MC_BEGIN(2, 0);
8679 IEM_MC_ARG(uint64_t *, pDst, 0);
8680 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8681 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8682 IEM_MC_PREPARE_FPU_USAGE();
8683 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8684 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8685 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8686 IEM_MC_ADVANCE_RIP();
8687 IEM_MC_END();
8688 return VINF_SUCCESS;
8689 }
8690 return IEMOP_RAISE_INVALID_OPCODE();
8691}
8692
8693/** Opcode 0x66 0x0f 0xd7 - */
8694FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8695{
8696 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8697 /** @todo testcase: Check that the instruction implicitly clears the high
8698 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8699 * and opcode modifications are made to work with the whole width (not
8700 * just 128). */
8701 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8702 /* Docs say register only. */
8703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8704 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8705 {
8706 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8707 IEM_MC_BEGIN(2, 0);
8708 IEM_MC_ARG(uint64_t *, pDst, 0);
8709 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8710 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8711 IEM_MC_PREPARE_SSE_USAGE();
8712 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8713 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8714 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8715 IEM_MC_ADVANCE_RIP();
8716 IEM_MC_END();
8717 return VINF_SUCCESS;
8718 }
8719 return IEMOP_RAISE_INVALID_OPCODE();
8720}
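
/* Illustration only: what PMOVMSKB computes - the most significant bit of
   each source byte packed into the low bits of the destination, the rest
   zeroed. Plain C for the 64-bit (MMX) source; the function name is made up. */
#if 0
static uint64_t iemExamplePMovMskBU64(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif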
8721
/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */


/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_STUB(iemOp_psubusb_Vx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_STUB(iemOp_pand_Vx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_STUB(iemOp_psraw_Vx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);


/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}

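/*
 * Decode illustration (not compiled): the mod/reg/rm split which the mode
 * tests above rely on.  mod == 3 selects the register,register form (invalid
 * for movntq), anything else a memory form; the shifts and masks mirror the
 * X86_MODRM_* constants.  The function name is made up for this sketch.
 */
#if 0
# include <stdint.h>

static void sketchSplitModRm(uint8_t bRm, unsigned *puMod, unsigned *puReg, unsigned *puRm)
{
    *puMod = (bRm >> 6) & 3; /* 3 = register operand, 0..2 = memory operand */
    *puReg = (bRm >> 3) & 7; /* extended to 4 bits by REX.R (uRexReg) elsewhere */
    *puRm  =  bRm       & 7; /* extended to 4 bits by REX.B (uRexB) elsewhere */
}
#endif
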
/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}

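/*
 * Illustration only (not compiled): the alignment requirement behind
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE above.  movntdq takes a 16-byte aligned
 * memory operand and a misaligned effective address raises \#GP(0) instead
 * of storing.  This sketch only models the check, not the store or the
 * non-temporal hint; the function name is made up.
 */
#if 0
static bool sketchIsMovntdqTargetValid(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* false would mean raising #GP(0) */
}
#endif
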
/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}

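/*
 * Illustration only (not compiled): what the g_iemAImpl_pxor workers handed
 * to the common full,full-to-full helpers above boil down to -- a plain
 * bitwise XOR over the whole operand.  The 128-bit variant is sketched; the
 * 64-bit MMX one is the same over a single uint64_t.  sketch_pxor_u128 is
 * made up; the real worker is implemented elsewhere.
 */
#if 0
static void sketch_pxor_u128(RTUINT128U *puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] ^= puSrc->au64[0]; /* low qword */
    puDst->au64[1] ^= puSrc->au64[1]; /* high qword */
}
#endif
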
/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/* Opcode 0xf3 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf3 0x0f 0xf1 - invalid */
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf2 - invalid */
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf3 - invalid */
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf3 0x0f 0xf4 - invalid */
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf5 - invalid */
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf6 - invalid */
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf3 0x0f 0xf7 - invalid */
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf3 0x0f 0xf8 - invalid */
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf9 - invalid */
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfa - invalid */
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf3 0x0f 0xfb - invalid */
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfc - invalid */
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfd - invalid */
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf3 0x0f 0xfe - invalid */
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode 0x0f 0xff - ud0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}

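/*
 * Decode-length illustration (not compiled): both vendors raise \#UD for
 * 0x0f 0xff, but the handler above only consumes a ModR/M byte (plus any
 * SIB/displacement it implies) on Intel.  sketchUd0Length is hypothetical
 * and simplified -- it ignores 16-bit addressing and the mod=0/SIB-base=101b
 * disp32 case; the real work happens in iemOpHlpCalcRmEffAddr above.
 */
#if 0
# include <stdint.h>

static unsigned sketchUd0Length(int fIntel, uint8_t const *pbInstr /* points at the 0x0f byte */)
{
    if (!fIntel)
        return 2;                       /* AMD: just the two opcode bytes. */
    uint8_t const  bRm  = pbInstr[2];
    unsigned const iMod = bRm >> 6;
    unsigned const iRm  = bRm & 7;
    unsigned       cb   = 3;            /* two opcode bytes + ModR/M */
    if (iMod != 3 && iRm == 4)
        cb += 1;                        /* SIB byte (32/64-bit addressing) */
    if (iMod == 1)
        cb += 1;                        /* disp8 */
    else if (iMod == 2 || (iMod == 0 && iRm == 5))
        cb += 4;                        /* disp32 (incl. RIP-relative) */
    return cb;
}
#endif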


/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);

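/*
 * Dispatch illustration (not compiled): with four columns per opcode the map
 * is indexed by the opcode byte times four plus a prefix index, which is what
 * the 1024-entry AssertCompile above guards.  The 0..3 prefix index
 * assignment below (none, 0x66, 0xf3, 0xf2) follows the column comment at
 * the top of the table; sketchLookupTwoByte itself is made up.
 */
#if 0
static PFNIEMOP sketchLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix)
{
    /* idxPrefix: 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif
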
/** @} */
